From dd6bcb0e57ebe693d3a3f398eba9045542f6ea4b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:48:40 -0700 Subject: [PATCH 001/855] Moving all logging files into subdirectory. Done via: $ mkdir -p logging/google/cloud $ cp google/__init__.py logging/google/__init__.py $ git add logging/google/__init__.py $ cp google/cloud/__init__.py logging/google/cloud/__init__.py $ git add logging/google/cloud/__init__.py $ git mv google/cloud/logging logging/google/cloud/logging $ git mv unit_tests/logging logging/unit_tests --- .../google-cloud-logging/google/__init__.py | 20 + .../google/cloud/__init__.py | 20 + .../google/cloud/logging/__init__.py | 26 + .../google/cloud/logging/_gax.py | 645 ++++++++++ .../google/cloud/logging/client.py | 302 +++++ .../google/cloud/logging/connection.py | 441 +++++++ .../google/cloud/logging/entries.py | 157 +++ .../google/cloud/logging/handlers/__init__.py | 18 + .../google/cloud/logging/handlers/handlers.py | 133 ++ .../logging/handlers/transports/__init__.py | 26 + .../handlers/transports/background_thread.py | 171 +++ .../cloud/logging/handlers/transports/base.py | 35 + .../cloud/logging/handlers/transports/sync.py | 43 + .../google/cloud/logging/logger.py | 453 +++++++ .../google/cloud/logging/metric.py | 183 +++ .../google/cloud/logging/sink.py | 187 +++ .../unit_tests/__init__.py | 13 + .../unit_tests/handlers/__init__.py | 13 + .../unit_tests/handlers/test_handlers.py | 122 ++ .../handlers/transports/__init__.py | 13 + .../transports/test_background_thread.py | 193 +++ .../handlers/transports/test_base.py | 32 + .../handlers/transports/test_sync.py | 93 ++ .../unit_tests/test__gax.py | 1104 +++++++++++++++++ .../unit_tests/test_client.py | 496 ++++++++ .../unit_tests/test_connection.py | 640 ++++++++++ .../unit_tests/test_entries.py | 235 ++++ .../unit_tests/test_logger.py | 705 +++++++++++ .../unit_tests/test_metric.py | 251 ++++ .../unit_tests/test_sink.py | 262 ++++ 30 files changed, 7032 insertions(+) create mode 100644 packages/google-cloud-logging/google/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/_gax.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/connection.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/entries.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/logger.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/metric.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/sink.py create mode 100644 packages/google-cloud-logging/unit_tests/__init__.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/__init__.py create mode 100644 
packages/google-cloud-logging/unit_tests/handlers/test_handlers.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py create mode 100644 packages/google-cloud-logging/unit_tests/test__gax.py create mode 100644 packages/google-cloud-logging/unit_tests/test_client.py create mode 100644 packages/google-cloud-logging/unit_tests/test_connection.py create mode 100644 packages/google-cloud-logging/unit_tests/test_entries.py create mode 100644 packages/google-cloud-logging/unit_tests/test_logger.py create mode 100644 packages/google-cloud-logging/unit_tests/test_metric.py create mode 100644 packages/google-cloud-logging/unit_tests/test_sink.py diff --git a/packages/google-cloud-logging/google/__init__.py b/packages/google-cloud-logging/google/__init__.py new file mode 100644 index 000000000000..b2b833373882 --- /dev/null +++ b/packages/google-cloud-logging/google/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/google/cloud/__init__.py b/packages/google-cloud-logging/google/cloud/__init__.py new file mode 100644 index 000000000000..8ac7b74af136 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py new file mode 100644 index 000000000000..ea29393f6292 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Stackdriver Logging API wrapper.""" + + +from google.cloud.logging.client import Client +from google.cloud.logging.connection import Connection + + +SCOPE = Connection.SCOPE +ASCENDING = 'timestamp asc' +"""Query string to order by ascending timestamps.""" +DESCENDING = 'timestamp desc' +"""Query string to order by descending timestamps.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py new file mode 100644 index 000000000000..fab3077941c2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -0,0 +1,645 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GAX wrapper for Logging API requests.""" + +import json + +from google.gax import CallOptions +from google.gax import INITIAL_PAGE +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code +from google.logging.type.log_severity_pb2 import LogSeverity +from google.logging.v2.logging_config_pb2 import LogSink +from google.logging.v2.logging_metrics_pb2 import LogMetric +from google.logging.v2.log_entry_pb2 import LogEntry +from google.protobuf.json_format import Parse +from grpc import StatusCode + +# pylint: disable=ungrouped-imports +from google.cloud._helpers import _datetime_to_pb_timestamp +from google.cloud._helpers import _pb_timestamp_to_rfc3339 +from google.cloud.exceptions import Conflict +from google.cloud.exceptions import NotFound +# pylint: enable=ungrouped-imports + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + :type gax_api: + :class:`google.logging.v2.logging_service_v2_api.LoggingServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_entries(self, projects, filter_='', order_by='', + page_size=0, page_token=None): + """Return a page of log entry resources. + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the API's client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`~google.cloud.logging.ASCENDING` + or :data:`~google.cloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return. If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries.
If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more entries can be retrieved + with another call (pass that value as ``page_token``). + """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + page_iter = self._gax_api.list_log_entries( + projects, filter_, order_by, page_size, options) + entries = [_log_entry_pb_to_mapping(entry_pb) + for entry_pb in page_iter.next()] + token = page_iter.page_token or None + return entries, token + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + """API call: log an entry resource via a POST request + + :type entries: sequence of mapping + :param entries: the log entry resources to log. + + :type logger_name: string + :param logger_name: name of default logger to which to log the entries; + individual entries may override. + + :type resource: mapping + :param resource: default resource to associate with entries; + individual entries may override. + + :type labels: mapping + :param labels: default labels to associate with entries; + individual entries may override. + """ + options = None + partial_success = False + entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + self._gax_api.write_log_entries(entry_pbs, logger_name, resource, + labels, partial_success, options) + + def logger_delete(self, project, logger_name): + """API call: delete all entries in a logger via a DELETE request + + :type project: string + :param project: ID of project containing the log entries to delete + + :type logger_name: string + :param logger_name: name of logger containing the log entries to delete + """ + options = None + path = 'projects/%s/logs/%s' % (project, logger_name) + try: + self._gax_api.delete_log(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + + +class _SinksAPI(object): + """Helper mapping sink-related APIs. + + :type gax_api: + :class:`google.logging.v2.config_service_v2_api.ConfigServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_sinks(self, project, page_size=0, page_token=None): + """List sinks for the project associated with this client. + + :type project: string + :param project: ID of the project whose sinks are to be listed. + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more sinks can be retrieved + with another call (pass that value as ``page_token``). + """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + path = 'projects/%s' % (project,) + page_iter = self._gax_api.list_sinks(path, page_size, options) + sinks = [_log_sink_pb_to_mapping(log_sink_pb) + for log_sink_pb in page_iter.next()] + token = page_iter.page_token or None + return sinks, token + + def sink_create(self, project, sink_name, filter_, destination): + """API call: create a sink resource. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type project: string + :param project: ID of the project in which to create the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + options = None + parent = 'projects/%s' % (project,) + sink_pb = LogSink(name=sink_name, filter=filter_, + destination=destination) + try: + self._gax_api.create_sink(parent, sink_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + path = 'projects/%s/sinks/%s' % (project, sink_name) + raise Conflict(path) + raise + + def sink_get(self, project, sink_name): + """API call: retrieve a sink resource. + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + try: + sink_pb = self._gax_api.get_sink(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_sink_pb_to_mapping(sink_pb) + + def sink_update(self, project, sink_name, filter_, destination): + """API call: update a sink resource. + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + sink_pb = LogSink(name=path, filter=filter_, destination=destination) + try: + self._gax_api.update_sink(path, sink_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_sink_pb_to_mapping(sink_pb) + + def sink_delete(self, project, sink_name): + """API call: delete a sink resource. + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + try: + self._gax_api.delete_sink(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + + +class _MetricsAPI(object): + """Helper mapping sink-related APIs. + + :type gax_api: + :class:`google.logging.v2.metrics_service_v2_api.MetricsServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_metrics(self, project, page_size=0, page_token=None): + """List metrics for the project associated with this client. + + :type project: string + :param project: ID of the project whose metrics are to be listed. 
+ + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more metrics can be retrieved + with another call (pass that value as ``page_token``). + """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + path = 'projects/%s' % (project,) + page_iter = self._gax_api.list_log_metrics(path, page_size, options) + metrics = [_log_metric_pb_to_mapping(log_metric_pb) + for log_metric_pb in page_iter.next()] + token = page_iter.page_token or None + return metrics, token + + def metric_create(self, project, metric_name, filter_, description): + """API call: create a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type project: string + :param project: ID of the project in which to create the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + options = None + parent = 'projects/%s' % (project,) + metric_pb = LogMetric(name=metric_name, filter=filter_, + description=description) + try: + self._gax_api.create_log_metric(parent, metric_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + path = 'projects/%s/metrics/%s' % (project, metric_name) + raise Conflict(path) + raise + + def metric_get(self, project, metric_name): + """API call: retrieve a metric resource. + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). + """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + try: + metric_pb = self._gax_api.get_log_metric(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_metric_pb_to_mapping(metric_pb) + + def metric_update(self, project, metric_name, filter_, description): + """API call: update a metric resource. + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). 
+ """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + metric_pb = LogMetric(name=path, filter=filter_, + description=description) + try: + self._gax_api.update_log_metric(path, metric_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_metric_pb_to_mapping(metric_pb) + + def metric_delete(self, project, metric_name): + """API call: delete a metric resource. + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + try: + self._gax_api.delete_log_metric(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + + +def _mon_resource_pb_to_mapping(resource_pb): + """Helper for :func:_log_entry_pb_to_mapping`. + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + mapping = { + 'type': resource_pb.type, + } + if resource_pb.labels: + mapping['labels'] = resource_pb.labels + return mapping + + +def _value_pb_to_value(value_pb): + """Helper for :func:`_log_entry_pb_to_mapping`. + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + kind = value_pb.WhichOneof('kind') + + if kind is None: + result = None + + elif kind == 'string_value': + result = value_pb.string_value + + elif kind == 'bool_value': + result = value_pb.bool_value + + elif kind == 'number_value': + result = value_pb.number_value + + elif kind == 'list_value': + result = [_value_pb_to_value(element) + for element in value_pb.list_value.values] + + elif kind == 'struct_value': + result = _struct_pb_to_mapping(value_pb.struct_value) + + else: + raise ValueError('Value protobuf had unknown kind: %s' % (kind,)) + + return result + + +def _struct_pb_to_mapping(struct_pb): + """Helper for :func:`_log_entry_pb_to_mapping`. + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return {key: _value_pb_to_value(struct_pb.fields[key]) + for key in struct_pb.fields} + + +def _log_entry_pb_to_mapping(entry_pb): + """Helper for :meth:`list_entries`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. 
+ """ + mapping = { + 'logName': entry_pb.log_name, + 'resource': _mon_resource_pb_to_mapping(entry_pb.resource), + 'severity': LogSeverity.Name(entry_pb.severity), + 'insertId': entry_pb.insert_id, + 'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp), + 'labels': entry_pb.labels, + } + if entry_pb.HasField('text_payload'): + mapping['textPayload'] = entry_pb.text_payload + + if entry_pb.HasField('json_payload'): + mapping['jsonPayload'] = _struct_pb_to_mapping(entry_pb.json_payload) + + if entry_pb.HasField('proto_payload'): + mapping['protoPayload'] = entry_pb.proto_payload + + if entry_pb.http_request: + request = entry_pb.http_request + mapping['httpRequest'] = { + 'requestMethod': request.request_method, + 'requestUrl': request.request_url, + 'status': request.status, + 'referer': request.referer, + 'userAgent': request.user_agent, + 'cacheHit': request.cache_hit, + 'requestSize': request.request_size, + 'responseSize': request.response_size, + 'remoteIp': request.remote_ip, + } + + if entry_pb.operation: + operation = entry_pb.operation + mapping['operation'] = { + 'producer': operation.producer, + 'id': operation.id, + 'first': operation.first, + 'last': operation.last, + } + + return mapping + + +def _http_request_mapping_to_pb(info, request): + """Helper for _log_entry_mapping_to_pb + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + optional_request_keys = { + 'requestMethod': 'request_method', + 'requestUrl': 'request_url', + 'status': 'status', + 'referer': 'referer', + 'userAgent': 'user_agent', + 'cacheHit': 'cache_hit', + 'requestSize': 'request_size', + 'responseSize': 'response_size', + 'remoteIp': 'remote_ip', + } + for key, pb_name in optional_request_keys.items(): + if key in info: + setattr(request, pb_name, info[key]) + + +def _log_operation_mapping_to_pb(info, operation): + """Helper for _log_entry_mapping_to_pb + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + operation.producer = info['producer'] + operation.id = info['id'] + + if 'first' in info: + operation.first = info['first'] + + if 'last' in info: + operation.last = info['last'] + + +def _log_entry_mapping_to_pb(mapping): + """Helper for :meth:`write_entries`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. 
+ """ + # pylint: disable=too-many-branches + entry_pb = LogEntry() + + optional_scalar_keys = { + 'logName': 'log_name', + 'insertId': 'insert_id', + 'textPayload': 'text_payload', + } + + for key, pb_name in optional_scalar_keys.items(): + if key in mapping: + setattr(entry_pb, pb_name, mapping[key]) + + if 'resource' in mapping: + entry_pb.resource.type = mapping['resource']['type'] + + if 'severity' in mapping: + severity = mapping['severity'] + if isinstance(severity, str): + severity = LogSeverity.Value(severity) + entry_pb.severity = severity + + if 'timestamp' in mapping: + timestamp = _datetime_to_pb_timestamp(mapping['timestamp']) + entry_pb.timestamp.CopyFrom(timestamp) + + if 'labels' in mapping: + for key, value in mapping['labels'].items(): + entry_pb.labels[key] = value + + if 'jsonPayload' in mapping: + for key, value in mapping['jsonPayload'].items(): + entry_pb.json_payload[key] = value + + if 'protoPayload' in mapping: + Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload) + + if 'httpRequest' in mapping: + _http_request_mapping_to_pb( + mapping['httpRequest'], entry_pb.http_request) + + if 'operation' in mapping: + _log_operation_mapping_to_pb( + mapping['operation'], entry_pb.operation) + + return entry_pb + # pylint: enable=too-many-branches + + +def _log_sink_pb_to_mapping(sink_pb): + """Helper for :meth:`list_sinks`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return { + 'name': sink_pb.name, + 'destination': sink_pb.destination, + 'filter': sink_pb.filter, + } + + +def _log_metric_pb_to_mapping(metric_pb): + """Helper for :meth:`list_metrics`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return { + 'name': metric_pb.name, + 'description': metric_pb.description, + 'filter': metric_pb.filter, + } diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py new file mode 100644 index 000000000000..ab02c90f464e --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -0,0 +1,302 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Client for interacting with the Google Stackdriver Logging API.""" + +import os + +try: + from google.cloud.gapic.logging.v2.config_service_v2_api import ( + ConfigServiceV2Api as GeneratedSinksAPI) + from google.cloud.gapic.logging.v2.logging_service_v2_api import ( + LoggingServiceV2Api as GeneratedLoggingAPI) + from google.cloud.gapic.logging.v2.metrics_service_v2_api import ( + MetricsServiceV2Api as GeneratedMetricsAPI) + from google.cloud.logging._gax import _LoggingAPI as GAXLoggingAPI + from google.cloud.logging._gax import _MetricsAPI as GAXMetricsAPI + from google.cloud.logging._gax import _SinksAPI as GAXSinksAPI +except ImportError: # pragma: NO COVER + _HAVE_GAX = False + GeneratedLoggingAPI = GAXLoggingAPI = None + GeneratedMetricsAPI = GAXMetricsAPI = None + GeneratedSinksAPI = GAXSinksAPI = None +else: + _HAVE_GAX = True + +from google.cloud.client import JSONClient +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.logging.connection import Connection +from google.cloud.logging.connection import _LoggingAPI as JSONLoggingAPI +from google.cloud.logging.connection import _MetricsAPI as JSONMetricsAPI +from google.cloud.logging.connection import _SinksAPI as JSONSinksAPI +from google.cloud.logging.entries import ProtobufEntry +from google.cloud.logging.entries import StructEntry +from google.cloud.logging.entries import TextEntry +from google.cloud.logging.logger import Logger +from google.cloud.logging.metric import Metric +from google.cloud.logging.sink import Sink + + +_DISABLE_GAX = os.getenv(DISABLE_GRPC, False) +_USE_GAX = _HAVE_GAX and not _DISABLE_GAX + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: str + :param project: the project which the client acts on behalf of. + If not passed, falls back to the default inferred + from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + _logging_api = _sinks_api = _metrics_api = None + + @property + def logging_api(self): + """Helper for logging-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + """ + if self._logging_api is None: + if _USE_GAX: + generated = GeneratedLoggingAPI() + self._logging_api = GAXLoggingAPI(generated) + else: + self._logging_api = JSONLoggingAPI(self.connection) + return self._logging_api + + @property + def sinks_api(self): + """Helper for log sink-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + """ + if self._sinks_api is None: + if _USE_GAX: + generated = GeneratedSinksAPI() + self._sinks_api = GAXSinksAPI(generated) + else: + self._sinks_api = JSONSinksAPI(self.connection) + return self._sinks_api + + @property + def metrics_api(self): + """Helper for log metric-related API calls. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + """ + if self._metrics_api is None: + if _USE_GAX: + generated = GeneratedMetricsAPI() + self._metrics_api = GAXMetricsAPI(generated) + else: + self._metrics_api = JSONMetricsAPI(self.connection) + return self._metrics_api + + def logger(self, name): + """Creates a logger bound to the current client. + + :type name: str + :param name: the name of the logger to be constructed. + + :rtype: :class:`google.cloud.logging.logger.Logger` + :returns: Logger created with the current client. + """ + return Logger(name, client=self) + + def _entry_from_resource(self, resource, loggers): + """Detect correct entry type from resource and instantiate. + + :type resource: dict + :param resource: one entry resource from API response + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: One of: + :class:`google.cloud.logging.entries.TextEntry`, + :class:`google.cloud.logging.entries.StructEntry`, + :class:`google.cloud.logging.entries.ProtobufEntry` + :returns: the entry instance, constructed via the resource + """ + if 'textPayload' in resource: + return TextEntry.from_api_repr(resource, self, loggers) + elif 'jsonPayload' in resource: + return StructEntry.from_api_repr(resource, self, loggers) + elif 'protoPayload' in resource: + return ProtobufEntry.from_api_repr(resource, self, loggers) + raise ValueError('Cannot parse log entry resource') + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`~google.cloud.logging.ASCENDING` + or :data:`~google.cloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of :class:`google.cloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + if projects is None: + projects = [self.project] + + resources, token = self.logging_api.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) + loggers = {} + entries = [self._entry_from_resource(resource, loggers) + for resource in resources] + return entries, token + + def sink(self, name, filter_=None, destination=None): + """Creates a sink bound to the current client. + + :type name: str + :param name: the name of the sink to be constructed. + + :type filter_: str + :param filter_: (optional) the advanced logs filter expression + defining the entries exported by the sink. If not + passed, the instance should already exist, to be + refreshed via :meth:`Sink.reload`. 
+ + :type destination: str + :param destination: destination URI for the entries exported by + the sink. If not passed, the instance should + already exist, to be refreshed via + :meth:`Sink.reload`. + + :rtype: :class:`google.cloud.logging.sink.Sink` + :returns: Sink created with the current client. + """ + return Sink(name, filter_, destination, client=self) + + def list_sinks(self, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of :class:`google.cloud.logging.sink.Sink`, plus a + "next page token" string: if not None, indicates that + more sinks can be retrieved with another call (pass that + value as ``page_token``). + """ + resources, token = self.sinks_api.list_sinks( + self.project, page_size, page_token) + sinks = [Sink.from_api_repr(resource, self) + for resource in resources] + return sinks, token + + def metric(self, name, filter_=None, description=''): + """Creates a metric bound to the current client. + + :type name: str + :param name: the name of the metric to be constructed. + + :type filter_: str + :param filter_: the advanced logs filter expression defining the + entries tracked by the metric. If not + passed, the instance should already exist, to be + refreshed via :meth:`Metric.reload`. + + :type description: str + :param description: the description of the metric to be constructed. + If not passed, the instance should already exist, + to be refreshed via :meth:`Metric.reload`. + + :rtype: :class:`google.cloud.logging.metric.Metric` + :returns: Metric created with the current client. + """ + return Metric(name, filter_, client=self, description=description) + + def list_metrics(self, page_size=None, page_token=None): + """List metrics for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of :class:`google.cloud.logging.metric.Metric`, plus a + "next page token" string: if not None, indicates that + more metrics can be retrieved with another call (pass that + value as ``page_token``). + """ + resources, token = self.metrics_api.list_metrics( + self.project, page_size, page_token) + metrics = [Metric.from_api_repr(resource, self) + for resource in resources] + return metrics, token diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py new file mode 100644 index 000000000000..60b893751fc6 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -0,0 +1,441 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with Stackdriver Logging connections.""" + +from google.cloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Stackdriver Logging via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: (Optional) HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to the value + :attr:`Connection.API_BASE_URL`. + """ + + API_BASE_URL = 'https://logging.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v2beta1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Logging consumer.""" + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type connection: :class:`google.cloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_entries(self, projects, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entry resources. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`~google.cloud.logging.ASCENDING` + or :data:`~google.cloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more entries can be retrieved + with another call (pass that value as ``page_token``). 
+ """ + params = {'projectIds': projects} + + if filter_ is not None: + params['filter'] = filter_ + + if order_by is not None: + params['orderBy'] = order_by + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + resp = self._connection.api_request( + method='POST', path='/entries:list', data=params) + + return resp.get('entries', ()), resp.get('nextPageToken') + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + """API call: log an entry resource via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type entries: sequence of mapping + :param entries: the log entry resources to log. + + :type logger_name: string + :param logger_name: name of default logger to which to log the entries; + individual entries may override. + + :type resource: mapping + :param resource: default resource to associate with entries; + individual entries may override. + + :type labels: mapping + :param labels: default labels to associate with entries; + individual entries may override. + """ + data = {'entries': list(entries)} + + if logger_name is not None: + data['logName'] = logger_name + + if resource is not None: + data['resource'] = resource + + if labels is not None: + data['labels'] = labels + + self._connection.api_request(method='POST', path='/entries:write', + data=data) + + def logger_delete(self, project, logger_name): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type project: string + :param project: ID of project containing the log entries to delete + + :type logger_name: string + :param logger_name: name of logger containing the log entries to delete + """ + path = '/projects/%s/logs/%s' % (project, logger_name) + self._connection.api_request(method='DELETE', path=path) + + +class _SinksAPI(object): + """Helper mapping sink-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type connection: :class:`google.cloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_sinks(self, project, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type project: string + :param project: ID of the project whose sinks are to be listed. + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more sinks can be retrieved + with another call (pass that value as ``page_token``). 
+ """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/sinks' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + sinks = resp.get('sinks', ()) + return sinks, resp.get('nextPageToken') + + def sink_create(self, project, sink_name, filter_, destination): + """API call: create a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type project: string + :param project: ID of the project in which to create the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + target = '/projects/%s/sinks' % (project,) + data = { + 'name': sink_name, + 'filter': filter_, + 'destination': destination, + } + self._connection.api_request(method='POST', path=target, data=data) + + def sink_get(self, project, sink_name): + """API call: retrieve a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :rtype: dict + :returns: The JSON sink object returned from the API. + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + return self._connection.api_request(method='GET', path=target) + + def sink_update(self, project, sink_name, filter_, destination): + """API call: update a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + data = { + 'name': sink_name, + 'filter': filter_, + 'destination': destination, + } + self._connection.api_request(method='PUT', path=target, data=data) + + def sink_delete(self, project, sink_name): + """API call: delete a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + self._connection.api_request(method='DELETE', path=target) + + +class _MetricsAPI(object): + """Helper mapping sink-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + + :type connection: :class:`google.cloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_metrics(self, project, page_size=None, page_token=None): + """List metrics for the project associated with this client. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type project: string + :param project: ID of the project whose metrics are to be listed. + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more metrics can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/metrics' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + metrics = resp.get('metrics', ()) + return metrics, resp.get('nextPageToken') + + def metric_create(self, project, metric_name, filter_, description=None): + """API call: create a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type project: string + :param project: ID of the project in which to create the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics' % (project,) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='POST', path=target, data=data) + + def metric_get(self, project, metric_name): + """API call: retrieve a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :rtype: dict + :returns: The JSON metric object returned from the API. + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + return self._connection.api_request(method='GET', path=target) + + def metric_update(self, project, metric_name, filter_, description): + """API call: update a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='PUT', path=target, data=data) + + def metric_delete(self, project, metric_name): + """API call: delete a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type project: string + :param project: ID of the project containing the metric. 
+ + :type metric_name: string + :param metric_name: the name of the metric. + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + self._connection.api_request(method='DELETE', path=target) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py new file mode 100644 index 000000000000..cad23ee9c48a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -0,0 +1,157 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Log entries within the Google Stackdriver Logging API.""" + +import json +import re + +from google.protobuf.json_format import Parse + +from google.cloud._helpers import _name_from_project_path +from google.cloud._helpers import _rfc3339_nanos_to_datetime + + +_LOGGER_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /logs/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def logger_name_from_path(path): + """Validate a logger URI path and get the logger name. + + :type path: str + :param path: URI path for a logger API request. + + :rtype: str + :returns: Logger name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, None, _LOGGER_TEMPLATE) + + +class _BaseEntry(object): + """Base class for TextEntry, StructEntry. + + :type payload: text or dict + :param payload: The payload passed as ``textPayload``, ``jsonPayload``, + or ``protoPayload``. + + :type logger: :class:`google.cloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + + :type insert_id: text, or :class:`NoneType` + :param insert_id: (optional) the ID used to identify an entry uniquely. + + :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` + :param timestamp: (optional) timestamp for the entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): + self.payload = payload + self.logger = logger + self.insert_id = insert_id + self.timestamp = timestamp + self.labels = labels + self.severity = severity + self.http_request = http_request + + @classmethod + def from_api_repr(cls, resource, client, loggers=None): + """Factory: construct an entry given its API representation + + :type resource: dict + :param resource: text entry resource representation returned from + the API + + :type client: :class:`google.cloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration. + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: :class:`google.cloud.logging.entries.TextEntry` + :returns: Text entry parsed from ``resource``. + """ + if loggers is None: + loggers = {} + logger_fullname = resource['logName'] + logger = loggers.get(logger_fullname) + if logger is None: + logger_name = logger_name_from_path(logger_fullname) + logger = loggers[logger_fullname] = client.logger(logger_name) + payload = resource[cls._PAYLOAD_KEY] + insert_id = resource.get('insertId') + timestamp = resource.get('timestamp') + if timestamp is not None: + timestamp = _rfc3339_nanos_to_datetime(timestamp) + labels = resource.get('labels') + severity = resource.get('severity') + http_request = resource.get('httpRequest') + return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) + + +class TextEntry(_BaseEntry): + """Entry created with ``textPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'textPayload' + + +class StructEntry(_BaseEntry): + """Entry created with ``jsonPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'jsonPayload' + + +class ProtobufEntry(_BaseEntry): + """Entry created with ``protoPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'protoPayload' + + def parse_message(self, message): + """Parse payload into a protobuf message. + + Mutates the passed-in ``message`` in place. + + :type message: Protobuf message + :param message: the message to be logged + """ + Parse(json.dumps(self.payload), message) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py new file mode 100644 index 000000000000..57d08af8637f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python :mod:`logging` handlers for Google Cloud Logging."""
+
+from google.cloud.logging.handlers.handlers import CloudLoggingHandler
+from google.cloud.logging.handlers.handlers import setup_logging
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
new file mode 100644
index 000000000000..a6bd083c9944
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
@@ -0,0 +1,133 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python :mod:`logging` handlers for Google Cloud Logging."""
+
+import logging
+
+from google.cloud.logging.handlers.transports import BackgroundThreadTransport
+
+
+EXCLUDE_LOGGER_DEFAULTS = (
+    'google.cloud',
+    'oauth2client'
+)
+
+DEFAULT_LOGGER_NAME = 'python'
+
+
+class CloudLoggingHandler(logging.StreamHandler):
+    """Python standard ``logging`` handler.
+
+    This handler can be used to route Python standard logging messages
+    directly to the Stackdriver Logging API.
+
+    Note that by default this handler sends entries asynchronously via a
+    background-thread transport; if :class:`.SyncTransport` is used
+    instead, each logging statement that uses this handler will require
+    its own API call.
+
+    :type client: :class:`google.cloud.logging.client.Client`
+    :param client: the authenticated Google Cloud Logging client for this
+                   handler to use
+
+    :type name: str
+    :param name: the name of the custom log in Stackdriver Logging. Defaults
+                 to 'python'. The name of the Python logger will be represented
+                 in the ``python_logger`` field.
+
+    :type transport: type
+    :param transport: Class for creating new transport objects. It should
+                      extend from the base :class:`.Transport` type and
+                      implement :meth:`.Transport.send`. Defaults to
+                      :class:`.BackgroundThreadTransport`. The other
+                      option is :class:`.SyncTransport`.
+
+    Example:
+
+    .. doctest::
+
+        import logging
+
+        import google.cloud.logging
+        from google.cloud.logging.handlers import CloudLoggingHandler
+
+        client = google.cloud.logging.Client()
+        handler = CloudLoggingHandler(client)
+
+        cloud_logger = logging.getLogger('cloudLogger')
+        cloud_logger.setLevel(logging.INFO)
+        cloud_logger.addHandler(handler)
+
+        cloud_logger.error('bad news')  # API call
+
+    """
+
+    def __init__(self, client,
+                 name=DEFAULT_LOGGER_NAME,
+                 transport=BackgroundThreadTransport):
+        super(CloudLoggingHandler, self).__init__()
+        self.name = name
+        self.client = client
+        self.transport = transport(client, name)
+
+    def emit(self, record):
+        """Actually log the specified logging record.
+
+        Overrides the default emit behavior of ``StreamHandler``.
+
+        See: https://docs.python.org/2/library/logging.html#handler-objects
+
+        :type record: :class:`logging.LogRecord`
+        :param record: The record to be logged.
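+
+        Example (illustrative sketch only: ``emit`` is invoked by the
+        :mod:`logging` machinery rather than called directly, and
+        ``handler`` is assumed to be a configured
+        :class:`CloudLoggingHandler` as in the class example above):
+
+        .. doctest::
+
+            import logging
+
+            example_logger = logging.getLogger('example')
+            example_logger.addHandler(handler)
+            example_logger.error('oops')  # logging calls handler.emit(record)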
+ """ + message = super(CloudLoggingHandler, self).format(record) + self.transport.send(record, message) + + +def setup_logging(handler, excluded_loggers=EXCLUDE_LOGGER_DEFAULTS): + """Attach the ``CloudLogging`` handler to the Python root logger + + Excludes loggers that this library itself uses to avoid + infinite recursion. + + :type handler: :class:`logging.handler` + :param handler: the handler to attach to the global handler + + :type excluded_loggers: tuple + :param excluded_loggers: The loggers to not attach the handler to. This + will always include the loggers in the path of + the logging client itself. + + Example: + + .. doctest:: + + import logging + import google.cloud.logging + from google.cloud.logging.handlers import CloudLoggingHandler + + client = google.cloud.logging.Client() + handler = CloudLoggingHandler(client) + google.cloud.logging.setup_logging(handler) + logging.getLogger().setLevel(logging.DEBUG) + + logging.error('bad news') # API call + + """ + all_excluded_loggers = set(excluded_loggers + EXCLUDE_LOGGER_DEFAULTS) + logger = logging.getLogger() + logger.addHandler(handler) + logger.addHandler(logging.StreamHandler()) + for logger_name in all_excluded_loggers: + logger = logging.getLogger(logger_name) + logger.propagate = False + logger.addHandler(logging.StreamHandler()) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py new file mode 100644 index 000000000000..6c689e378a42 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Transport classes for Python logging integration. + +Currently two options are provided, a synchronous transport that makes +an API call for each log statement, and an asynchronous handler that +sends the API using a :class:`~google.cloud.logging.logger.Batch` object in +the background. +""" + +from google.cloud.logging.handlers.transports.base import Transport +from google.cloud.logging.handlers.transports.sync import SyncTransport +from google.cloud.logging.handlers.transports.background_thread import ( + BackgroundThreadTransport) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py new file mode 100644 index 000000000000..3c1e76872985 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -0,0 +1,171 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport for Python logging handler
+
+Uses a background worker to log to Stackdriver Logging asynchronously.
+"""
+
+import atexit
+import copy
+import threading
+
+from google.cloud.logging.client import Client
+from google.cloud.logging.handlers.transports.base import Transport
+
+
+class _Worker(object):
+    """A threaded worker that writes batches of log entries
+
+    Writes entries to the logger API.
+
+    This class reuses a single :class:`Batch` object to write successive
+    entries.
+
+    Currently, the only public operations are constructing it (which also
+    starts it) and enqueuing (record, message) pairs via :meth:`enqueue`.
+    """
+
+    def __init__(self, logger):
+        self.started = False
+        self.stopping = False
+        self.stopped = False
+
+        # _entries_condition is used to signal from the main thread whether
+        # there are any waiting queued logger entries to be written
+        self._entries_condition = threading.Condition()
+
+        # _stop_condition is used to signal from the worker thread to the
+        # main thread that it has finished its last entries
+        self._stop_condition = threading.Condition()
+
+        # This object continually reuses the same :class:`Batch` object to
+        # write multiple entries at the same time.
+        self.logger = logger
+        self.batch = self.logger.batch()
+
+        self._thread = None
+
+        # Number of seconds to wait for the worker to send remaining entries
+        self._stop_timeout = 5
+
+        self._start()
+
+    def _run(self):
+        """The entry point for the worker thread.
+
+        Loops until ``stopping`` is set to :data:`True`, and commits batch
+        entries written during :meth:`enqueue`.
+        """
+        try:
+            self._entries_condition.acquire()
+            self.started = True
+            while not self.stopping:
+                if len(self.batch.entries) == 0:
+                    # branch coverage of this code extremely flaky
+                    self._entries_condition.wait()  # pragma: NO COVER
+
+                if len(self.batch.entries) > 0:
+                    self.batch.commit()
+        finally:
+            self._entries_condition.release()
+
+        # main thread may be waiting for worker thread to finish writing its
+        # final entries. here we signal that it's done.
+        self._stop_condition.acquire()
+        self._stop_condition.notify()
+        self._stop_condition.release()
+
+    def _start(self):
+        """Called by this class's constructor
+
+        This method is responsible for starting the thread and registering
+        the exit handlers.
+        """
+        try:
+            self._entries_condition.acquire()
+            self._thread = threading.Thread(
+                target=self._run,
+                name='google.cloud.logging.handlers.transport.Worker')
+            self._thread.setDaemon(True)
+            self._thread.start()
+        finally:
+            self._entries_condition.release()
+        atexit.register(self._stop)
+
+    def _stop(self):
+        """Signals the worker thread to shut down
+
+        Also waits for ``_stop_timeout`` seconds for the worker to finish.
+
+        This method is called by the ``atexit`` handler registered by
+        :meth:`_start`.
+ """ + if not self.started or self.stopping: + return + + # lock the stop condition first so that the worker + # thread can't notify it's finished before we wait + self._stop_condition.acquire() + + # now notify the worker thread to shutdown + self._entries_condition.acquire() + self.stopping = True + self._entries_condition.notify() + self._entries_condition.release() + + # now wait for it to signal it's finished + self._stop_condition.wait(self._stop_timeout) + self._stop_condition.release() + self.stopped = True + + def enqueue(self, record, message): + """Queues up a log entry to be written by the background thread.""" + try: + self._entries_condition.acquire() + if self.stopping: + return + info = {'message': message, 'python_logger': record.name} + self.batch.log_struct(info, severity=record.levelname) + self._entries_condition.notify() + finally: + self._entries_condition.release() + + +class BackgroundThreadTransport(Transport): + """Aysnchronous transport that uses a background thread. + + Writes logging entries as a batch process. + """ + + def __init__(self, client, name): + http = copy.deepcopy(client.connection.http) + http = client.connection.credentials.authorize(http) + self.client = Client(client.project, + client.connection.credentials, + http) + logger = self.client.logger(name) + self.worker = _Worker(logger) + + def send(self, record, message): + """Overrides Transport.send(). + + :type record: :class:`logging.LogRecord` + :param record: Python log record that the handler was called with. + + :type message: str + :param message: The message from the ``LogRecord`` after being + formatted by the associated log formatters. + """ + self.worker.enqueue(record, message) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py new file mode 100644 index 000000000000..09711231bce2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -0,0 +1,35 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Module containing base class for logging transport.""" + + +class Transport(object): + """Base class for Google Cloud Logging handler transports. + + Subclasses of :class:`Transport` must have constructors that accept a + client and name object, and must override :meth:`send`. + """ + + def send(self, record, message): + """Transport send to be implemented by subclasses. + + :type record: :class:`logging.LogRecord` + :param record: Python log record that the handler was called with. + + :type message: str + :param message: The message from the ``LogRecord`` after being + formatted by the associated log formatters. 
+ """ + raise NotImplementedError diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py new file mode 100644 index 000000000000..eec5ffecf6ee --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -0,0 +1,43 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Transport for Python logging handler. + +Logs directly to the the Stackdriver Logging API with a synchronous call. +""" + +from google.cloud.logging.handlers.transports.base import Transport + + +class SyncTransport(Transport): + """Basic sychronous transport. + + Uses this library's Logging client to directly make the API call. + """ + + def __init__(self, client, name): + self.logger = client.logger(name) + + def send(self, record, message): + """Overrides transport.send(). + + :type record: :class:`logging.LogRecord` + :param record: Python log record that the handler was called with. + + :type message: str + :param message: The message from the ``LogRecord`` after being + formatted by the associated log formatters. + """ + info = {'message': message, 'python_logger': record.name} + self.logger.log_struct(info, severity=record.levelname) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py new file mode 100644 index 000000000000..833971ee547f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -0,0 +1,453 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Loggers.""" + +import json + +from google.protobuf.json_format import MessageToJson + + +class Logger(object): + """Loggers represent named targets for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type name: string + :param name: the name of the logger + + :type client: :class:`google.cloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the logger (which requires a project). + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of default labels for entries written + via this logger. 
+ """ + def __init__(self, name, client, labels=None): + self.name = name + self._client = client + self.labels = labels + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in logging APIs""" + return 'projects/%s/logs/%s' % (self.project, self.name) + + @property + def path(self): + """URI path for use in logging APIs""" + return '/%s' % (self.full_name,) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :rtype: :class:`google.cloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def batch(self, client=None): + """Return a batch to use as a context manager. + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`Batch` + :returns: A batch to use as a context manager. + """ + client = self._require_client(client) + return Batch(self, client) + + def _make_entry_resource(self, text=None, info=None, message=None, + labels=None, insert_id=None, severity=None, + http_request=None): + """Return a log entry resource of the appropriate type. + + Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. + + Only one of ``text``, ``info``, or ``message`` should be passed. + + :type text: string or :class:`NoneType` + :param text: text payload + + :type info: dict or :class:`NoneType` + :param info: struct payload + + :type message: Protobuf message or :class:`NoneType` + :param message: protobuf payload + + :type labels: dict or :class:`NoneType` + :param labels: labels passed in to calling method. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + + :rtype: dict + :returns: The JSON resource created. + """ + resource = { + 'logName': self.full_name, + 'resource': {'type': 'global'}, + } + + if text is not None: + resource['textPayload'] = text + + if info is not None: + resource['jsonPayload'] = info + + if message is not None: + as_json_str = MessageToJson(message) + as_json = json.loads(as_json_str) + resource['protoPayload'] = as_json + + if labels is None: + labels = self.labels + + if labels is not None: + resource['labels'] = labels + + if insert_id is not None: + resource['insertId'] = insert_id + + if severity is not None: + resource['severity'] = severity + + if http_request is not None: + resource['httpRequest'] = http_request + + return resource + + def log_text(self, text, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a text message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type text: text + :param text: the log message. 
+ + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + text=text, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def log_struct(self, info, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a structured message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type info: dict + :param info: the log entry information + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + info=info, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def log_proto(self, message, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a protobuf message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type message: Protobuf message + :param message: the message to be logged + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. 
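+
+        Example (sketch; ``my_pb2`` and ``MyMessage`` are hypothetical
+        stand-ins for any generated protobuf module and message class):
+
+        .. doctest::
+
+            from my_project.my_pb2 import MyMessage  # hypothetical
+
+            message = MyMessage(name='example')
+            logger.log_proto(message, severity='INFO')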
+ """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + message=message, labels=labels, insert_id=insert_id, + severity=severity, http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def delete(self, client=None): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + client.logging_api.logger_delete(self.project, self.name) + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`~google.cloud.logging.ASCENDING` + or :data:`~google.cloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of :class:`google.cloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + log_filter = 'logName=%s' % (self.full_name,) + if filter_ is not None: + filter_ = '%s AND %s' % (filter_, log_filter) + else: + filter_ = log_filter + return self.client.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) + + +class Batch(object): + """Context manager: collect entries to log via a single API call. + + Helper returned by :meth:`Logger.batch` + + :type logger: :class:`google.cloud.logging.logger.Logger` + :param logger: the logger to which entries will be logged. + + :type client: :class:`google.cloud.logging.client.Client` + :param client: The client to use. + """ + def __init__(self, logger, client): + self.logger = logger + self.entries = [] + self.client = client + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def log_text(self, text, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a text entry to be logged during :meth:`commit`. + + :type text: string + :param text: the text entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. 
+ """ + self.entries.append( + ('text', text, labels, insert_id, severity, http_request)) + + def log_struct(self, info, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a struct entry to be logged during :meth:`commit`. + + :type info: dict + :param info: the struct entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('struct', info, labels, insert_id, severity, http_request)) + + def log_proto(self, message, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a protobuf entry to be logged during :meth:`commit`. + + :type message: protobuf message + :param message: the protobuf entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('proto', message, labels, insert_id, severity, http_request)) + + def commit(self, client=None): + """Send saved log entries as a single API call. + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + """ + if client is None: + client = self.client + + kwargs = { + 'logger_name': self.logger.full_name, + 'resource': {'type': 'global'}, + } + if self.logger.labels is not None: + kwargs['labels'] = self.logger.labels + + entries = [] + for entry_type, entry, labels, iid, severity, http_req in self.entries: + if entry_type == 'text': + info = {'textPayload': entry} + elif entry_type == 'struct': + info = {'jsonPayload': entry} + elif entry_type == 'proto': + as_json_str = MessageToJson(entry) + as_json = json.loads(as_json_str) + info = {'protoPayload': as_json} + else: + raise ValueError('Unknown entry type: %s' % (entry_type,)) + if labels is not None: + info['labels'] = labels + if iid is not None: + info['insertId'] = iid + if severity is not None: + info['severity'] = severity + if http_req is not None: + info['httpRequest'] = http_req + entries.append(info) + + client.logging_api.write_entries(entries, **kwargs) + del self.entries[:] diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py new file mode 100644 index 000000000000..05e18dba852a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -0,0 +1,183 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define Stackdriver Logging API Metrics."""
+
+from google.cloud.exceptions import NotFound
+
+
+class Metric(object):
+    """Metrics represent named filters for log entries.
+
+    See:
+    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
+
+    :type name: string
+    :param name: the name of the metric
+
+    :type filter_: string
+    :param filter_: the advanced logs filter expression defining the entries
+                    tracked by the metric.  If not passed, the instance should
+                    already exist, to be refreshed via :meth:`reload`.
+
+    :type client: :class:`google.cloud.logging.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the metric (which requires a project).
+
+    :type description: string
+    :param description: an optional description of the metric.
+    """
+    def __init__(self, name, filter_=None, client=None, description=''):
+        self.name = name
+        self._client = client
+        self.filter_ = filter_
+        self.description = description
+
+    @property
+    def client(self):
+        """Client bound to the metric."""
+        return self._client
+
+    @property
+    def project(self):
+        """Project bound to the metric."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in metric APIs"""
+        return 'projects/%s/metrics/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URL path for the metric's APIs"""
+        return '/%s' % (self.full_name,)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory:  construct a metric given its API representation
+
+        :type resource: dict
+        :param resource: metric resource representation returned from the API
+
+        :type client: :class:`google.cloud.logging.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the metric.
+
+        :rtype: :class:`google.cloud.logging.metric.Metric`
+        :returns: Metric parsed from ``resource``.
+        """
+        metric_name = resource['name']
+        filter_ = resource['filter']
+        description = resource.get('description', '')
+        return cls(metric_name, filter_, client=client,
+                   description=description)
+
+    def _require_client(self, client):
+        """Check client or verify over-ride.
+
+        :type client: :class:`~google.cloud.logging.client.Client` or
+                      ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current metric.
+
+        :rtype: :class:`google.cloud.logging.client.Client`
+        :returns: The client passed in or the currently bound client.
+        """
+        if client is None:
+            client = self._client
        return client
+
+    def create(self, client=None):
+        """API call:  create the metric via a POST request
+
+        See:
+        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create
+
+        :type client: :class:`~google.cloud.logging.client.Client` or
+                      ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current metric.
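+
+        Example (sketch; assumes ``client`` is an authenticated
+        :class:`~google.cloud.logging.client.Client` exposing a ``metric``
+        factory as used elsewhere in this package):
+
+        .. doctest::
+
+            metric = client.metric('error-count', filter_='severity>=ERROR',
+                                   description='Count of ERROR entries')
+            metric.create()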
+ """ + client = self._require_client(client) + client.metrics_api.metric_create( + self.project, self.name, self.filter_, self.description) + + def exists(self, client=None): + """API call: test for the existence of the metric via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + + :rtype: bool + :returns: Boolean indicating existence of the metric. + """ + client = self._require_client(client) + + try: + client.metrics_api.metric_get(self.project, self.name) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local metric configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + data = client.metrics_api.metric_get(self.project, self.name) + self.description = data.get('description', '') + self.filter_ = data['filter'] + + def update(self, client=None): + """API call: update metric configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.metrics_api.metric_update( + self.project, self.name, self.filter_, self.description) + + def delete(self, client=None): + """API call: delete a metric via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.metrics_api.metric_delete(self.project, self.name) diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py new file mode 100644 index 000000000000..356ade5407be --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -0,0 +1,187 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Stackdriver Logging API Sinks.""" + +from google.cloud.exceptions import NotFound + + +class Sink(object): + """Sinks represent filtered exports for log entries. 
+
+    See:
+    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
+
+    :type name: string
+    :param name: the name of the sink
+
+    :type filter_: string
+    :param filter_: the advanced logs filter expression defining the entries
+                    exported by the sink.  If not passed, the instance should
+                    already exist, to be refreshed via :meth:`reload`.
+
+    :type destination: string
+    :param destination: destination URI for the entries exported by the sink.
+                        If not passed, the instance should already exist, to
+                        be refreshed via :meth:`reload`.
+
+    :type client: :class:`google.cloud.logging.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the sink (which requires a project).
+    """
+    def __init__(self, name, filter_=None, destination=None, client=None):
+        self.name = name
+        self.filter_ = filter_
+        self.destination = destination
+        self._client = client
+
+    @property
+    def client(self):
+        """Client bound to the sink."""
+        return self._client
+
+    @property
+    def project(self):
+        """Project bound to the sink."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in sink APIs"""
+        return 'projects/%s/sinks/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URL path for the sink's APIs"""
+        return '/%s' % (self.full_name,)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory:  construct a sink given its API representation
+
+        :type resource: dict
+        :param resource: sink resource representation returned from the API
+
+        :type client: :class:`google.cloud.logging.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the sink.
+
+        :rtype: :class:`google.cloud.logging.sink.Sink`
+        :returns: Sink parsed from ``resource``.
+        """
+        sink_name = resource['name']
+        filter_ = resource['filter']
+        destination = resource['destination']
+        return cls(sink_name, filter_, destination, client=client)
+
+    def _require_client(self, client):
+        """Check client or verify over-ride.
+
+        :type client: :class:`~google.cloud.logging.client.Client` or
+                      ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current sink.
+
+        :rtype: :class:`google.cloud.logging.client.Client`
+        :returns: The client passed in or the currently bound client.
+        """
+        if client is None:
+            client = self._client
+        return client
+
+    def create(self, client=None):
+        """API call:  create the sink via a POST request
+
+        See:
+        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create
+
+        :type client: :class:`~google.cloud.logging.client.Client` or
+                      ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current sink.
+        """
+        client = self._require_client(client)
+        client.sinks_api.sink_create(
+            self.project, self.name, self.filter_, self.destination)
+
+    def exists(self, client=None):
+        """API call:  test for the existence of the sink via a GET request
+
+        See
+        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
+
+        :type client: :class:`~google.cloud.logging.client.Client` or
+                      ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current sink.
+ + :rtype: bool + :returns: Boolean indicating existence of the sink. + """ + client = self._require_client(client) + + try: + client.sinks_api.sink_get(self.project, self.name) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local sink configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = client.sinks_api.sink_get(self.project, self.name) + self.filter_ = data['filter'] + self.destination = data['destination'] + + def update(self, client=None): + """API call: update sink configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_update( + self.project, self.name, self.filter_, self.destination) + + def delete(self, client=None): + """API call: delete a sink via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_delete(self.project, self.name) diff --git a/packages/google-cloud-logging/unit_tests/__init__.py b/packages/google-cloud-logging/unit_tests/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-logging/unit_tests/handlers/__init__.py b/packages/google-cloud-logging/unit_tests/handlers/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
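A short usage sketch for the sink API above (assumes ``client`` is an
authenticated ``google.cloud.logging.Client`` exposing the ``sink`` factory,
and that the Cloud Storage bucket named here, which is purely illustrative,
already exists):

    import google.cloud.logging

    client = google.cloud.logging.Client()
    sink = client.sink('errors-to-gcs', filter_='severity>=ERROR',
                       destination='storage.googleapis.com/my-bucket')
    if not sink.exists():
        sink.create()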
diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py new file mode 100644 index 000000000000..f836e5a335d5 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -0,0 +1,122 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + + +class TestCloudLoggingHandler(unittest.TestCase): + + PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.logging.handlers.handlers import CloudLoggingHandler + return CloudLoggingHandler + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + client = _Client(self.PROJECT) + handler = self._makeOne(client, transport=_Transport) + self.assertEqual(handler.client, client) + + def test_emit(self): + client = _Client(self.PROJECT) + handler = self._makeOne(client, transport=_Transport) + LOGNAME = 'loggername' + MESSAGE = 'hello world' + record = _Record(LOGNAME, logging.INFO, MESSAGE) + handler.emit(record) + + self.assertEqual(handler.transport.send_called_with, (record, MESSAGE)) + + +class TestSetupLogging(unittest.TestCase): + + def _callFUT(self, handler, excludes=None): + from google.cloud.logging.handlers.handlers import setup_logging + if excludes: + return setup_logging(handler, excluded_loggers=excludes) + else: + return setup_logging(handler) + + def test_setup_logging(self): + handler = _Handler(logging.INFO) + self._callFUT(handler) + + root_handlers = logging.getLogger().handlers + self.assertIn(handler, root_handlers) + + def test_setup_logging_excludes(self): + INCLUDED_LOGGER_NAME = 'includeme' + EXCLUDED_LOGGER_NAME = 'excludeme' + + handler = _Handler(logging.INFO) + self._callFUT(handler, (EXCLUDED_LOGGER_NAME,)) + + included_logger = logging.getLogger(INCLUDED_LOGGER_NAME) + self.assertTrue(included_logger.propagate) + + excluded_logger = logging.getLogger(EXCLUDED_LOGGER_NAME) + self.assertNotIn(handler, excluded_logger.handlers) + self.assertFalse(excluded_logger.propagate) + + def setUp(self): + self._handlers_cache = logging.getLogger().handlers[:] + + def tearDown(self): + # cleanup handlers + logging.getLogger().handlers = self._handlers_cache[:] + + +class _Handler(object): + + def __init__(self, level): + self.level = level + + def acquire(self): + pass # pragma: NO COVER + + def release(self): + pass # pragma: NO COVER + + +class _Client(object): + + def __init__(self, project): + self.project = project + + +class _Record(object): + + def __init__(self, name, level, message): + self.name = name + self.levelname = level + self.message = message + self.exc_info = None + self.exc_text = None + self.stack_info = None + + def getMessage(self): + return self.message + + +class _Transport(object): + + def __init__(self, client, name): + pass + + def send(self, record, message): + self.send_called_with = (record, message) diff --git 
a/packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py b/packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py new file mode 100644 index 000000000000..d9ae8297ec22 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -0,0 +1,193 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import logging
+import time
+import unittest
+
+
+class TestBackgroundThreadHandler(unittest.TestCase):
+
+    PROJECT = 'PROJECT'
+
+    def _getTargetClass(self):
+        from google.cloud.logging.handlers.transports import (
+            BackgroundThreadTransport)
+        return BackgroundThreadTransport
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_ctor(self):
+        client = _Client(self.PROJECT)
+        NAME = 'python_logger'
+        transport = self._makeOne(client, NAME)
+        self.assertEqual(transport.worker.logger.name, NAME)
+
+    def test_send(self):
+        client = _Client(self.PROJECT)
+        NAME = 'python_logger'
+        transport = self._makeOne(client, NAME)
+        transport.worker.batch = client.logger(NAME).batch()
+
+        PYTHON_LOGGER_NAME = 'mylogger'
+        MESSAGE = 'hello world'
+        record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE)
+        transport.send(record, MESSAGE)
+
+        EXPECTED_STRUCT = {
+            'message': MESSAGE,
+            'python_logger': PYTHON_LOGGER_NAME
+        }
+        EXPECTED_SENT = (EXPECTED_STRUCT, logging.INFO)
+        self.assertEqual(transport.worker.batch.log_struct_called_with,
+                         EXPECTED_SENT)
+
+
+class TestWorker(unittest.TestCase):
+
+    def _getTargetClass(self):
+        from google.cloud.logging.handlers.transports import background_thread
+        return background_thread._Worker
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_ctor(self):
+        NAME = 'python_logger'
+        logger = _Logger(NAME)
+        worker = self._makeOne(logger)
+        self.assertEqual(worker.batch, logger._batch)
+
+    def test_run(self):
+        NAME = 'python_logger'
+        logger = _Logger(NAME)
+        worker = self._makeOne(logger)
+
+        PYTHON_LOGGER_NAME = 'mylogger'
+        MESSAGE = 'hello world'
+        record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE)
+
+        worker._start()
+
+        # first sleep is for branch coverage - ensure condition
+        # where queue is empty occurs
+        time.sleep(1)
+        # second poll avoids starting/stopping the worker
+        # before anything ran
+        while not worker.started:
+            time.sleep(1)  # pragma: NO COVER
+
+        worker.enqueue(record, MESSAGE)
+        # Set timeout to none so worker thread finishes
+        worker._stop_timeout = None
+        worker._stop()
+        self.assertTrue(worker.batch.commit_called)
+
+    def test_run_after_stopped(self):
+        # No-op
+        NAME = 'python_logger'
+        logger = _Logger(NAME)
+        worker = self._makeOne(logger)
+
+        PYTHON_LOGGER_NAME = 'mylogger'
+        MESSAGE = 'hello world'
+        record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE)
+
+        worker._start()
+        while not worker.started:
+            time.sleep(1)  # pragma: NO COVER
+        worker._stop_timeout = None
+        worker._stop()
+        worker.enqueue(record, MESSAGE)
+        self.assertFalse(worker.batch.commit_called)
+        worker._stop()
+
+    def test_run_enqueue_early(self):
+        # No-op
+        NAME = 'python_logger'
+        logger = _Logger(NAME)
+        worker = self._makeOne(logger)
+
+        PYTHON_LOGGER_NAME = 'mylogger'
+        MESSAGE = 'hello world'
+        record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE)
+
+        worker.enqueue(record, MESSAGE)
+        worker._start()
+        while not worker.started:
+            time.sleep(1)  # pragma: NO COVER
+        worker._stop_timeout = None
+        worker._stop()
+        self.assertTrue(worker.stopped)
+
+
+class _Record(object):
+
+    def __init__(self, name, level, message):
+        self.name = name
+        self.levelname = level
+        self.message = message
+        self.exc_info = None
+        self.exc_text = None
+        self.stack_info = None
+
+
+class _Batch(object):
+
+    def __init__(self):
+        self.entries = []
+        self.commit_called = False
+
+    def log_struct(self, record, severity=logging.INFO):
+        self.log_struct_called_with = (record,
severity) + self.entries.append(record) + + def commit(self): + self.commit_called = True + del self.entries[:] + + +class _Credentials(object): + + def authorize(self, _): + pass + + +class _Connection(object): + + def __init__(self): + self.http = None + self.credentials = _Credentials() + + +class _Logger(object): + + def __init__(self, name): + self.name = name + + def batch(self): + self._batch = _Batch() + return self._batch + + +class _Client(object): + + def __init__(self, project): + self.project = project + self.connection = _Connection() + + def logger(self, name): # pylint: disable=unused-argument + self._logger = _Logger(name) + return self._logger diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py new file mode 100644 index 000000000000..9e3324e3ba0c --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py @@ -0,0 +1,32 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestBaseHandler(unittest.TestCase): + + PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.logging.handlers.transports import Transport + return Transport + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_send_is_abstract(self): + target = self._makeOne() + with self.assertRaises(NotImplementedError): + target.send(None, None) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py new file mode 100644 index 000000000000..7639a8f77787 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -0,0 +1,93 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest + + +class TestSyncHandler(unittest.TestCase): + + PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.logging.handlers.transports import SyncTransport + return SyncTransport + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + client = _Client(self.PROJECT) + NAME = 'python_logger' + transport = self._makeOne(client, NAME) + self.assertEqual(transport.logger.name, 'python_logger') + + def test_send(self): + client = _Client(self.PROJECT) + STACKDRIVER_LOGGER_NAME = 'python' + PYTHON_LOGGER_NAME = 'mylogger' + transport = self._makeOne(client, STACKDRIVER_LOGGER_NAME) + MESSAGE = 'hello world' + record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) + + transport.send(record, MESSAGE) + EXPECTED_STRUCT = { + 'message': MESSAGE, + 'python_logger': PYTHON_LOGGER_NAME + } + EXPECTED_SENT = (EXPECTED_STRUCT, logging.INFO) + self.assertEqual( + transport.logger.log_struct_called_with, EXPECTED_SENT) + + +class _Record(object): + + def __init__(self, name, level, message): + self.name = name + self.levelname = level + self.message = message + self.exc_info = None + self.exc_text = None + self.stack_info = None + + +class _Logger(object): + + def __init__(self, name): + self.name = name + + def log_struct(self, message, severity=None): + self.log_struct_called_with = (message, severity) + + +class _Client(object): + + def __init__(self, project): + self.project = project + + def logger(self, name): # pylint: disable=unused-argument + self._logger = _Logger(name) + return self._logger + + +class _Handler(object): + + def __init__(self, level): + self.level = level # pragma: NO COVER + + def acquire(self): + pass # pragma: NO COVER + + def release(self): + pass # pragma: NO COVER diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py new file mode 100644 index 000000000000..6b0396c5d421 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -0,0 +1,1104 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +try: + # pylint: disable=unused-import + import google.cloud.logging._gax + # pylint: enable=unused-import +except ImportError: # pragma: NO COVER + _HAVE_GAX = False +else: + _HAVE_GAX = True + +from google.cloud._testing import _GAXBaseAPI + + +class _Base(object): + PROJECT = 'PROJECT' + PROJECT_PATH = 'projects/%s' % (PROJECT,) + FILTER = 'logName:syslog AND severity>=ERROR' + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_LoggingAPI(_Base, unittest.TestCase): + LOG_NAME = 'log_name' + + def _getTargetClass(self): + from google.cloud.logging._gax import _LoggingAPI + return _LoggingAPI + + def test_ctor(self): + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + self.assertIs(api._gax_api, gax_api) + + def test_list_entries_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud.logging import DESCENDING + from google.cloud._testing import _GAXPageIterator + + TOKEN = 'TOKEN' + TEXT = 'TEXT' + response = _GAXPageIterator( + [_LogEntryPB(self.LOG_NAME, text_payload=TEXT)], TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], self.FILTER, DESCENDING) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + self.assertEqual(entry['resource'], {'type': 'global'}) + self.assertEqual(entry['textPayload'], TEXT) + self.assertEqual(next_token, TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, self.FILTER) + self.assertEqual(order_by, DESCENDING) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + def test_list_entries_with_paging(self): + from google.protobuf.struct_pb2 import Value + from google.cloud._testing import _GAXPageIterator + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} + struct_pb = _StructPB({ + key: Value(string_value=value) for key, value in PAYLOAD.items() + }) + response = _GAXPageIterator( + [_LogEntryPB(self.LOG_NAME, json_payload=struct_pb)], NEW_TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + self.assertEqual(entry['resource'], {'type': 'global'}) + self.assertEqual(entry['jsonPayload'], PAYLOAD) + self.assertEqual(next_token, NEW_TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, '') + self.assertEqual(order_by, '') + self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_list_entries_with_extra_properties(self): + from datetime import datetime + from google.logging.type.log_severity_pb2 import WARNING + from google.cloud._testing import _GAXPageIterator + from google.cloud._helpers import UTC + from google.cloud._helpers import _datetime_to_rfc3339 + from google.cloud._helpers import _datetime_to_pb_timestamp + NOW = 
datetime.utcnow().replace(tzinfo=UTC) + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} + SEVERITY = 'WARNING' + LABELS = { + 'foo': 'bar', + } + IID = 'IID' + request = _HTTPRequestPB() + operation = _LogEntryOperationPB() + EXTRAS = { + 'severity': WARNING, + 'labels': LABELS, + 'insert_id': IID, + 'http_request': request, + 'operation': operation, + } + ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS) + ENTRY.resource.labels['foo'] = 'bar' + ENTRY.timestamp = _datetime_to_pb_timestamp(NOW) + response = _GAXPageIterator([ENTRY], NEW_TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + self.assertEqual(entry['resource'], + {'type': 'global', 'labels': {'foo': 'bar'}}) + self.assertEqual(entry['protoPayload'], PAYLOAD) + self.assertEqual(entry['severity'], SEVERITY) + self.assertEqual(entry['labels'], LABELS) + self.assertEqual(entry['insertId'], IID) + self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW)) + EXPECTED_REQUEST = { + 'requestMethod': request.request_method, + 'requestUrl': request.request_url, + 'status': request.status, + 'requestSize': request.request_size, + 'responseSize': request.response_size, + 'referer': request.referer, + 'userAgent': request.user_agent, + 'remoteIp': request.remote_ip, + 'cacheHit': request.cache_hit, + } + self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST) + EXPECTED_OPERATION = { + 'producer': operation.producer, + 'id': operation.id, + 'first': operation.first, + 'last': operation.last, + } + self.assertEqual(entry['operation'], EXPECTED_OPERATION) + self.assertEqual(next_token, NEW_TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, '') + self.assertEqual(order_by, '') + self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_write_entries_single(self): + from google.logging.v2.log_entry_pb2 import LogEntry + TEXT = 'TEXT' + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + ENTRY = { + 'logName': LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': TEXT, + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries([ENTRY]) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), 1) + + entry = entries[0] + self.assertIsInstance(entry, LogEntry) + self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.resource.type, 'global') + self.assertEqual(entry.labels, {}) + self.assertEqual(entry.text_payload, TEXT) + + self.assertIsNone(log_name) + self.assertIsNone(resource) + self.assertIsNone(labels) + self.assertEqual(partial_success, False) + self.assertIsNone(options) + + def test_write_entries_w_extra_properties(self): + # pylint: disable=too-many-statements + from datetime import datetime + from google.logging.type.log_severity_pb2 import WARNING + from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud._helpers import UTC, _pb_timestamp_to_datetime + NOW = datetime.utcnow().replace(tzinfo=UTC) + TEXT = 'TEXT' + LOG_PATH = 
'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + SEVERITY = 'WARNING' + LABELS = { + 'foo': 'bar', + } + IID = 'IID' + REQUEST_METHOD = 'GET' + REQUEST_URL = 'http://example.com/requested' + STATUS = 200 + REQUEST_SIZE = 256 + RESPONSE_SIZE = 1024 + REFERRER_URL = 'http://example.com/referer' + USER_AGENT = 'Agent/1.0' + REMOTE_IP = '1.2.3.4' + REQUEST = { + 'requestMethod': REQUEST_METHOD, + 'requestUrl': REQUEST_URL, + 'status': STATUS, + 'requestSize': REQUEST_SIZE, + 'responseSize': RESPONSE_SIZE, + 'referer': REFERRER_URL, + 'userAgent': USER_AGENT, + 'remoteIp': REMOTE_IP, + 'cacheHit': False, + } + PRODUCER = 'PRODUCER' + OPID = 'OPID' + OPERATION = { + 'producer': PRODUCER, + 'id': OPID, + 'first': False, + 'last': True, + } + ENTRY = { + 'logName': LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': TEXT, + 'severity': SEVERITY, + 'labels': LABELS, + 'insertId': IID, + 'timestamp': NOW, + 'httpRequest': REQUEST, + 'operation': OPERATION, + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries([ENTRY]) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), 1) + + entry = entries[0] + self.assertIsInstance(entry, LogEntry) + self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.resource.type, 'global') + self.assertEqual(entry.text_payload, TEXT) + self.assertEqual(entry.severity, WARNING) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.insert_id, IID) + stamp = _pb_timestamp_to_datetime(entry.timestamp) + self.assertEqual(stamp, NOW) + + request = entry.http_request + self.assertEqual(request.request_method, REQUEST_METHOD) + self.assertEqual(request.request_url, REQUEST_URL) + self.assertEqual(request.status, STATUS) + self.assertEqual(request.request_size, REQUEST_SIZE) + self.assertEqual(request.response_size, RESPONSE_SIZE) + self.assertEqual(request.referer, REFERRER_URL) + self.assertEqual(request.user_agent, USER_AGENT) + self.assertEqual(request.remote_ip, REMOTE_IP) + self.assertEqual(request.cache_hit, False) + + operation = entry.operation + self.assertEqual(operation.producer, PRODUCER) + self.assertEqual(operation.id, OPID) + self.assertFalse(operation.first) + self.assertTrue(operation.last) + + self.assertIsNone(log_name) + self.assertIsNone(resource) + self.assertIsNone(labels) + self.assertEqual(partial_success, False) + self.assertIsNone(options) + # pylint: enable=too-many-statements + + def test_write_entries_multiple(self): + # pylint: disable=too-many-statements + import datetime + from google.logging.type.log_severity_pb2 import WARNING + from google.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf.any_pb2 import Any + from google.protobuf.struct_pb2 import Struct + from google.cloud._helpers import _datetime_to_rfc3339, UTC + TEXT = 'TEXT' + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' + JSON = {'payload': 'PAYLOAD', 'type': 'json'} + PROTO = { + '@type': TIMESTAMP_TYPE_URL, + 'value': _datetime_to_rfc3339(NOW), + } + PRODUCER = 'PRODUCER' + OPID = 'OPID' + URL = 'http://example.com/' + ENTRIES = [ + {'textPayload': TEXT, + 'severity': WARNING}, + {'jsonPayload': JSON, + 'operation': {'producer': PRODUCER, 'id': OPID}}, + {'protoPayload': PROTO, + 'httpRequest': {'requestUrl': URL}}, + ] + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + RESOURCE = { + 'type': 'global', + } + LABELS 
= { + 'foo': 'bar', + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries(ENTRIES, LOG_PATH, RESOURCE, LABELS) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), len(ENTRIES)) + + entry = entries[0] + self.assertIsInstance(entry, LogEntry) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + self.assertEqual(entry.text_payload, TEXT) + self.assertEqual(entry.severity, WARNING) + + entry = entries[1] + self.assertIsInstance(entry, LogEntry) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + json_struct = entry.json_payload + self.assertIsInstance(json_struct, Struct) + self.assertEqual(json_struct.fields['payload'].string_value, + JSON['payload']) + operation = entry.operation + self.assertEqual(operation.producer, PRODUCER) + self.assertEqual(operation.id, OPID) + + entry = entries[2] + self.assertIsInstance(entry, LogEntry) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + proto = entry.proto_payload + self.assertIsInstance(proto, Any) + self.assertEqual(proto.type_url, TIMESTAMP_TYPE_URL) + request = entry.http_request + self.assertEqual(request.request_url, URL) + + self.assertEqual(log_name, LOG_PATH) + self.assertEqual(resource, RESOURCE) + self.assertEqual(labels, LABELS) + self.assertEqual(partial_success, False) + self.assertIsNone(options) + # pylint: enable=too-many-statements + + def test_logger_delete(self): + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.logger_delete(self.PROJECT, self.LOG_NAME) + + log_name, options = gax_api._delete_log_called_with + self.assertEqual(log_name, LOG_PATH) + self.assertIsNone(options) + + def test_logger_delete_not_found(self): + from google.cloud.exceptions import NotFound + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI(_delete_not_found=True) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.logger_delete(self.PROJECT, self.LOG_NAME) + + log_name, options = gax_api._delete_log_called_with + self.assertEqual(log_name, LOG_PATH) + self.assertIsNone(options) + + def test_logger_delete_error(self): + from google.gax.errors import GaxError + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.logger_delete(self.PROJECT, self.LOG_NAME) + + log_name, options = gax_api._delete_log_called_with + self.assertEqual(log_name, LOG_PATH) + self.assertIsNone(options) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_SinksAPI(_Base, unittest.TestCase): + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from google.cloud.logging._gax import _SinksAPI + return _SinksAPI + + def test_ctor(self): + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + self.assertIs(api._gax_api, gax_api) + + def test_list_sinks_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + SINKS = [{ + 'name': self.SINK_PATH, + 'filter': 
self.FILTER, + 'destination': self.DESTINATION_URI, + }] + response = _GAXPageIterator( + [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], + TOKEN) + gax_api = _GAXSinksAPI(_list_sinks_response=response) + api = self._makeOne(gax_api) + + sinks, token = api.list_sinks(self.PROJECT) + + self.assertEqual(sinks, SINKS) + self.assertEqual(token, TOKEN) + + project, page_size, options = gax_api._list_sinks_called_with + self.assertEqual(project, self.PROJECT_PATH) + self.assertEqual(page_size, 0) + self.assertEqual(options.page_token, INITIAL_PAGE) + + def test_list_sinks_w_paging(self): + from google.cloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SINKS = [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }] + response = _GAXPageIterator( + [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], + None) + gax_api = _GAXSinksAPI(_list_sinks_response=response) + api = self._makeOne(gax_api) + + sinks, token = api.list_sinks( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(sinks, SINKS) + self.assertIsNone(token) + + project, page_size, options = gax_api._list_sinks_called_with + self.assertEqual(project, self.PROJECT_PATH) + self.assertEqual(page_size, PAGE_SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_sink_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_create_conflict(self): + from google.cloud.exceptions import Conflict + gax_api = _GAXSinksAPI(_create_sink_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_create_ok(self): + from google.logging.v2.logging_config_pb2 import LogSink + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + parent, sink, options = ( + gax_api._create_sink_called_with) + self.assertEqual(parent, self.PROJECT_PATH) + self.assertIsInstance(sink, LogSink) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIsNone(options) + + def test_sink_get_error(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_get(self.PROJECT, self.SINK_NAME) + + def test_sink_get_miss(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_get(self.PROJECT, self.SINK_NAME) + + def test_sink_get_hit(self): + RESPONSE = { + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + sink_pb = _LogSinkPB( + self.SINK_PATH, self.DESTINATION_URI, self.FILTER) + gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) + api = self._makeOne(gax_api) + + response = api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(response, RESPONSE) + + sink_name, options = gax_api._get_sink_called_with + self.assertEqual(sink_name, self.SINK_PATH) + self.assertIsNone(options) + + def test_sink_update_error(self): + from 
google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_update_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_update_hit(self): + from google.logging.v2.logging_config_pb2 import LogSink + response = _LogSinkPB( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + gax_api = _GAXSinksAPI(_update_sink_response=response) + api = self._makeOne(gax_api) + + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + sink_name, sink, options = ( + gax_api._update_sink_called_with) + self.assertEqual(sink_name, self.SINK_PATH) + self.assertIsInstance(sink, LogSink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIsNone(options) + + def test_sink_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + def test_sink_delete_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI(_sink_not_found=True) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + def test_sink_delete_hit(self): + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + api.sink_delete(self.PROJECT, self.SINK_NAME) + + sink_name, options = gax_api._delete_sink_called_with + self.assertEqual(sink_name, self.SINK_PATH) + self.assertIsNone(options) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_MetricsAPI(_Base, unittest.TestCase): + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) + DESCRIPTION = 'Description' + + def _getTargetClass(self): + from google.cloud.logging._gax import _MetricsAPI + return _MetricsAPI + + def test_ctor(self): + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + self.assertIs(api._gax_api, gax_api) + + def test_list_metrics_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + METRICS = [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + response = _GAXPageIterator( + [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, self.FILTER)], + TOKEN) + gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) + api = self._makeOne(gax_api) + + metrics, token = api.list_metrics(self.PROJECT) + + self.assertEqual(metrics, METRICS) + self.assertEqual(token, TOKEN) + + project, page_size, options = gax_api._list_log_metrics_called_with + self.assertEqual(project, self.PROJECT_PATH) + self.assertEqual(page_size, 0) + self.assertEqual(options.page_token, INITIAL_PAGE) + + def test_list_metrics_w_paging(self): + from google.cloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + METRICS = [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + response = _GAXPageIterator( + [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, self.FILTER)], + 
None) + gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) + api = self._makeOne(gax_api) + + metrics, token = api.list_metrics( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(metrics, METRICS) + self.assertIsNone(token) + + project, page_size, options = gax_api._list_log_metrics_called_with + self.assertEqual(project, self.PROJECT_PATH) + self.assertEqual(page_size, PAGE_SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_metric_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_create_conflict(self): + from google.cloud.exceptions import Conflict + gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_create_ok(self): + from google.logging.v2.logging_metrics_pb2 import LogMetric + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + parent, metric, options = ( + gax_api._create_log_metric_called_with) + self.assertEqual(parent, self.PROJECT_PATH) + self.assertIsInstance(metric, LogMetric) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIsNone(options) + + def test_metric_get_error(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + def test_metric_get_miss(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + def test_metric_get_hit(self): + RESPONSE = { + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + metric_pb = _LogMetricPB( + self.METRIC_PATH, self.DESCRIPTION, self.FILTER) + gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) + api = self._makeOne(gax_api) + + response = api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(response, RESPONSE) + + metric_name, options = gax_api._get_log_metric_called_with + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertIsNone(options) + + def test_metric_update_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_update_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_update_hit(self): + from google.logging.v2.logging_metrics_pb2 import LogMetric + response = _LogMetricPB( + self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + gax_api = _GAXMetricsAPI(_update_log_metric_response=response) + api = self._makeOne(gax_api) + + api.metric_update( + 
self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + metric_name, metric, options = ( + gax_api._update_log_metric_called_with) + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertIsInstance(metric, LogMetric) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIsNone(options) + + def test_metric_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + def test_metric_delete_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI(_log_metric_not_found=True) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + def test_metric_delete_hit(self): + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + metric_name, options = gax_api._delete_log_metric_called_with + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertIsNone(options) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_value_pb_to_value(_Base, unittest.TestCase): + + def _callFUT(self, value_pb): + from google.cloud.logging._gax import _value_pb_to_value + return _value_pb_to_value(value_pb) + + def test_w_null_values(self): + from google.protobuf.struct_pb2 import Value + value_pb = Value() + self.assertIsNone(self._callFUT(value_pb)) + value_pb = Value(null_value=None) + self.assertIsNone(self._callFUT(value_pb)) + + def test_w_string_value(self): + from google.protobuf.struct_pb2 import Value + STRING = 'STRING' + value_pb = Value(string_value=STRING) + self.assertEqual(self._callFUT(value_pb), STRING) + + def test_w_bool_values(self): + from google.protobuf.struct_pb2 import Value + true_value_pb = Value(bool_value=True) + self.assertIs(self._callFUT(true_value_pb), True) + false_value_pb = Value(bool_value=False) + self.assertIs(self._callFUT(false_value_pb), False) + + def test_w_number_values(self): + from google.protobuf.struct_pb2 import Value + ANSWER = 42 + PI = 3.1415926 + int_value_pb = Value(number_value=ANSWER) + self.assertEqual(self._callFUT(int_value_pb), ANSWER) + float_value_pb = Value(number_value=PI) + self.assertEqual(self._callFUT(float_value_pb), PI) + + def test_w_list_value(self): + from google.protobuf.struct_pb2 import Value + STRING = 'STRING' + PI = 3.1415926 + value_pb = Value() + value_pb.list_value.values.add(string_value=STRING) + value_pb.list_value.values.add(bool_value=True) + value_pb.list_value.values.add(number_value=PI) + self.assertEqual(self._callFUT(value_pb), [STRING, True, PI]) + + def test_w_struct_value(self): + from google.protobuf.struct_pb2 import Value + STRING = 'STRING' + PI = 3.1415926 + value_pb = Value() + value_pb.struct_value.fields['string'].string_value = STRING + value_pb.struct_value.fields['bool'].bool_value = True + value_pb.struct_value.fields['number'].number_value = PI + self.assertEqual(self._callFUT(value_pb), + {'string': STRING, 'bool': True, 'number': PI}) + + def test_w_unknown_kind(self): + + class _Value(object): + + def WhichOneof(self, name): + assert name == 'kind' + return 'UNKNOWN' + + with self.assertRaises(ValueError): + self._callFUT(_Value()) + + +class _GAXLoggingAPI(_GAXBaseAPI): + + _delete_not_found = False + + def list_log_entries( + self, 
projects, filter_, order_by, page_size, options): + self._list_log_entries_called_with = ( + projects, filter_, order_by, page_size, options) + return self._list_log_entries_response + + def write_log_entries(self, entries, log_name, resource, labels, + partial_success, options): + self._write_log_entries_called_with = ( + entries, log_name, resource, labels, partial_success, options) + + def delete_log(self, log_name, options): + from google.gax.errors import GaxError + self._delete_log_called_with = log_name, options + if self._random_gax_error: + raise GaxError('error') + if self._delete_not_found: + raise GaxError('notfound', self._make_grpc_not_found()) + + +class _GAXSinksAPI(_GAXBaseAPI): + + _create_sink_conflict = False + _sink_not_found = False + + def list_sinks(self, parent, page_size, options): + self._list_sinks_called_with = parent, page_size, options + return self._list_sinks_response + + def create_sink(self, parent, sink, options): + from google.gax.errors import GaxError + self._create_sink_called_with = parent, sink, options + if self._random_gax_error: + raise GaxError('error') + if self._create_sink_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + + def get_sink(self, sink_name, options): + from google.gax.errors import GaxError + self._get_sink_called_with = sink_name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_sink_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def update_sink(self, sink_name, sink, options=None): + from google.gax.errors import GaxError + self._update_sink_called_with = sink_name, sink, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._update_sink_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def delete_sink(self, sink_name, options=None): + from google.gax.errors import GaxError + self._delete_sink_called_with = sink_name, options + if self._random_gax_error: + raise GaxError('error') + if self._sink_not_found: + raise GaxError('notfound', self._make_grpc_not_found()) + + +class _GAXMetricsAPI(_GAXBaseAPI): + + _create_log_metric_conflict = False + _log_metric_not_found = False + + def list_log_metrics(self, parent, page_size, options): + self._list_log_metrics_called_with = parent, page_size, options + return self._list_log_metrics_response + + def create_log_metric(self, parent, metric, options): + from google.gax.errors import GaxError + self._create_log_metric_called_with = parent, metric, options + if self._random_gax_error: + raise GaxError('error') + if self._create_log_metric_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + + def get_log_metric(self, metric_name, options): + from google.gax.errors import GaxError + self._get_log_metric_called_with = metric_name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_log_metric_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def update_log_metric(self, metric_name, metric, options=None): + from google.gax.errors import GaxError + self._update_log_metric_called_with = metric_name, metric, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._update_log_metric_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def delete_log_metric(self, metric_name, options=None): + from google.gax.errors import 
GaxError + self._delete_log_metric_called_with = metric_name, options + if self._random_gax_error: + raise GaxError('error') + if self._log_metric_not_found: + raise GaxError('notfound', self._make_grpc_not_found()) + + +class _HTTPRequestPB(object): + + request_url = 'http://example.com/requested' + request_method = 'GET' + status = 200 + referer = 'http://example.com/referer' + user_agent = 'AGENT' + cache_hit = False + request_size = 256 + response_size = 1024 + remote_ip = '1.2.3.4' + + +class _LogEntryOperationPB(object): + + producer = 'PRODUCER' + first = last = False + id = 'OPID' + + +class _ResourcePB(object): + + def __init__(self, type_='global', **labels): + self.type = type_ + self.labels = labels + + +class _StructPB(object): + + def __init__(self, fields): + self.fields = fields + + +class _LogEntryPB(object): + + severity = 0 + http_request = operation = insert_id = None + text_payload = json_payload = proto_payload = None + + def __init__(self, log_name, **kw): + self.log_name = log_name + self.resource = _ResourcePB() + self.timestamp = self._make_timestamp() + self.labels = kw.pop('labels', {}) + self.__dict__.update(kw) + + def HasField(self, field_name): + return getattr(self, field_name, None) is not None + + @staticmethod + def _make_timestamp(): + from datetime import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _datetime_to_pb_timestamp + NOW = datetime.utcnow().replace(tzinfo=UTC) + return _datetime_to_pb_timestamp(NOW) + + +class _LogSinkPB(object): + + def __init__(self, name, destination, filter_): + self.name = name + self.destination = destination + self.filter = filter_ + + +class _LogMetricPB(object): + + def __init__(self, name, description, filter_): + self.name = name + self.description = description + self.filter = filter_ diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py new file mode 100644 index 000000000000..114f5f635c45 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -0,0 +1,496 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
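+# TestClient verifies that Client lazily constructs, and then caches,
+# one API object per service (logging, sinks, metrics), selecting the
+# GAX implementation when the module-level _USE_GAX flag is set and the
+# JSON-over-HTTP implementation otherwise.  It also covers the factory
+# methods for the resource classes.  Illustrative calls only; the
+# argument values are made up:
+#
+#     client = Client(project='my-project', credentials=creds)
+#     logger = client.logger('my-log')
+#     sink = client.sink('my-sink', 'severity>=ERROR',
+#                        'storage.googleapis.com/my-bucket')
+#     metric = client.metric('my-metric', 'severity>=ERROR',
+#                            description='count of errors')
+#     entries, token = client.list_entries(filter_='severity>=ERROR')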
+
+import unittest
+
+
+class TestClient(unittest.TestCase):
+
+    PROJECT = 'PROJECT'
+    LOGGER_NAME = 'LOGGER_NAME'
+    SINK_NAME = 'SINK_NAME'
+    FILTER = 'logName:syslog AND severity>=ERROR'
+    DESTINATION_URI = 'faux.googleapis.com/destination'
+    METRIC_NAME = 'metric_name'
+    DESCRIPTION = 'DESCRIPTION'
+
+    def _getTargetClass(self):
+        from google.cloud.logging.client import Client
+        return Client
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_ctor(self):
+        creds = _Credentials()
+        client = self._makeOne(project=self.PROJECT, credentials=creds)
+        self.assertEqual(client.project, self.PROJECT)
+
+    def test_logging_api_wo_gax(self):
+        from google.cloud.logging.connection import _LoggingAPI
+        from google.cloud.logging import client as MUT
+        from google.cloud._testing import _Monkey
+        client = self._makeOne(self.PROJECT, credentials=_Credentials())
+        conn = client.connection = object()
+
+        with _Monkey(MUT, _USE_GAX=False):
+            api = client.logging_api
+
+        self.assertIsInstance(api, _LoggingAPI)
+        self.assertIs(api._connection, conn)
+        # API instance is cached
+        again = client.logging_api
+        self.assertIs(again, api)
+
+    def test_logging_api_w_gax(self):
+        from google.cloud.logging import client as MUT
+        from google.cloud._testing import _Monkey
+
+        wrapped = object()
+        _called_with = []
+
+        def _generated_api(*args, **kw):
+            _called_with.append((args, kw))
+            return wrapped
+
+        class _GaxLoggingAPI(object):
+
+            def __init__(self, _wrapped):
+                self._wrapped = _wrapped
+
+        creds = _Credentials()
+        client = self._makeOne(project=self.PROJECT, credentials=creds)
+
+        with _Monkey(MUT,
+                     _USE_GAX=True,
+                     GeneratedLoggingAPI=_generated_api,
+                     GAXLoggingAPI=_GaxLoggingAPI):
+            api = client.logging_api
+
+        self.assertIsInstance(api, _GaxLoggingAPI)
+        self.assertIs(api._wrapped, wrapped)
+        # API instance is cached
+        again = client.logging_api
+        self.assertIs(again, api)
+
+    def test_sinks_api_wo_gax(self):
+        from google.cloud.logging.connection import _SinksAPI
+        from google.cloud.logging import client as MUT
+        from google.cloud._testing import _Monkey
+        client = self._makeOne(self.PROJECT, credentials=_Credentials())
+        conn = client.connection = object()
+
+        with _Monkey(MUT, _USE_GAX=False):
+            api = client.sinks_api
+
+        self.assertIsInstance(api, _SinksAPI)
+        self.assertIs(api._connection, conn)
+        # API instance is cached
+        again = client.sinks_api
+        self.assertIs(again, api)
+
+    def test_sinks_api_w_gax(self):
+        from google.cloud.logging import client as MUT
+        from google.cloud._testing import _Monkey
+
+        wrapped = object()
+        _called_with = []
+
+        def _generated_api(*args, **kw):
+            _called_with.append((args, kw))
+            return wrapped
+
+        class _GaxSinksAPI(object):
+
+            def __init__(self, _wrapped):
+                self._wrapped = _wrapped
+
+        creds = _Credentials()
+        client = self._makeOne(project=self.PROJECT, credentials=creds)
+
+        with _Monkey(MUT,
+                     _USE_GAX=True,
+                     GeneratedSinksAPI=_generated_api,
+                     GAXSinksAPI=_GaxSinksAPI):
+            api = client.sinks_api
+
+        self.assertIsInstance(api, _GaxSinksAPI)
+        self.assertIs(api._wrapped, wrapped)
+        # API instance is cached
+        again = client.sinks_api
+        self.assertIs(again, api)
+
+    def test_metrics_api_wo_gax(self):
+        from google.cloud.logging.connection import _MetricsAPI
+        from google.cloud.logging import client as MUT
+        from google.cloud._testing import _Monkey
+        client = self._makeOne(self.PROJECT, credentials=_Credentials())
+        conn = client.connection = object()
+
+        with _Monkey(MUT, 
_USE_GAX=False): + api = client.metrics_api + + self.assertIsInstance(api, _MetricsAPI) + self.assertIs(api._connection, conn) + # API instance is cached + again = client.metrics_api + self.assertIs(again, api) + + def test_metrics_api_w_gax(self): + from google.cloud.logging import client as MUT + from google.cloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxMetricsAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + GeneratedMetricsAPI=_generated_api, + GAXMetricsAPI=_GaxMetricsAPI): + api = client.metrics_api + + self.assertIsInstance(api, _GaxMetricsAPI) + self.assertIs(api._wrapped, wrapped) + # API instance is cached + again = client.metrics_api + self.assertIs(again, api) + + def test_logger(self): + from google.cloud.logging.logger import Logger + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + logger = client.logger(self.LOGGER_NAME) + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + def test__entry_from_resource_unknown_type(self): + PROJECT = 'PROJECT' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + loggers = {} + with self.assertRaises(ValueError): + client._entry_from_resource({'unknownPayload': {}}, loggers) + + def test_list_entries_defaults(self): + from google.cloud.logging.entries import TextEntry + IID = 'IID' + TEXT = 'TEXT' + TOKEN = 'TOKEN' + ENTRIES = [{ + 'textPayload': TEXT, + 'insertId': IID, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }] + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + api = client._logging_api = _DummyLoggingAPI() + api._list_entries_response = ENTRIES, TOKEN + + entries, token = client.list_entries() + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, TextEntry) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.payload, TEXT) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(token, TOKEN) + + self.assertEqual( + api._list_entries_called_with, + ([self.PROJECT], None, None, None, None)) + + def test_list_entries_explicit(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging.entries import StructEntry + from google.cloud.logging.logger import Logger + + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'logName:LOGNAME' + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + ENTRIES = [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }] + 
client = self._makeOne(self.PROJECT, credentials=_Credentials()) + api = client._logging_api = _DummyLoggingAPI() + api._list_entries_response = ENTRIES, None + + entries, token = client.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 2) + + entry = entries[0] + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertIs(entries[0].logger, entries[1].logger) + + self.assertIsNone(token) + self.assertEqual( + api._list_entries_called_with, + ([PROJECT1, PROJECT2], FILTER, DESCENDING, PAGE_SIZE, TOKEN)) + + def test_sink_defaults(self): + from google.cloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME) + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertIsNone(sink.filter_) + self.assertIsNone(sink.destination) + self.assertIs(sink.client, client) + self.assertEqual(sink.project, self.PROJECT) + + def test_sink_explicit(self): + from google.cloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) + self.assertEqual(sink.project, self.PROJECT) + + def test_list_sinks_no_paging(self): + from google.cloud.logging.sink import Sink + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINKS = [{ + 'name': SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = SINKS, TOKEN + + sinks, token = client.list_sinks() + + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, None, None)) + + def test_list_sinks_with_paging(self): + from google.cloud.logging.sink import Sink + PROJECT = 'PROJECT' + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SINKS = [{ + 'name': SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = SINKS, None + + sinks, token = client.list_sinks(PAGE_SIZE, TOKEN) + + 
self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIsNone(token) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) + + def test_metric_defaults(self): + from google.cloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME) + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertIsNone(metric.filter_) + self.assertEqual(metric.description, '') + self.assertIs(metric.client, client_obj) + self.assertEqual(metric.project, self.PROJECT) + + def test_metric_explicit(self): + from google.cloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME, self.FILTER, + description=self.DESCRIPTION) + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIs(metric.client, client_obj) + self.assertEqual(metric.project, self.PROJECT) + + def test_list_metrics_no_paging(self): + from google.cloud.logging.metric import Metric + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + METRICS = [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = METRICS, TOKEN + + metrics, token = client.list_metrics() + + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, None, None)) + + def test_list_metrics_with_paging(self): + from google.cloud.logging.metric import Metric + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + METRICS = [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = METRICS, None + + # Execute request. + metrics, token = client.list_metrics(PAGE_SIZE, TOKEN) + # Test values are correct. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIsNone(token) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _DummyLoggingAPI(object): + + def list_entries(self, projects, filter_, order_by, page_size, page_token): + self._list_entries_called_with = ( + projects, filter_, order_by, page_size, page_token) + return self._list_entries_response + + +class _DummySinksAPI(object): + + def list_sinks(self, project, page_size, page_token): + self._list_sinks_called_with = (project, page_size, page_token) + return self._list_sinks_response + + +class _DummyMetricsAPI(object): + + def list_metrics(self, project, page_size, page_token): + self._list_metrics_called_with = (project, page_size, page_token) + return self._list_metrics_response diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test_connection.py new file mode 100644 index 000000000000..07d09ceb9a02 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_connection.py @@ -0,0 +1,640 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
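+# These tests pin down the REST surface behind the JSON-over-HTTP API
+# wrappers: each method should issue the expected verb, path, and
+# payload through the shared connection.  A hedged sketch of the
+# pattern being asserted (the api_request kwargs mirror what the
+# _Connection stub used by these tests records):
+#
+#     from google.cloud.logging.connection import _LoggingAPI
+#
+#     api = _LoggingAPI(connection)
+#     entries, token = api.list_entries(['my-project'])
+#     # expected: connection.api_request(
+#     #     method='POST', path='/entries:list',
+#     #     data={'projectIds': ['my-project']})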
+ +import unittest + + +class TestConnection(unittest.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + + def _getTargetClass(self): + from google.cloud.logging.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_default_url(self): + creds = _Credentials() + conn = self._makeOne(creds) + klass = self._getTargetClass() + self.assertEqual(conn.credentials._scopes, klass.SCOPE) + + +class Test_LoggingAPI(unittest.TestCase): + + PROJECT = 'project' + LIST_ENTRIES_PATH = 'entries:list' + WRITE_ENTRIES_PATH = 'entries:write' + LOGGER_NAME = 'LOGGER_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + + def _getTargetClass(self): + from google.cloud.logging.connection import _LoggingAPI + return _LoggingAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + api = self._makeOne(connection) + self.assertIs(api._connection, connection) + + @staticmethod + def _make_timestamp(): + from datetime import datetime + from google.cloud._helpers import UTC + + NOW = datetime.utcnow().replace(tzinfo=UTC) + return _datetime_to_rfc3339_w_nanos(NOW) + + def test_list_entries_no_paging(self): + TIMESTAMP = self._make_timestamp() + IID = 'IID' + TEXT = 'TEXT' + SENT = { + 'projectIds': [self.PROJECT], + } + TOKEN = 'TOKEN' + RETURNED = { + 'entries': [{ + 'textPayload': TEXT, + 'insertId': IID, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries([self.PROJECT]) + + self.assertEqual(entries, RETURNED['entries']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_list_entries_w_paging(self): + from google.cloud.logging import DESCENDING + + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + TIMESTAMP = self._make_timestamp() + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SENT = { + 'projectIds': [PROJECT1, PROJECT2], + 'filter': self.FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + } + RETURNED = { + 'entries': [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries( + projects=[PROJECT1, PROJECT2], filter_=self.FILTER, + order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(entries, RETURNED['entries']) + self.assertIsNone(token) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], 
SENT) + + def test_write_entries_single(self): + TEXT = 'TEXT' + ENTRY = { + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + } + SENT = { + 'entries': [ENTRY], + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.write_entries([ENTRY]) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_write_entries_multiple(self): + TEXT = 'TEXT' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + RESOURCE = { + 'type': 'global', + } + LABELS = { + 'baz': 'qux', + 'spam': 'eggs', + } + ENTRY1 = { + 'textPayload': TEXT, + } + ENTRY2 = { + 'jsonPayload': {'foo': 'bar'}, + } + SENT = { + 'logName': LOG_NAME, + 'resource': RESOURCE, + 'labels': LABELS, + 'entries': [ENTRY1, ENTRY2], + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_logger_delete(self): + path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn = _Connection({}) + api = self._makeOne(conn) + + api.logger_delete(self.PROJECT, self.LOGGER_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + self.assertEqual(conn._called_with['path'], path) + + +class Test_SinksAPI(unittest.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + LIST_SINKS_PATH = 'projects/%s/sinks' % (PROJECT,) + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from google.cloud.logging.connection import _SinksAPI + return _SinksAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + api = self._makeOne(connection) + self.assertIs(api._connection, connection) + + def test_list_sinks_no_paging(self): + TOKEN = 'TOKEN' + RETURNED = { + 'sinks': [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + sinks, token = api.list_sinks(self.PROJECT) + + self.assertEqual(sinks, RETURNED['sinks']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SINKS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], {}) + + def test_list_sinks_w_paging(self): + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + RETURNED = { + 'sinks': [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + sinks, token = api.list_sinks( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(sinks, RETURNED['sinks']) + self.assertIsNone(token) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SINKS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], + {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + + def 
test_sink_create_conflict(self): + from google.cloud.exceptions import Conflict + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection() + conn._raise_conflict = True + api = self._makeOne(conn) + + with self.assertRaises(Conflict): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_create_ok(self): + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_get_miss(self): + from google.cloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_get_hit(self): + RESPONSE = { + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection(RESPONSE) + api = self._makeOne(conn) + + response = api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(response, RESPONSE) + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_update_miss(self): + from google.cloud.exceptions import NotFound + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_update_hit(self): + SENT = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_sink_delete_miss(self): + from google.cloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_sink_delete_hit(self): + conn = _Connection({}) + api = self._makeOne(conn) + + 
api.sink_delete(self.PROJECT, self.SINK_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with['path'], path) + + +class Test_MetricsAPI(unittest.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + LIST_METRICS_PATH = 'projects/%s/metrics' % (PROJECT,) + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from google.cloud.logging.connection import _MetricsAPI + return _MetricsAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_list_metrics_no_paging(self): + TOKEN = 'TOKEN' + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + metrics, token = api.list_metrics(self.PROJECT) + + self.assertEqual(metrics, RETURNED['metrics']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_METRICS_PATH,) + self.assertEqual(conn._called_with['path'], path) + + def test_list_metrics_w_paging(self): + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + RETURNED = { + 'metrics': [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + metrics, token = api.list_metrics( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(metrics, RETURNED['metrics']) + self.assertIsNone(token) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/%s' % (self.LIST_METRICS_PATH,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['query_params'], + {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + + def test_metric_create_conflict(self): + from google.cloud.exceptions import Conflict + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection() + conn._raise_conflict = True + api = self._makeOne(conn) + + with self.assertRaises(Conflict): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_create_ok(self): + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_get_miss(self): + from google.cloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_get_hit(self): + RESPONSE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn 
= _Connection(RESPONSE) + api = self._makeOne(conn) + + response = api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(response, RESPONSE) + self.assertEqual(conn._called_with['method'], 'GET') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_update_miss(self): + from google.cloud.exceptions import NotFound + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_update_hit(self): + SENT = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + self.assertEqual(conn._called_with['method'], 'PUT') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_metric_delete_miss(self): + from google.cloud.exceptions import NotFound + conn = _Connection() + api = self._makeOne(conn) + + with self.assertRaises(NotFound): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_delete_hit(self): + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _Connection(object): + + _called_with = None + _raise_conflict = False + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + from google.cloud.exceptions import Conflict + from google.cloud.exceptions import NotFound + self._called_with = kw + if self._raise_conflict: + raise Conflict('oops') + try: + response, self._responses = self._responses[0], self._responses[1:] + except IndexError: + raise NotFound('miss') + return response + + +def _datetime_to_rfc3339_w_nanos(value): + from google.cloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) + return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py new file mode 100644 index 000000000000..5a78243b1336 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -0,0 +1,235 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_logger_name_from_path(unittest.TestCase): + + def _callFUT(self, path): + from google.cloud.logging.entries import logger_name_from_path + return logger_name_from_path(path) + + def test_w_simple_name(self): + LOGGER_NAME = 'LOGGER_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH) + self.assertEqual(logger_name, LOGGER_NAME) + + def test_w_name_w_all_extras(self): + LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH) + self.assertEqual(logger_name, LOGGER_NAME) + + +class Test_BaseEntry(unittest.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from google.cloud.logging.entries import _BaseEntry + + class _Dummy(_BaseEntry): + _PAYLOAD_KEY = 'dummyPayload' + + return _Dummy + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = 'PAYLOAD' + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + self.assertIs(entry.logger, logger) + self.assertIsNone(entry.insert_id) + self.assertIsNone(entry.timestamp) + self.assertIsNone(entry.labels) + self.assertIsNone(entry.severity) + self.assertIsNone(entry.http_request) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = 'PAYLOAD' + IID = 'IID' + TIMESTAMP = datetime.datetime.now() + LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger, + insert_id=IID, + timestamp=TIMESTAMP, + labels=LABELS, + severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(entry.payload, PAYLOAD) + self.assertIs(entry.logger, logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, TIMESTAMP) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) + + def test_from_api_repr_missing_data_no_loggers(self): + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + } + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client) + self.assertEqual(entry.payload, PAYLOAD) + self.assertIsNone(entry.insert_id) + self.assertIsNone(entry.timestamp) + self.assertIsNone(entry.severity) + self.assertIsNone(entry.http_request) + logger = entry.logger + self.assertIsInstance(logger, _Logger) + self.assertIs(logger.client, client) + self.assertEqual(logger.name, self.LOGGER_NAME) + + def 
test_from_api_repr_w_loggers_no_logger_match(self): + from datetime import datetime + from google.cloud._helpers import UTC + klass = self._getTargetClass() + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + SEVERITY = 'CRITICAL' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + API_REPR = { + 'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + 'labels': LABELS, + 'severity': SEVERITY, + 'httpRequest': { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + }, + } + loggers = {} + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) + logger = entry.logger + self.assertIsInstance(logger, _Logger) + self.assertIs(logger.client, client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertEqual(loggers, {LOG_NAME: logger}) + + def test_from_api_repr_w_loggers_w_logger_match(self): + from datetime import datetime + from google.cloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} + API_REPR = { + 'dummyPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + 'labels': LABELS, + } + LOGGER = object() + loggers = {LOG_NAME: LOGGER} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) + self.assertIs(entry.logger, LOGGER) + + +class TestProtobufEntry(unittest.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from google.cloud.logging.entries import ProtobufEntry + return ProtobufEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_parse_message(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + LOGGER = object() + message = Struct(fields={'foo': Value(bool_value=False)}) + with_true = Struct(fields={'foo': Value(bool_value=True)}) + PAYLOAD = json.loads(MessageToJson(with_true)) + entry = self._makeOne(PAYLOAD, LOGGER) + entry.parse_message(message) + self.assertTrue(message.fields['foo']) + + +def _datetime_to_rfc3339_w_nanos(value): + from google.cloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) + return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + + +class _Logger(object): + + def __init__(self, name, client): + self.name = name + self.client = client + + +class _Client(object): + + def __init__(self, project): + self.project = project + + def logger(self, name): + return 
_Logger(name, self) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py new file mode 100644 index 000000000000..575b92a3af4d --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -0,0 +1,705 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestLogger(unittest.TestCase): + + PROJECT = 'test-project' + LOGGER_NAME = 'logger-name' + + def _getTargetClass(self): + from google.cloud.logging.logger import Logger + return Logger + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + conn = object() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertIsNone(logger.labels) + + def test_ctor_explicit(self): + LABELS = {'foo': 'bar', 'baz': 'qux'} + conn = object() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.labels, LABELS) + + def test_batch_w_bound_client(self): + from google.cloud.logging.logger import Batch + conn = object() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + batch = logger.batch() + self.assertIsInstance(batch, Batch) + self.assertIs(batch.logger, logger) + self.assertIs(batch.client, client) + + def test_batch_w_alternate_client(self): + from google.cloud.logging.logger import Batch + conn1 = object() + conn2 = object() + client1 = _Client(self.PROJECT, conn1) + client2 = _Client(self.PROJECT, conn2) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + batch = logger.batch(client2) + self.assertIsInstance(batch, Batch) + self.assertIs(batch.logger, logger) + self.assertIs(batch.client, client2) + + def test_log_text_w_str_implicit_client(self): + TEXT = 'TEXT' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client) + + logger.log_text(TEXT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_text_w_default_labels(self): + TEXT = 
'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + + logger.log_text(TEXT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): + TEXT = u'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + + logger.log_text(TEXT, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, http_request=REQUEST) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_struct_w_implicit_client(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_struct_w_default_labels(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + + logger.log_struct(STRUCT) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_struct_w_explicit_client_labels_severity_httpreq(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + + logger.log_struct(STRUCT, client=client2, 
labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_proto_w_implicit_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client) + + logger.log_proto(message) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_proto_w_default_labels(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + + logger.log_proto(message) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_proto_w_explicit_client_labels_severity_httpreq(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + + logger.log_proto(message, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client) + + logger.delete() + + self.assertEqual(api._logger_delete_called_with, + (self.PROJECT, self.LOGGER_NAME)) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._makeOne(self.LOGGER_NAME, client=client1) + + logger.delete(client=client2) + + self.assertEqual(api._logger_delete_called_with, + (self.PROJECT, self.LOGGER_NAME)) + + def test_list_entries_defaults(self): + LISTED = { + 'projects': None, + 
'filter_': 'logName=projects/%s/logs/%s' % + (self.PROJECT, self.LOGGER_NAME), + 'order_by': None, + 'page_size': None, + 'page_token': None, + } + TOKEN = 'TOKEN' + client = _Client(self.PROJECT) + client._token = TOKEN + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries() + self.assertEqual(len(entries), 0) + self.assertEqual(token, TOKEN) + self.assertEqual(client._listed, LISTED) + + def test_list_entries_explicit(self): + from google.cloud.logging import DESCENDING + + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'resource.type:global' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + LISTED = { + 'projects': ['PROJECT1', 'PROJECT2'], + 'filter_': '%s AND logName=projects/%s/logs/%s' % + (FILTER, self.PROJECT, self.LOGGER_NAME), + 'order_by': DESCENDING, + 'page_size': PAGE_SIZE, + 'page_token': TOKEN, + } + client = _Client(self.PROJECT) + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 0) + self.assertIsNone(token) + self.assertEqual(client._listed, LISTED) + + +class TestBatch(unittest.TestCase): + + PROJECT = 'test-project' + + def _getTargetClass(self): + from google.cloud.logging.logger import Batch + return Batch + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_defaults(self): + logger = _Logger() + client = _Client(project=self.PROJECT) + batch = self._makeOne(logger, client) + self.assertIs(batch.logger, logger) + self.assertIs(batch.client, client) + self.assertEqual(len(batch.entries), 0) + + def test_log_text_defaults(self): + TEXT = 'This is the entry text' + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_text(TEXT) + self.assertEqual(batch.entries, + [('text', TEXT, None, None, None, None)]) + + def test_log_text_explicit(self): + TEXT = 'This is the entry text' + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, + http_request=REQUEST) + self.assertEqual(batch.entries, + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) + + def test_log_struct_defaults(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_struct(STRUCT) + self.assertEqual(batch.entries, + [('struct', STRUCT, None, None, None, None)]) + + def test_log_struct_explicit(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, + severity=SEVERITY, 
http_request=REQUEST) + self.assertEqual(batch.entries, + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) + + def test_log_proto_defaults(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_proto(message) + self.assertEqual(batch.entries, + [('proto', message, None, None, None, None)]) + + def test_log_proto_explicit(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client = _Client(project=self.PROJECT, connection=object()) + logger = _Logger() + batch = self._makeOne(logger, client=client) + batch.log_proto(message, labels=LABELS, insert_id=IID, + severity=SEVERITY, http_request=REQUEST) + self.assertEqual(batch.entries, + [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) + + def test_commit_w_invalid_entry_type(self): + logger = _Logger() + client = _Client(project=self.PROJECT, connection=object()) + batch = self._makeOne(logger, client) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) + with self.assertRaises(ValueError): + batch.commit() + + def test_commit_w_bound_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + IID1 = 'IID1' + IID2 = 'IID2' + IID3 = 'IID3' + RESOURCE = { + 'type': 'global', + } + ENTRIES = [ + {'textPayload': TEXT, 'insertId': IID1}, + {'jsonPayload': STRUCT, 'insertId': IID2}, + {'protoPayload': json.loads(MessageToJson(message)), + 'insertId': IID3}, + ] + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = _Logger() + batch = self._makeOne(logger, client=client) + + batch.log_text(TEXT, insert_id=IID1) + batch.log_struct(STRUCT, insert_id=IID2) + batch.log_proto(message, insert_id=IID3) + batch.commit() + + self.assertEqual(list(batch.entries), []) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.full_name, RESOURCE, None)) + + def test_commit_w_alternate_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging.logger import Logger + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = { + 'foo': 'bar', + 'baz': 'qux', + } + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = Logger('logger_name', client1, labels=DEFAULT_LABELS) + RESOURCE = {'type': 'global'} + ENTRIES = [ + {'textPayload': TEXT, 'labels': LABELS}, + {'jsonPayload': STRUCT, 'severity': SEVERITY}, + {'protoPayload': 
json.loads(MessageToJson(message)), + 'httpRequest': REQUEST}, + ] + batch = self._makeOne(logger, client=client1) + + batch.log_text(TEXT, labels=LABELS) + batch.log_struct(STRUCT, severity=SEVERITY) + batch.log_proto(message, http_request=REQUEST) + batch.commit(client=client2) + + self.assertEqual(list(batch.entries), []) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) + + def test_context_mgr_success(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging.logger import Logger + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = Logger('logger_name', client, labels=DEFAULT_LABELS) + RESOURCE = { + 'type': 'global', + } + ENTRIES = [ + {'textPayload': TEXT, 'httpRequest': REQUEST}, + {'jsonPayload': STRUCT, 'labels': LABELS}, + {'protoPayload': json.loads(MessageToJson(message)), + 'severity': SEVERITY}, + ] + batch = self._makeOne(logger, client=client) + + with batch as other: + other.log_text(TEXT, http_request=REQUEST) + other.log_struct(STRUCT, labels=LABELS) + other.log_proto(message, severity=SEVERITY) + + self.assertEqual(list(batch.entries), []) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) + + def test_context_mgr_failure(self): + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + message = Struct(fields={'foo': Value(bool_value=True)}) + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = _Logger() + UNSENT = [ + ('text', TEXT, None, IID, None, None), + ('struct', STRUCT, None, None, SEVERITY, None), + ('proto', message, LABELS, None, None, REQUEST), + ] + batch = self._makeOne(logger, client=client) + + try: + with batch as other: + other.log_text(TEXT, insert_id=IID) + other.log_struct(STRUCT, severity=SEVERITY) + other.log_proto(message, labels=LABELS, http_request=REQUEST) + raise _Bugout() + except _Bugout: + pass + + self.assertEqual(list(batch.entries), UNSENT) + self.assertIsNone(api._write_entries_called_with) + + +class _Logger(object): + + labels = None + + def __init__(self, name='NAME', project='PROJECT'): + self.full_name = 'projects/%s/logs/%s' % (project, name) + + +class _DummyLoggingAPI(object): + + _write_entries_called_with = None + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + self._write_entries_called_with = ( + entries, logger_name, resource, labels) + + def logger_delete(self, project, logger_name): + self._logger_delete_called_with = (project, logger_name) + + +class _Client(object): + + _listed = _token = None + _entries = () + + def __init__(self, project, 
connection=None): + self.project = project + self.connection = connection + + def list_entries(self, **kw): + self._listed = kw + return self._entries, self._token + + +class _Bugout(Exception): + pass diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/unit_tests/test_metric.py new file mode 100644 index 000000000000..8f777ec0e33e --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -0,0 +1,251 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestMetric(unittest.TestCase): + + PROJECT = 'test-project' + METRIC_NAME = 'metric-name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from google.cloud.logging.metric import Metric + return Metric + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + client = _Client(self.PROJECT) + metric = self._makeOne(self.METRIC_NAME, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertIsNone(metric.filter_) + self.assertEqual(metric.description, '') + self.assertIs(metric.client, client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + client = _Client(self.PROJECT) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, + client=client, description=self.DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIs(metric.client, client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertIs(metric._client, client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_from_api_repr_w_description(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': DESCRIPTION, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + 
self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertIs(metric._client, client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_create_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.create() + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + + def test_create_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + + metric.create(client=client2) + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + self.assertFalse(metric.exists()) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_exists_hit_w_alternate_client(self): + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + self.assertTrue(metric.exists(client=client2)) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_reload_w_bound_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': NEW_FILTER, + } + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client, + description=self.DESCRIPTION) + + metric.reload() + + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, '') + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_reload_w_alternate_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'description': self.DESCRIPTION, + 'filter': NEW_FILTER, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + metric.reload(client=client2) + + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_update_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.update() + + self.assertEqual( + api._metric_update_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + + def test_update_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) 
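+        # The stub API is attached to client2 only; the metric itself is
+        # bound to client1, so the update must be routed through the
+        # explicitly passed client.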
+ api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + + metric.update(client=client2) + + self.assertEqual( + api._metric_update_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.delete() + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + metric.delete(client=client2) + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) + + +class _Client(object): + + def __init__(self, project): + self.project = project + + +class _DummyMetricsAPI(object): + + def metric_create(self, project, metric_name, filter_, description): + self._metric_create_called_with = ( + project, metric_name, filter_, description) + + def metric_get(self, project, metric_name): + from google.cloud.exceptions import NotFound + self._metric_get_called_with = (project, metric_name) + try: + return self._metric_get_response + except AttributeError: + raise NotFound('miss') + + def metric_update(self, project, metric_name, filter_, description): + self._metric_update_called_with = ( + project, metric_name, filter_, description) + + def metric_delete(self, project, metric_name): + self._metric_delete_called_with = (project, metric_name) diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py new file mode 100644 index 000000000000..64245e66db72 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -0,0 +1,262 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
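+
+# NOTE: The tests below exercise Sink entirely against the in-memory
+# doubles (_Client and _DummySinksAPI) defined at the bottom of this
+# module; no real API requests are issued.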
+ +import unittest + + +class TestSink(unittest.TestCase): + + PROJECT = 'test-project' + SINK_NAME = 'sink-name' + FILTER = 'logName:syslog AND severity>=INFO' + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from google.cloud.logging.sink import Sink + return Sink + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + client = _Client(self.PROJECT) + sink = self._makeOne(self.SINK_NAME, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertIsNone(sink.filter_) + self.assertIsNone(sink.destination) + self.assertIs(sink.client, client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + client = _Client(self.PROJECT) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink._client, client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_w_description(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink._client, client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_create_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.create() + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_create_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + api = client2.sinks_api = _DummySinksAPI() + + sink.create(client=client2) + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = 
_DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + self.assertFalse(sink.exists()) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_exists_hit_w_alternate_client(self): + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + self.assertTrue(sink.exists(client=client2)) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_reload_w_bound_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.reload() + + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_reload_w_alternate_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + sink.reload(client=client2) + + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_update_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.update() + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_update_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + sink.update(client=client2) + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.delete() + + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + 
sink.delete(client=client2) + + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) + + +class _Client(object): + + def __init__(self, project): + self.project = project + + +class _DummySinksAPI(object): + + def sink_create(self, project, sink_name, filter_, destination): + self._sink_create_called_with = ( + project, sink_name, filter_, destination) + + def sink_get(self, project, sink_name): + from google.cloud.exceptions import NotFound + self._sink_get_called_with = (project, sink_name) + try: + return self._sink_get_response + except AttributeError: + raise NotFound('miss') + + def sink_update(self, project, sink_name, filter_, destination): + self._sink_update_called_with = ( + project, sink_name, filter_, destination) + + def sink_delete(self, project, sink_name): + self._sink_delete_called_with = (project, sink_name) From e65b299ff709cb2dae6fa47964762cc114103bbf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:48:40 -0700 Subject: [PATCH 002/855] Making logging subpackage into a proper package. - Adding README, setup.py, MANIFEST.in, .coveragerc and tox.ini - Adding google-cloud-logging as a dependency to the umbrella package - Adding the logging subdirectory into the list of packages for verifying the docs - Incorporating the logging subdirectory into the umbrella coverage report - Adding the logging only tox tests to the Travis config - Adding {toxinidir}/../core as a dependency for the logging tox config --- packages/google-cloud-logging/.coveragerc | 11 ++++ packages/google-cloud-logging/MANIFEST.in | 4 ++ packages/google-cloud-logging/README.rst | 59 +++++++++++++++++++ packages/google-cloud-logging/setup.py | 72 +++++++++++++++++++++++ packages/google-cloud-logging/tox.ini | 30 ++++++++++ 5 files changed, 176 insertions(+) create mode 100644 packages/google-cloud-logging/.coveragerc create mode 100644 packages/google-cloud-logging/MANIFEST.in create mode 100644 packages/google-cloud-logging/README.rst create mode 100644 packages/google-cloud-logging/setup.py create mode 100644 packages/google-cloud-logging/tox.ini diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc new file mode 100644 index 000000000000..a54b99aa14b7 --- /dev/null +++ b/packages/google-cloud-logging/.coveragerc @@ -0,0 +1,11 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in new file mode 100644 index 000000000000..cb3a2b9ef4fa --- /dev/null +++ b/packages/google-cloud-logging/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst +graft google +graft unit_tests +global-exclude *.pyc diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst new file mode 100644 index 000000000000..a7e213131054 --- /dev/null +++ b/packages/google-cloud-logging/README.rst @@ -0,0 +1,59 @@ +Python Client for Stackdriver Logging +===================================== + + Python idiomatic client for `Stackdriver Logging`_ + +.. _Stackdriver Logging: https://cloud.google.com/logging/ + +- `Homepage`_ +- `API Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ +.. 
_API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ + +Quick Start +----------- + +:: + + $ pip install --upgrade google-cloud-logging + +Authentication +-------------- + +With ``google-cloud-python`` we try to make authentication as painless as +possible. Check out the `Authentication section`_ in our documentation to +learn more. You may also find the `authentication document`_ shared by all +the ``google-cloud-*`` libraries to be helpful. + +.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication + +Using the API +------------- + +`Stackdriver Logging`_ API (`Logging API docs`_) allows you to store, search, +analyze, monitor, and alert on log data and events from Google Cloud Platform. + +.. _Stackdriver Logging: https://cloud.google.com/logging/ +.. _Logging API docs: https://cloud.google.com/logging/docs/ + +.. code:: python + + from google.cloud import logging + client = logging.Client() + logger = client.logger('log_name') + logger.log_text('A simple entry') # API call + +Example of fetching entries: + +.. code:: python + + entries, token = logger.list_entries() + for entry in entries: + print(entry.payload) + +See the ``google-cloud-python`` API `logging documentation`_ to learn how to +connect to Stackdriver Logging using this Client Library. + +.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py new file mode 100644 index 000000000000..f5424672e917 --- /dev/null +++ b/packages/google-cloud-logging/setup.py @@ -0,0 +1,72 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + +with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + +# NOTE: This is duplicated throughout and we should try to +# consolidate. 
+SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'jjg+google-cloud-python@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'google-cloud-core', + 'grpcio >= 1.0.0', + 'google-gax >= 0.14.1, < 0.15dev', + 'gapic-google-logging-v2 >= 0.9.0, < 0.10dev', + 'grpc-google-logging-v2 >= 0.9.0, < 0.10dev', +] + +setup( + name='google-cloud-logging', + version='0.20.0dev', + description='Python Client for Stackdriver Logging', + long_description=README, + namespace_packages=[ + 'google', + 'google.cloud', + ], + packages=find_packages(), + install_requires=REQUIREMENTS, + **SETUP_BASE +) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini new file mode 100644 index 000000000000..abfe42a6d8e2 --- /dev/null +++ b/packages/google-cloud-logging/tox.ini @@ -0,0 +1,30 @@ +[tox] +envlist = + py27,py34,py35,cover + +[testing] +deps = + {toxinidir}/../core + pytest +covercmd = + py.test --quiet \ + --cov=google.cloud.logging \ + --cov=unit_tests \ + --cov-config {toxinidir}/.coveragerc \ + unit_tests + +[testenv] +commands = + py.test --quiet {posargs} unit_tests +deps = + {[testing]deps} + +[testenv:cover] +basepython = + python2.7 +commands = + {[testing]covercmd} +deps = + {[testenv]deps} + coverage + pytest-cov From a639cbbedcb571c4dfdd391c0d0335db0cb2851e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:53:24 -0700 Subject: [PATCH 003/855] Preparing for a release of all packages. Towards #2441. - Updating umbrella README to point at all packages - Putting upper bounds on grpcio in dependencies - Putting lower bounds on all google-cloud-* packages listed as dependencies - Adding `setup.cfg` for universal wheels --- packages/google-cloud-logging/setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index f5424672e917..8fbd52fafa0f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,8 +50,8 @@ REQUIREMENTS = [ - 'google-cloud-core', - 'grpcio >= 1.0.0', + 'google-cloud-core >= 0.20.0', + 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-logging-v2 >= 0.9.0, < 0.10dev', 'grpc-google-logging-v2 >= 0.9.0, < 0.10dev', @@ -59,7 +59,7 @@ setup( name='google-cloud-logging', - version='0.20.0dev', + version='0.20.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From c8ad7e90826e0c3d010de23a18128cc60e0b3c03 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:57:43 -0700 Subject: [PATCH 004/855] Adding setup.cfg to all packages. 
--- packages/google-cloud-logging/setup.cfg | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/google-cloud-logging/setup.cfg diff --git a/packages/google-cloud-logging/setup.cfg b/packages/google-cloud-logging/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/packages/google-cloud-logging/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 From 602fbc96c2287fab2ec247cd6041c3a5b492f244 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 14:11:08 -0700 Subject: [PATCH 005/855] Upgrading versions of GAPIC and gRPC generated libraries. This resolves the google-gax 0.14.* conflict. --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8fbd52fafa0f..0a0255553e24 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -53,8 +53,8 @@ 'google-cloud-core >= 0.20.0', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', - 'gapic-google-logging-v2 >= 0.9.0, < 0.10dev', - 'grpc-google-logging-v2 >= 0.9.0, < 0.10dev', + 'gapic-google-logging-v2 >= 0.10.1, < 0.11dev', + 'grpc-google-logging-v2 >= 0.10.1, < 0.11dev', ] setup( From 6005ff18bfd6bd4251b581f78dbb1045969d0019 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 15:27:14 -0700 Subject: [PATCH 006/855] Making sure to use kwargs when calling GAPIC surfaces. --- .../google/cloud/logging/_gax.py | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index fab3077941c2..56a3e7876008 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -79,7 +79,8 @@ def list_entries(self, projects, filter_='', order_by='', page_token = INITIAL_PAGE options = CallOptions(page_token=page_token) page_iter = self._gax_api.list_log_entries( - projects, filter_, order_by, page_size, options) + projects, filter_=filter_, order_by=order_by, + page_size=page_size, options=options) entries = [_log_entry_pb_to_mapping(entry_pb) for entry_pb in page_iter.next()] token = page_iter.page_token or None @@ -107,8 +108,9 @@ def write_entries(self, entries, logger_name=None, resource=None, options = None partial_success = False entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - self._gax_api.write_log_entries(entry_pbs, logger_name, resource, - labels, partial_success, options) + self._gax_api.write_log_entries( + entry_pbs, log_name=logger_name, resource=resource, labels=labels, + partial_success=partial_success, options=options) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -122,7 +124,7 @@ def logger_delete(self, project, logger_name): options = None path = 'projects/%s/logs/%s' % (project, logger_name) try: - self._gax_api.delete_log(path, options) + self._gax_api.delete_log(path, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -163,7 +165,8 @@ def list_sinks(self, project, page_size=0, page_token=None): page_token = INITIAL_PAGE options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_sinks(path, page_size, options) + page_iter = self._gax_api.list_sinks(path, 
page_size=page_size, + options=options) sinks = [_log_sink_pb_to_mapping(log_sink_pb) for log_sink_pb in page_iter.next()] token = page_iter.page_token or None @@ -194,7 +197,7 @@ def sink_create(self, project, sink_name, filter_, destination): sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) try: - self._gax_api.create_sink(parent, sink_pb, options) + self._gax_api.create_sink(parent, sink_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: path = 'projects/%s/sinks/%s' % (project, sink_name) @@ -217,7 +220,7 @@ def sink_get(self, project, sink_name): options = None path = 'projects/%s/sinks/%s' % (project, sink_name) try: - sink_pb = self._gax_api.get_sink(path, options) + sink_pb = self._gax_api.get_sink(path, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -249,7 +252,7 @@ def sink_update(self, project, sink_name, filter_, destination): path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) try: - self._gax_api.update_sink(path, sink_pb, options) + self._gax_api.update_sink(path, sink_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -268,7 +271,7 @@ def sink_delete(self, project, sink_name): options = None path = 'projects/%s/sinks/%s' % (project, sink_name) try: - self._gax_api.delete_sink(path, options) + self._gax_api.delete_sink(path, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -309,7 +312,8 @@ def list_metrics(self, project, page_size=0, page_token=None): page_token = INITIAL_PAGE options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_log_metrics(path, page_size, options) + page_iter = self._gax_api.list_log_metrics( + path, page_size=page_size, options=options) metrics = [_log_metric_pb_to_mapping(log_metric_pb) for log_metric_pb in page_iter.next()] token = page_iter.page_token or None @@ -339,7 +343,7 @@ def metric_create(self, project, metric_name, filter_, description): metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) try: - self._gax_api.create_log_metric(parent, metric_pb, options) + self._gax_api.create_log_metric(parent, metric_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: path = 'projects/%s/metrics/%s' % (project, metric_name) @@ -362,7 +366,7 @@ def metric_get(self, project, metric_name): options = None path = 'projects/%s/metrics/%s' % (project, metric_name) try: - metric_pb = self._gax_api.get_log_metric(path, options) + metric_pb = self._gax_api.get_log_metric(path, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -394,7 +398,7 @@ def metric_update(self, project, metric_name, filter_, description): metric_pb = LogMetric(name=path, filter=filter_, description=description) try: - self._gax_api.update_log_metric(path, metric_pb, options) + self._gax_api.update_log_metric(path, metric_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) @@ -413,7 +417,7 @@ def metric_delete(self, project, metric_name): options = None path = 'projects/%s/metrics/%s' % (project, metric_name) try: - self._gax_api.delete_log_metric(path, options) + 
self._gax_api.delete_log_metric(path, options=options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) From 4a046788dbca751becb4e02db8982afcacbd5714 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Sep 2016 14:13:40 -0400 Subject: [PATCH 007/855] Disable pylint's 'ungrouped-imports' error. We share the 'google' namespace with third-party packages. PEP 8 wants 'local' imports to be separated from 'third-party' imports, which is more important than pylint's attempt to group them by name alone. --- packages/google-cloud-logging/google/cloud/logging/_gax.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 56a3e7876008..fafcafdabf85 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -27,12 +27,10 @@ from google.protobuf.json_format import Parse from grpc import StatusCode -# pylint: disable=ungrouped-imports from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -# pylint: enable=ungrouped-imports class _LoggingAPI(object): From 5eb46739bb7a75fff65dc329f7cfe0cf4d30f9e9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Sep 2016 16:17:02 -0700 Subject: [PATCH 008/855] General clean-up after rename. - Removing "graft google" from MANIFEST for umbrella package. It isn't needed since the umbrella package has no source - Updating license year on copy-pasted namespace package __init__.py files. Done via: https://gist.github.com/dhermes/a0e88f891ffffc3ecea5c9bb2f13e4f5 - Removing unused HTML context from docs/conf.py - Setting GH_OWNER and GH_PROJECT_NAME (which together make the REPO_SLUG) manually in the docs update scripts. This way the env. variables don't need to be set in the Travis UI / CLI. Also updating tox.ini to stop passing those variables through - Removing the root package from `verify_included_modules.py` since it no longer has any source - Updating a docstring reference to a moved class in the Bigtable system test - Removing redundant `GOOGLE_CLOUD_*` in `tox` system test `passenv` (already covered by `GOOGLE_*`) --- packages/google-cloud-logging/google/cloud/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/__init__.py b/packages/google-cloud-logging/google/cloud/__init__.py index 8ac7b74af136..b2b833373882 100644 --- a/packages/google-cloud-logging/google/cloud/__init__.py +++ b/packages/google-cloud-logging/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ba69e71f1b74b33e5e283fb7ed26030a2eec2b16 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 3 Oct 2016 21:32:48 -0700 Subject: [PATCH 009/855] Updating package READMEs with more useful doc links. Also removing duplicate "Homepage" links (duplicate of "API Documentation" links). 
--- packages/google-cloud-logging/README.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a7e213131054..750cd1aba8db 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -5,11 +5,9 @@ Python Client for Stackdriver Logging .. _Stackdriver Logging: https://cloud.google.com/logging/ -- `Homepage`_ -- `API Documentation`_ +- `Documentation`_ -.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ -.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html Quick Start ----------- From 77bf18035b7652af04bce9afdbc6c21aa745b728 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:50:55 -0700 Subject: [PATCH 010/855] Replace types string with str. Uses command: ag -l 'type ([^:]+): string' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): string/type \1: str/g' Note: [-r for gsed (GNU sed) is needed for group matching](http://superuser.com/a/336819/125262). --- .../google/cloud/logging/_gax.py | 58 +++++++++--------- .../google/cloud/logging/connection.py | 60 +++++++++---------- .../google/cloud/logging/entries.py | 2 +- .../google/cloud/logging/logger.py | 40 ++++++------- .../google/cloud/logging/metric.py | 6 +- .../google/cloud/logging/sink.py | 6 +- 6 files changed, 86 insertions(+), 86 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index fafcafdabf85..f399dbb5a8c4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -91,7 +91,7 @@ def write_entries(self, entries, logger_name=None, resource=None, :type entries: sequence of mapping :param entries: the log entry resources to log. - :type logger_name: string + :type logger_name: str :param logger_name: name of default logger to which to log the entries; individual entries may override. @@ -113,10 +113,10 @@ def write_entries(self, entries, logger_name=None, resource=None, def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request - :type project: string + :type project: str :param project: ID of project containing the log entries to delete - :type logger_name: string + :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ options = None @@ -142,7 +142,7 @@ def __init__(self, gax_api): def list_sinks(self, project, page_size=0, page_token=None): """List sinks for the project associated with this client. - :type project: string + :type project: str :param project: ID of the project whose sinks are to be listed. :type page_size: int @@ -176,17 +176,17 @@ def sink_create(self, project, sink_name, filter_, destination): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create - :type project: string + :type project: str :param project: ID of the project in which to create the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the sink. 
- :type destination: string + :type destination: str :param destination: destination URI for the entries exported by the sink. """ @@ -205,10 +205,10 @@ def sink_create(self, project, sink_name, filter_, destination): def sink_get(self, project, sink_name): """API call: retrieve a sink resource. - :type project: string + :type project: str :param project: ID of the project containing the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink :rtype: dict @@ -228,17 +228,17 @@ def sink_get(self, project, sink_name): def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. - :type project: string + :type project: str :param project: ID of the project containing the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the sink. - :type destination: string + :type destination: str :param destination: destination URI for the entries exported by the sink. @@ -260,10 +260,10 @@ def sink_update(self, project, sink_name, filter_, destination): def sink_delete(self, project, sink_name): """API call: delete a sink resource. - :type project: string + :type project: str :param project: ID of the project containing the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink """ options = None @@ -289,7 +289,7 @@ def __init__(self, gax_api): def list_metrics(self, project, page_size=0, page_token=None): """List metrics for the project associated with this client. - :type project: string + :type project: str :param project: ID of the project whose metrics are to be listed. :type page_size: int @@ -323,17 +323,17 @@ def metric_create(self, project, metric_name, filter_, description): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create - :type project: string + :type project: str :param project: ID of the project in which to create the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the metric. - :type description: string + :type description: str :param description: description of the metric. """ options = None @@ -351,10 +351,10 @@ def metric_create(self, project, metric_name, filter_, description): def metric_get(self, project, metric_name): """API call: retrieve a metric resource. - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric :rtype: dict @@ -374,17 +374,17 @@ def metric_get(self, project, metric_name): def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the metric. - :type description: string + :type description: str :param description: description of the metric. 
:rtype: dict @@ -406,10 +406,10 @@ def metric_update(self, project, metric_name, filter_, description): def metric_delete(self, project, metric_name): """API call: delete a metric resource. - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric """ options = None diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 60b893751fc6..08a540671f5f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -27,7 +27,7 @@ class Connection(base_connection.JSONConnection): :type http: :class:`httplib2.Http` or class that defines ``request()``. :param http: (Optional) HTTP object to make requests. - :type api_base_url: string + :type api_base_url: str :param api_base_url: The base of the API call URL. Defaults to the value :attr:`Connection.API_BASE_URL`. """ @@ -123,7 +123,7 @@ def write_entries(self, entries, logger_name=None, resource=None, :type entries: sequence of mapping :param entries: the log entry resources to log. - :type logger_name: string + :type logger_name: str :param logger_name: name of default logger to which to log the entries; individual entries may override. @@ -155,10 +155,10 @@ def logger_delete(self, project, logger_name): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete - :type project: string + :type project: str :param project: ID of project containing the log entries to delete - :type logger_name: string + :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ path = '/projects/%s/logs/%s' % (project, logger_name) @@ -183,7 +183,7 @@ def list_sinks(self, project, page_size=None, page_token=None): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list - :type project: string + :type project: str :param project: ID of the project whose sinks are to be listed. :type page_size: int @@ -220,17 +220,17 @@ def sink_create(self, project, sink_name, filter_, destination): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create - :type project: string + :type project: str :param project: ID of the project in which to create the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the sink. - :type destination: string + :type destination: str :param destination: destination URI for the entries exported by the sink. """ @@ -248,10 +248,10 @@ def sink_get(self, project, sink_name): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get - :type project: string + :type project: str :param project: ID of the project containing the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink :rtype: dict @@ -266,17 +266,17 @@ def sink_update(self, project, sink_name, filter_, destination): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update - :type project: string + :type project: str :param project: ID of the project containing the sink. 
- :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the sink. - :type destination: string + :type destination: str :param destination: destination URI for the entries exported by the sink. """ @@ -294,10 +294,10 @@ def sink_delete(self, project, sink_name): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete - :type project: string + :type project: str :param project: ID of the project containing the sink. - :type sink_name: string + :type sink_name: str :param sink_name: the name of the sink """ target = '/projects/%s/sinks/%s' % (project, sink_name) @@ -322,7 +322,7 @@ def list_metrics(self, project, page_size=None, page_token=None): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list - :type project: string + :type project: str :param project: ID of the project whose metrics are to be listed. :type page_size: int @@ -359,17 +359,17 @@ def metric_create(self, project, metric_name, filter_, description=None): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create - :type project: string + :type project: str :param project: ID of the project in which to create the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the metric. - :type description: string + :type description: str :param description: description of the metric. """ target = '/projects/%s/metrics' % (project,) @@ -386,10 +386,10 @@ def metric_get(self, project, metric_name): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric :rtype: dict @@ -404,17 +404,17 @@ def metric_update(self, project, metric_name, filter_, description): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the metric. - :type description: string + :type description: str :param description: description of the metric. """ target = '/projects/%s/metrics/%s' % (project, metric_name) @@ -431,10 +431,10 @@ def metric_delete(self, project, metric_name): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete - :type project: string + :type project: str :param project: ID of the project containing the metric. - :type metric_name: string + :type metric_name: str :param metric_name: the name of the metric. 
""" target = '/projects/%s/metrics/%s' % (project, metric_name) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index cad23ee9c48a..bbedfa113006 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -65,7 +65,7 @@ class _BaseEntry(object): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 833971ee547f..38cc5110e271 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -25,7 +25,7 @@ class Logger(object): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs - :type name: string + :type name: str :param name: the name of the logger :type client: :class:`google.cloud.logging.client.Client` @@ -99,7 +99,7 @@ def _make_entry_resource(self, text=None, info=None, message=None, Only one of ``text``, ``info``, or ``message`` should be passed. - :type text: string or :class:`NoneType` + :type text: str or :class:`NoneType` :param text: text payload :type info: dict or :class:`NoneType` @@ -111,10 +111,10 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type labels: dict or :class:`NoneType` :param labels: labels passed in to calling method. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -175,10 +175,10 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -209,10 +209,10 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -243,10 +243,10 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. 
- :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -284,11 +284,11 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :param projects: project IDs to include. If not passed, defaults to the project bound to the client. - :type filter_: string + :type filter_: str :param filter_: a filter expression. See: https://cloud.google.com/logging/docs/view/advanced_filters - :type order_by: string + :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` or :data:`~google.cloud.logging.DESCENDING`. @@ -296,7 +296,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :param page_size: maximum number of entries to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of entries. If not passed, the API will return the first page of entries. @@ -344,16 +344,16 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, http_request=None): """Add a text entry to be logged during :meth:`commit`. - :type text: string + :type text: str :param text: the text entry :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -373,10 +373,10 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` @@ -396,10 +396,10 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. - :type insert_id: string or :class:`NoneType` + :type insert_id: str or :class:`NoneType` :param insert_id: (optional) unique ID for log entry. - :type severity: string or :class:`NoneType` + :type severity: str or :class:`NoneType` :param severity: (optional) severity of event being logged. :type http_request: dict or :class:`NoneType` diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index 05e18dba852a..162eed548720 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -23,10 +23,10 @@ class Metric(object): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics - :type name: string + :type name: str :param name: the name of the metric - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries tracked by the metric. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. 
@@ -35,7 +35,7 @@ class Metric(object): :param client: A client which holds credentials and project configuration for the metric (which requires a project). - :type description: string + :type description: str :param description: an optional description of the metric. """ def __init__(self, name, filter_=None, client=None, description=''): diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 356ade5407be..52c9efaece61 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -23,15 +23,15 @@ class Sink(object): See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks - :type name: string + :type name: str :param name: the name of the sink - :type filter_: string + :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the sink. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. - :type destination: string + :type destination: str :param destination: destination URI for the entries exported by the sink. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. From 9ee33d1b63594fd89a1cb100345234ba610a54d0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Oct 2016 17:04:00 -0700 Subject: [PATCH 011/855] Replace :: with `.. code-block:: console`. Towards #2404. --- packages/google-cloud-logging/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 750cd1aba8db..a325db025b07 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -12,7 +12,7 @@ Python Client for Stackdriver Logging Quick Start ----------- -:: +.. code-block:: console 

   $ pip install --upgrade google-cloud-logging 

From deeca4cf49d87ffcc4411f6746c8f300d0f6dbe5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 09:51:22 -0700 Subject: [PATCH 012/855] Remove None from param types and add (Optional). This runs a script to remove None from the types for parameters and add (Optional) to the description. Does not pass lint due to some too-long lines. I will clean those up manually. See: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2580#pullrequestreview-5178193 --- .../google/cloud/logging/client.py | 4 +- .../google/cloud/logging/entries.py | 14 ++-- .../google/cloud/logging/logger.py | 68 +++++++++---------- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ab02c90f464e..482cf2576eae 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -139,8 +139,8 @@ def _entry_from_resource(self, resource, loggers): :type resource: dict :param resource: one entry resource from API response - :type loggers: dict or None - :param loggers: A mapping of logger fullnames -> loggers. If not + :type loggers: dict + :param loggers: (Optional) A mapping of logger fullnames -> loggers. If not passed, the entry will have a newly-created logger. 
:rtype: One of: diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index bbedfa113006..b4cd6e714001 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -56,19 +56,19 @@ class _BaseEntry(object): :type logger: :class:`google.cloud.logging.logger.Logger` :param logger: the logger used to write the entry. - :type insert_id: text, or :class:`NoneType` + :type insert_id: text :param insert_id: (optional) the ID used to identify an entry uniquely. - :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp for the entry - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry """ @@ -94,8 +94,8 @@ def from_api_repr(cls, resource, client, loggers=None): :param client: Client which holds credentials and project configuration. - :type loggers: dict or None - :param loggers: A mapping of logger fullnames -> loggers. If not + :type loggers: dict + :param loggers: (Optional) A mapping of logger fullnames -> loggers. If not passed, the entry will have a newly-created logger. :rtype: :class:`google.cloud.logging.entries.TextEntry` diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 38cc5110e271..d078dda29726 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -32,7 +32,7 @@ class Logger(object): :param client: A client which holds credentials and project configuration for the logger (which requires a project). - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of default labels for entries written via this logger. """ @@ -99,25 +99,25 @@ def _make_entry_resource(self, text=None, info=None, message=None, Only one of ``text``, ``info``, or ``message`` should be passed. - :type text: str or :class:`NoneType` - :param text: text payload + :type text: str + :param text: (Optional) text payload - :type info: dict or :class:`NoneType` - :param info: struct payload + :type info: dict + :param info: (Optional) struct payload :type message: Protobuf message or :class:`NoneType` :param message: protobuf payload - :type labels: dict or :class:`NoneType` - :param labels: labels passed in to calling method. + :type labels: dict + :param labels: (Optional) labels passed in to calling method. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry @@ -172,16 +172,16 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. 
- :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry """ @@ -206,16 +206,16 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. """ @@ -240,16 +240,16 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. """ @@ -347,16 +347,16 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :type text: str :param text: the text entry - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. """ @@ -370,16 +370,16 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type info: dict :param info: the struct entry - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. 
""" @@ -393,16 +393,16 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type message: protobuf message :param message: the protobuf entry - :type labels: dict or :class:`NoneType` + :type labels: dict :param labels: (optional) mapping of labels for the entry. - :type insert_id: str or :class:`NoneType` + :type insert_id: str :param insert_id: (optional) unique ID for log entry. - :type severity: str or :class:`NoneType` + :type severity: str :param severity: (optional) severity of event being logged. - :type http_request: dict or :class:`NoneType` + :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. """ From 05a76db638e96cbaeea3ad1dced06e220939bf3a Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 11:21:42 -0700 Subject: [PATCH 013/855] Fix lint errors caused by addition of (Optional). Mostly, lines that were too long. --- packages/google-cloud-logging/google/cloud/logging/client.py | 5 +++-- .../google-cloud-logging/google/cloud/logging/entries.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 482cf2576eae..9b093dc9e131 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -140,8 +140,9 @@ def _entry_from_resource(self, resource, loggers): :param resource: one entry resource from API response :type loggers: dict - :param loggers: (Optional) A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. + :param loggers: + (Optional) A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. :rtype: One of: :class:`google.cloud.logging.entries.TextEntry`, diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index b4cd6e714001..0de34e68ed25 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -95,8 +95,9 @@ def from_api_repr(cls, resource, client, loggers=None): configuration. :type loggers: dict - :param loggers: (Optional) A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. + :param loggers: + (Optional) A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. :rtype: :class:`google.cloud.logging.entries.TextEntry` :returns: Text entry parsed from ``resource``. From 7ee974b514e29c96916f427d31f6cab07c1834c2 Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Fri, 21 Oct 2016 14:42:06 -0700 Subject: [PATCH 014/855] Merge pull request #2553 from waprin/logging_gax_optional Allows Explicitly Enabling/Disabling GAX for Logging/Pubsub --- .../google/cloud/logging/client.py | 16 ++++++++++++- .../unit_tests/test_client.py | 23 +++++++++++++++---- 2 files changed, 33 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 9b093dc9e131..3a44fd4658cf 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -71,11 +71,25 @@ class Client(JSONClient): :param http: An optional HTTP object to make requests. 
If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. + + :type use_gax: bool + :param use_gax: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment + variable """ _connection_class = Connection _logging_api = _sinks_api = _metrics_api = None + def __init__(self, project=None, credentials=None, + http=None, use_gax=None): + super(Client, self).__init__(project, credentials, http) + if use_gax is None: + self._use_gax = _USE_GAX + else: + self._use_gax = use_gax + @property def logging_api(self): """Helper for logging-related API calls. @@ -85,7 +99,7 @@ def logging_api(self): https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs """ if self._logging_api is None: - if _USE_GAX: + if self._use_gax: generated = GeneratedLoggingAPI() self._logging_api = GAXLoggingAPI(generated) else: diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 114f5f635c45..5becbd68b920 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -39,14 +39,14 @@ def test_ctor(self): self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): - from google.cloud.logging.connection import _LoggingAPI - from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - client = self._makeOne(self.PROJECT, credentials=_Credentials()) - conn = client.connection = object() + from google.cloud.logging import client as MUT + from google.cloud.logging.connection import _LoggingAPI with _Monkey(MUT, _USE_GAX=False): - api = client.logging_api + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + api = client.logging_api self.assertIsInstance(api, _LoggingAPI) self.assertIs(api._connection, conn) @@ -85,6 +85,19 @@ def __init__(self, _wrapped): again = client.logging_api self.assertIs(again, api) + def test_no_gax_ctor(self): + from google.cloud._testing import _Monkey + from google.cloud.logging import client as MUT + from google.cloud.logging.connection import _LoggingAPI + + creds = _Credentials() + with _Monkey(MUT, _USE_GAX=True): + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=False) + + api = client.logging_api + self.assertIsInstance(api, _LoggingAPI) + def test_sinks_api_wo_gax(self): from google.cloud.logging.connection import _SinksAPI from google.cloud.logging import client as MUT From 311837bfa31195886a594535de605df27f2d5335 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Oct 2016 17:59:48 -0700 Subject: [PATCH 015/855] Allowing for arbitrarily nested dictionaries in _log_entry_mapping_to_pb. Fixes #2552. 
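In brief, the fix converts the nested ``jsonPayload`` mapping through ``google.protobuf.json_format`` instead of assigning ``Struct`` keys one at a time, which failed for nested values. A minimal sketch of that conversion (the payload keys below are invented for illustration):

.. code-block:: python

    import json

    from google.protobuf.json_format import Parse
    from google.protobuf.struct_pb2 import Struct

    # An arbitrarily nested payload; per-key assignment into
    # ``entry_pb.json_payload`` could not handle the nested dict.
    payload = {
        'message': 'MESSAGE',
        'weather': {'temperature': 75, 'precipitation': False},
    }

    # ``Parse`` accepts a JSON string, hence the ``json.dumps`` round-trip;
    # the nested dict becomes a nested ``Struct``/``Value`` tree.
    struct_pb = Parse(json.dumps(payload), Struct())
    print(struct_pb)
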
--- .../google/cloud/logging/_gax.py | 7 +- .../unit_tests/test__gax.py | 102 ++++++++++++++---- 2 files changed, 86 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index f399dbb5a8c4..bd55820d056e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -603,8 +603,11 @@ def _log_entry_mapping_to_pb(mapping): entry_pb.labels[key] = value if 'jsonPayload' in mapping: - for key, value in mapping['jsonPayload'].items(): - entry_pb.json_payload[key] = value + # NOTE: ``json.dumps`` is wasteful here because internally, + # ``Parse`` will just call ``json.loads``. However, + # there is no equivalent public function to parse on raw + # dictionaries, so we waste cycles on parse/unparse. + Parse(json.dumps(mapping['jsonPayload']), entry_pb.json_payload) if 'protoPayload' in mapping: Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 6b0396c5d421..a89cffe09172 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -80,16 +80,12 @@ def test_list_entries_no_paging(self): self.assertEqual(page_size, 0) self.assertIs(options.page_token, INITIAL_PAGE) - def test_list_entries_with_paging(self): - from google.protobuf.struct_pb2 import Value + def _list_entries_with_paging_helper(self, payload, struct_pb): from google.cloud._testing import _GAXPageIterator + SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} - struct_pb = _StructPB({ - key: Value(string_value=value) for key, value in PAYLOAD.items() - }) response = _GAXPageIterator( [_LogEntryPB(self.LOG_NAME, json_payload=struct_pb)], NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) @@ -103,7 +99,7 @@ def test_list_entries_with_paging(self): self.assertIsInstance(entry, dict) self.assertEqual(entry['logName'], self.LOG_NAME) self.assertEqual(entry['resource'], {'type': 'global'}) - self.assertEqual(entry['jsonPayload'], PAYLOAD) + self.assertEqual(entry['jsonPayload'], payload) self.assertEqual(next_token, NEW_TOKEN) projects, filter_, order_by, page_size, options = ( @@ -114,6 +110,43 @@ def test_list_entries_with_paging(self): self.assertEqual(page_size, SIZE) self.assertEqual(options.page_token, TOKEN) + def test_list_entries_with_paging(self): + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + payload = {'message': 'MESSAGE', 'weather': 'sunny'} + struct_pb = Struct(fields={ + key: Value(string_value=value) for key, value in payload.items() + }) + self._list_entries_with_paging_helper(payload, struct_pb) + + def test_list_entries_with_paging_nested_payload(self): + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + payload = {} + struct_fields = {} + # Add a simple key. + key = 'message' + payload[key] = 'MESSAGE' + struct_fields[key] = Value(string_value=payload[key]) + # Add a nested key. 
+ key = 'weather' + sub_value = {} + sub_fields = {} + sub_key = 'temperature' + sub_value[sub_key] = 75 + sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) + sub_key = 'precipitation' + sub_value[sub_key] = False + sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) + # Update the parent payload. + payload[key] = sub_value + struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) + # Make the struct_pb for our dict. + struct_pb = Struct(fields=struct_fields) + self._list_entries_with_paging_helper(payload, struct_pb) + def test_list_entries_with_extra_properties(self): from datetime import datetime from google.logging.type.log_severity_pb2 import WARNING @@ -317,18 +350,16 @@ def test_write_entries_w_extra_properties(self): self.assertIsNone(options) # pylint: enable=too-many-statements - def test_write_entries_multiple(self): + def _write_entries_multiple_helper(self, json_payload, json_struct_pb): # pylint: disable=too-many-statements import datetime from google.logging.type.log_severity_pb2 import WARNING from google.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.any_pb2 import Any - from google.protobuf.struct_pb2 import Struct from google.cloud._helpers import _datetime_to_rfc3339, UTC TEXT = 'TEXT' NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' - JSON = {'payload': 'PAYLOAD', 'type': 'json'} PROTO = { '@type': TIMESTAMP_TYPE_URL, 'value': _datetime_to_rfc3339(NOW), @@ -339,7 +370,7 @@ def test_write_entries_multiple(self): ENTRIES = [ {'textPayload': TEXT, 'severity': WARNING}, - {'jsonPayload': JSON, + {'jsonPayload': json_payload, 'operation': {'producer': PRODUCER, 'id': OPID}}, {'protoPayload': PROTO, 'httpRequest': {'requestUrl': URL}}, @@ -373,10 +404,7 @@ def test_write_entries_multiple(self): self.assertEqual(entry.log_name, '') self.assertEqual(entry.resource.type, '') self.assertEqual(entry.labels, {}) - json_struct = entry.json_payload - self.assertIsInstance(json_struct, Struct) - self.assertEqual(json_struct.fields['payload'].string_value, - JSON['payload']) + self.assertEqual(entry.json_payload, json_struct_pb) operation = entry.operation self.assertEqual(operation.producer, PRODUCER) self.assertEqual(operation.id, OPID) @@ -399,6 +427,44 @@ def test_write_entries_multiple(self): self.assertIsNone(options) # pylint: enable=too-many-statements + def test_write_entries_multiple(self): + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + json_payload = {'payload': 'PAYLOAD', 'type': 'json'} + json_struct_pb = Struct(fields={ + key: Value(string_value=value) + for key, value in json_payload.items() + }) + self._write_entries_multiple_helper(json_payload, json_struct_pb) + + def test_write_entries_multiple_nested_payload(self): + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + json_payload = {} + struct_fields = {} + # Add a simple key. + key = 'hello' + json_payload[key] = 'me you looking for' + struct_fields[key] = Value(string_value=json_payload[key]) + # Add a nested key. + key = 'everything' + sub_value = {} + sub_fields = {} + sub_key = 'answer' + sub_value[sub_key] = 42 + sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) + sub_key = 'really?' + sub_value[sub_key] = False + sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) + # Update the parent payload. 
+ json_payload[key] = sub_value + struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) + # Make the struct_pb for our dict. + json_struct_pb = Struct(fields=struct_fields) + self._write_entries_multiple_helper(json_payload, json_struct_pb) + def test_logger_delete(self): LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) gax_api = _GAXLoggingAPI() @@ -1057,12 +1123,6 @@ def __init__(self, type_='global', **labels): self.labels = labels -class _StructPB(object): - - def __init__(self, fields): - self.fields = fields - - class _LogEntryPB(object): severity = 0 From dbb5c2c964a8bb1576cb6b996602ef34ffbe7b3c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 10:51:23 -0700 Subject: [PATCH 016/855] Using nested JSON payload in logging sys. tests. Also using ParseDict() instead of Parse() and a wasted json.dumps(). --- .../google/cloud/logging/_gax.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index bd55820d056e..61d372c4b35f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -14,8 +14,6 @@ """GAX wrapper for Logging API requests.""" -import json - from google.gax import CallOptions from google.gax import INITIAL_PAGE from google.gax.errors import GaxError @@ -24,7 +22,7 @@ from google.logging.v2.logging_config_pb2 import LogSink from google.logging.v2.logging_metrics_pb2 import LogMetric from google.logging.v2.log_entry_pb2 import LogEntry -from google.protobuf.json_format import Parse +from google.protobuf.json_format import ParseDict from grpc import StatusCode from google.cloud._helpers import _datetime_to_pb_timestamp @@ -603,14 +601,10 @@ def _log_entry_mapping_to_pb(mapping): entry_pb.labels[key] = value if 'jsonPayload' in mapping: - # NOTE: ``json.dumps`` is wasteful here because internally, - # ``Parse`` will just call ``json.loads``. However, - # there is no equivalent public function to parse on raw - # dictionaries, so we waste cycles on parse/unparse. - Parse(json.dumps(mapping['jsonPayload']), entry_pb.json_payload) + ParseDict(mapping['jsonPayload'], entry_pb.json_payload) if 'protoPayload' in mapping: - Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload) + ParseDict(mapping['protoPayload'], entry_pb.proto_payload) if 'httpRequest' in mapping: _http_request_mapping_to_pb( From 89e791d51ff2150582867a7ff3c88a831a789d6b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 20:28:20 -0700 Subject: [PATCH 017/855] Using MessageToDict/ParseDict in logging._gax. This replaces lots of ad-hoc conversion code. 
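For reference, the round trip these helpers now lean on is the
stock json_format pair; a sketch with made-up sink values:

    from google.logging.v2.logging_config_pb2 import LogSink
    from google.protobuf.json_format import MessageToDict, ParseDict

    sink_pb = LogSink(name='my-sink',
                      destination='storage.googleapis.com/my-bucket',
                      filter='severity>=ERROR')
    # Message -> JSON-API-style mapping (camelCase keys, default
    # values omitted), the same shape the REST API returns.
    mapping = MessageToDict(sink_pb)
    # Mapping -> message, populating a fresh protobuf in place.
    ParseDict(mapping, LogSink())

One visible consequence: proto3 JSON serializes 64-bit integers as
strings, which is why the updated extra-properties test below now
compares requestSize/responseSize against str(...) values.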
--- .../google/cloud/logging/_gax.py | 164 +------------ .../unit_tests/test__gax.py | 218 ++++++++++-------- 2 files changed, 136 insertions(+), 246 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 61d372c4b35f..96180cb4986e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -18,15 +18,14 @@ from google.gax import INITIAL_PAGE from google.gax.errors import GaxError from google.gax.grpc import exc_to_code -from google.logging.type.log_severity_pb2 import LogSeverity from google.logging.v2.logging_config_pb2 import LogSink from google.logging.v2.logging_metrics_pb2 import LogMetric from google.logging.v2.log_entry_pb2 import LogEntry +from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict from grpc import StatusCode -from google.cloud._helpers import _datetime_to_pb_timestamp -from google.cloud._helpers import _pb_timestamp_to_rfc3339 +from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound @@ -420,20 +419,6 @@ def metric_delete(self, project, metric_name): raise -def _mon_resource_pb_to_mapping(resource_pb): - """Helper for :func:_log_entry_pb_to_mapping`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - mapping = { - 'type': resource_pb.type, - } - if resource_pb.labels: - mapping['labels'] = resource_pb.labels - return mapping - - def _value_pb_to_value(value_pb): """Helper for :func:`_log_entry_pb_to_mapping`. @@ -483,152 +468,29 @@ def _log_entry_pb_to_mapping(entry_pb): Performs "impedance matching" between the protobuf attrs and the keys expected in the JSON API. """ - mapping = { - 'logName': entry_pb.log_name, - 'resource': _mon_resource_pb_to_mapping(entry_pb.resource), - 'severity': LogSeverity.Name(entry_pb.severity), - 'insertId': entry_pb.insert_id, - 'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp), - 'labels': entry_pb.labels, - } - if entry_pb.HasField('text_payload'): - mapping['textPayload'] = entry_pb.text_payload - - if entry_pb.HasField('json_payload'): - mapping['jsonPayload'] = _struct_pb_to_mapping(entry_pb.json_payload) - - if entry_pb.HasField('proto_payload'): - mapping['protoPayload'] = entry_pb.proto_payload - - if entry_pb.http_request: - request = entry_pb.http_request - mapping['httpRequest'] = { - 'requestMethod': request.request_method, - 'requestUrl': request.request_url, - 'status': request.status, - 'referer': request.referer, - 'userAgent': request.user_agent, - 'cacheHit': request.cache_hit, - 'requestSize': request.request_size, - 'responseSize': request.response_size, - 'remoteIp': request.remote_ip, - } - - if entry_pb.operation: - operation = entry_pb.operation - mapping['operation'] = { - 'producer': operation.producer, - 'id': operation.id, - 'first': operation.first, - 'last': operation.last, - } - - return mapping - - -def _http_request_mapping_to_pb(info, request): - """Helper for _log_entry_mapping_to_pb - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. 
- """ - optional_request_keys = { - 'requestMethod': 'request_method', - 'requestUrl': 'request_url', - 'status': 'status', - 'referer': 'referer', - 'userAgent': 'user_agent', - 'cacheHit': 'cache_hit', - 'requestSize': 'request_size', - 'responseSize': 'response_size', - 'remoteIp': 'remote_ip', - } - for key, pb_name in optional_request_keys.items(): - if key in info: - setattr(request, pb_name, info[key]) - - -def _log_operation_mapping_to_pb(info, operation): - """Helper for _log_entry_mapping_to_pb - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - operation.producer = info['producer'] - operation.id = info['id'] - - if 'first' in info: - operation.first = info['first'] - - if 'last' in info: - operation.last = info['last'] + return MessageToDict(entry_pb) def _log_entry_mapping_to_pb(mapping): """Helper for :meth:`write_entries`, et aliae - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. + Performs "impedance matching" between the protobuf attrs and + the keys expected in the JSON API. """ - # pylint: disable=too-many-branches entry_pb = LogEntry() - - optional_scalar_keys = { - 'logName': 'log_name', - 'insertId': 'insert_id', - 'textPayload': 'text_payload', - } - - for key, pb_name in optional_scalar_keys.items(): - if key in mapping: - setattr(entry_pb, pb_name, mapping[key]) - - if 'resource' in mapping: - entry_pb.resource.type = mapping['resource']['type'] - - if 'severity' in mapping: - severity = mapping['severity'] - if isinstance(severity, str): - severity = LogSeverity.Value(severity) - entry_pb.severity = severity - if 'timestamp' in mapping: - timestamp = _datetime_to_pb_timestamp(mapping['timestamp']) - entry_pb.timestamp.CopyFrom(timestamp) - - if 'labels' in mapping: - for key, value in mapping['labels'].items(): - entry_pb.labels[key] = value - - if 'jsonPayload' in mapping: - ParseDict(mapping['jsonPayload'], entry_pb.json_payload) - - if 'protoPayload' in mapping: - ParseDict(mapping['protoPayload'], entry_pb.proto_payload) - - if 'httpRequest' in mapping: - _http_request_mapping_to_pb( - mapping['httpRequest'], entry_pb.http_request) - - if 'operation' in mapping: - _log_operation_mapping_to_pb( - mapping['operation'], entry_pb.operation) - + mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp']) + ParseDict(mapping, entry_pb) return entry_pb - # pylint: enable=too-many-branches def _log_sink_pb_to_mapping(sink_pb): """Helper for :meth:`list_sinks`, et aliae - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. + Performs "impedance matching" between the protobuf attrs and + the keys expected in the JSON API. """ - return { - 'name': sink_pb.name, - 'destination': sink_pb.destination, - 'filter': sink_pb.filter, - } + return MessageToDict(sink_pb) def _log_metric_pb_to_mapping(metric_pb): @@ -637,8 +499,4 @@ def _log_metric_pb_to_mapping(metric_pb): Performs "impedance matching" between the protobuf attrs and the keys expected in the JSON API. 
""" - return { - 'name': metric_pb.name, - 'description': metric_pb.description, - 'filter': metric_pb.filter, - } + return MessageToDict(metric_pb) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index a89cffe09172..a3376780ee11 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -50,14 +50,26 @@ def test_ctor(self): self.assertIs(api._gax_api, gax_api) def test_list_entries_no_paging(self): + import datetime + + from google.api.monitored_resource_pb2 import MonitoredResource from google.gax import INITIAL_PAGE - from google.cloud.logging import DESCENDING + from google.logging.v2.log_entry_pb2 import LogEntry + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._testing import _GAXPageIterator + from google.cloud.logging import DESCENDING TOKEN = 'TOKEN' TEXT = 'TEXT' - response = _GAXPageIterator( - [_LogEntryPB(self.LOG_NAME, text_payload=TEXT)], TOKEN) + resource_pb = MonitoredResource(type='global') + timestamp_pb = _datetime_to_pb_timestamp( + datetime.datetime.utcnow()) + entry_pb = LogEntry(log_name=self.LOG_NAME, + resource=resource_pb, + timestamp=timestamp_pb, + text_payload=TEXT) + response = _GAXPageIterator([entry_pb], TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -81,13 +93,24 @@ def test_list_entries_no_paging(self): self.assertIs(options.page_token, INITIAL_PAGE) def _list_entries_with_paging_helper(self, payload, struct_pb): + import datetime + + from google.api.monitored_resource_pb2 import MonitoredResource + from google.logging.v2.log_entry_pb2 import LogEntry from google.cloud._testing import _GAXPageIterator + from google.cloud._helpers import _datetime_to_pb_timestamp SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator( - [_LogEntryPB(self.LOG_NAME, json_payload=struct_pb)], NEW_TOKEN) + resource_pb = MonitoredResource(type='global') + timestamp_pb = _datetime_to_pb_timestamp( + datetime.datetime.utcnow()) + entry_pb = LogEntry(log_name=self.LOG_NAME, + resource=resource_pb, + timestamp=timestamp_pb, + json_payload=struct_pb) + response = _GAXPageIterator([entry_pb], NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -147,36 +170,74 @@ def test_list_entries_with_paging_nested_payload(self): struct_pb = Struct(fields=struct_fields) self._list_entries_with_paging_helper(payload, struct_pb) + def _make_log_entry_with_extras(self, labels, iid, type_url, now): + from google.api.monitored_resource_pb2 import MonitoredResource + from google.logging.v2.log_entry_pb2 import LogEntry + from google.logging.v2.log_entry_pb2 import LogEntryOperation + from google.logging.type.http_request_pb2 import HttpRequest + from google.logging.type.log_severity_pb2 import WARNING + from google.protobuf.any_pb2 import Any + + from google.cloud._helpers import _datetime_to_pb_timestamp + + resource_pb = MonitoredResource( + type='global', labels=labels) + proto_payload = Any(type_url=type_url) + timestamp_pb = _datetime_to_pb_timestamp(now) + request_pb = HttpRequest( + request_url='http://example.com/requested', + request_method='GET', + status=200, + referer='http://example.com/referer', + user_agent='AGENT', + cache_hit=True, + request_size=256, + response_size=1024, + remote_ip='1.2.3.4', + ) + operation_pb = LogEntryOperation( + producer='PRODUCER', + first=True, + 
last=True, + id='OPID', + ) + entry_pb = LogEntry(log_name=self.LOG_NAME, + resource=resource_pb, + proto_payload=proto_payload, + timestamp=timestamp_pb, + severity=WARNING, + insert_id=iid, + http_request=request_pb, + labels=labels, + operation=operation_pb) + return entry_pb + def test_list_entries_with_extra_properties(self): from datetime import datetime - from google.logging.type.log_severity_pb2 import WARNING - from google.cloud._testing import _GAXPageIterator + + # Import the wrappers to register the type URL for BoolValue + # pylint: disable=unused-variable + from google.protobuf import wrappers_pb2 + # pylint: enable=unused-variable + from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud._testing import _GAXPageIterator + NOW = datetime.utcnow().replace(tzinfo=UTC) SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} SEVERITY = 'WARNING' LABELS = { 'foo': 'bar', } IID = 'IID' - request = _HTTPRequestPB() - operation = _LogEntryOperationPB() - EXTRAS = { - 'severity': WARNING, - 'labels': LABELS, - 'insert_id': IID, - 'http_request': request, - 'operation': operation, - } - ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS) - ENTRY.resource.labels['foo'] = 'bar' - ENTRY.timestamp = _datetime_to_pb_timestamp(NOW) - response = _GAXPageIterator([ENTRY], NEW_TOKEN) + bool_type_url = 'type.googleapis.com/google.protobuf.BoolValue' + entry_pb = self._make_log_entry_with_extras( + LABELS, IID, bool_type_url, NOW) + + response = _GAXPageIterator([entry_pb], NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -189,23 +250,28 @@ def test_list_entries_with_extra_properties(self): self.assertEqual(entry['logName'], self.LOG_NAME) self.assertEqual(entry['resource'], {'type': 'global', 'labels': {'foo': 'bar'}}) - self.assertEqual(entry['protoPayload'], PAYLOAD) + self.assertEqual(entry['protoPayload'], { + '@type': bool_type_url, + 'value': False, + }) self.assertEqual(entry['severity'], SEVERITY) self.assertEqual(entry['labels'], LABELS) self.assertEqual(entry['insertId'], IID) self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW)) + request = entry_pb.http_request EXPECTED_REQUEST = { 'requestMethod': request.request_method, 'requestUrl': request.request_url, 'status': request.status, - 'requestSize': request.request_size, - 'responseSize': request.response_size, + 'requestSize': str(request.request_size), + 'responseSize': str(request.response_size), 'referer': request.referer, 'userAgent': request.user_agent, 'remoteIp': request.remote_ip, 'cacheHit': request.cache_hit, } self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST) + operation = entry_pb.operation EXPECTED_OPERATION = { 'producer': operation.producer, 'id': operation.id, @@ -521,15 +587,18 @@ def test_ctor(self): def test_list_sinks_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator + from google.logging.v2.logging_config_pb2 import LogSink + TOKEN = 'TOKEN' SINKS = [{ 'name': self.SINK_PATH, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, }] - response = _GAXPageIterator( - [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], - TOKEN) + sink_pb = LogSink(name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=self.FILTER) + response = _GAXPageIterator([sink_pb], TOKEN) gax_api = 
_GAXSinksAPI(_list_sinks_response=response) api = self._makeOne(gax_api) @@ -545,6 +614,8 @@ def test_list_sinks_no_paging(self): def test_list_sinks_w_paging(self): from google.cloud._testing import _GAXPageIterator + from google.logging.v2.logging_config_pb2 import LogSink + TOKEN = 'TOKEN' PAGE_SIZE = 42 SINKS = [{ @@ -552,9 +623,10 @@ def test_list_sinks_w_paging(self): 'filter': self.FILTER, 'destination': self.DESTINATION_URI, }] - response = _GAXPageIterator( - [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], - None) + sink_pb = LogSink(name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=self.FILTER) + response = _GAXPageIterator([sink_pb], None) gax_api = _GAXSinksAPI(_list_sinks_response=response) api = self._makeOne(gax_api) @@ -623,13 +695,16 @@ def test_sink_get_miss(self): api.sink_get(self.PROJECT, self.SINK_NAME) def test_sink_get_hit(self): + from google.logging.v2.logging_config_pb2 import LogSink + RESPONSE = { 'name': self.SINK_PATH, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - sink_pb = _LogSinkPB( - self.SINK_PATH, self.DESTINATION_URI, self.FILTER) + sink_pb = LogSink(name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=self.FILTER) gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) api = self._makeOne(gax_api) @@ -725,15 +800,18 @@ def test_ctor(self): def test_list_metrics_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator + from google.logging.v2.logging_metrics_pb2 import LogMetric + TOKEN = 'TOKEN' METRICS = [{ 'name': self.METRIC_PATH, 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - response = _GAXPageIterator( - [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, self.FILTER)], - TOKEN) + metric_pb = LogMetric(name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=self.FILTER) + response = _GAXPageIterator([metric_pb], TOKEN) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) api = self._makeOne(gax_api) @@ -749,6 +827,8 @@ def test_list_metrics_no_paging(self): def test_list_metrics_w_paging(self): from google.cloud._testing import _GAXPageIterator + from google.logging.v2.logging_metrics_pb2 import LogMetric + TOKEN = 'TOKEN' PAGE_SIZE = 42 METRICS = [{ @@ -756,9 +836,10 @@ def test_list_metrics_w_paging(self): 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - response = _GAXPageIterator( - [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, self.FILTER)], - None) + metric_pb = LogMetric(name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=self.FILTER) + response = _GAXPageIterator([metric_pb], None) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) api = self._makeOne(gax_api) @@ -827,13 +908,16 @@ def test_metric_get_miss(self): api.metric_get(self.PROJECT, self.METRIC_NAME) def test_metric_get_hit(self): + from google.logging.v2.logging_metrics_pb2 import LogMetric + RESPONSE = { 'name': self.METRIC_PATH, 'filter': self.FILTER, 'description': self.DESCRIPTION, } - metric_pb = _LogMetricPB( - self.METRIC_PATH, self.DESCRIPTION, self.FILTER) + metric_pb = LogMetric(name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=self.FILTER) gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) api = self._makeOne(gax_api) @@ -1096,58 +1180,6 @@ def delete_log_metric(self, metric_name, options=None): raise GaxError('notfound', self._make_grpc_not_found()) -class _HTTPRequestPB(object): - - request_url = 'http://example.com/requested' - request_method = 'GET' - status = 200 - 
referer = 'http://example.com/referer' - user_agent = 'AGENT' - cache_hit = False - request_size = 256 - response_size = 1024 - remote_ip = '1.2.3.4' - - -class _LogEntryOperationPB(object): - - producer = 'PRODUCER' - first = last = False - id = 'OPID' - - -class _ResourcePB(object): - - def __init__(self, type_='global', **labels): - self.type = type_ - self.labels = labels - - -class _LogEntryPB(object): - - severity = 0 - http_request = operation = insert_id = None - text_payload = json_payload = proto_payload = None - - def __init__(self, log_name, **kw): - self.log_name = log_name - self.resource = _ResourcePB() - self.timestamp = self._make_timestamp() - self.labels = kw.pop('labels', {}) - self.__dict__.update(kw) - - def HasField(self, field_name): - return getattr(self, field_name, None) is not None - - @staticmethod - def _make_timestamp(): - from datetime import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_pb_timestamp - NOW = datetime.utcnow().replace(tzinfo=UTC) - return _datetime_to_pb_timestamp(NOW) - - class _LogSinkPB(object): def __init__(self, name, destination, filter_): From 43fe084d8080fb552e9b6ddc13a0a001f5fd6820 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 20:32:09 -0700 Subject: [PATCH 018/855] Removing all thin wrappers around MessageToDict in logging. Also replacing _struct_pb_to_mapping with MessageToDict and removing the unused _value_pb_to_value (wasn't showing up in uncovered code / non-linted code since it was fully unit tested). --- .../google/cloud/logging/_gax.py | 84 ++----------------- .../unit_tests/test__gax.py | 69 --------------- 2 files changed, 7 insertions(+), 146 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 96180cb4986e..cd5ad58cd1a4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -76,7 +76,7 @@ def list_entries(self, projects, filter_='', order_by='', page_iter = self._gax_api.list_log_entries( projects, filter_=filter_, order_by=order_by, page_size=page_size, options=options) - entries = [_log_entry_pb_to_mapping(entry_pb) + entries = [MessageToDict(entry_pb) for entry_pb in page_iter.next()] token = page_iter.page_token or None return entries, token @@ -162,7 +162,7 @@ def list_sinks(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_sinks(path, page_size=page_size, options=options) - sinks = [_log_sink_pb_to_mapping(log_sink_pb) + sinks = [MessageToDict(log_sink_pb) for log_sink_pb in page_iter.next()] token = page_iter.page_token or None return sinks, token @@ -220,7 +220,7 @@ def sink_get(self, project, sink_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise - return _log_sink_pb_to_mapping(sink_pb) + return MessageToDict(sink_pb) def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. @@ -252,7 +252,7 @@ def sink_update(self, project, sink_name, filter_, destination): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise - return _log_sink_pb_to_mapping(sink_pb) + return MessageToDict(sink_pb) def sink_delete(self, project, sink_name): """API call: delete a sink resource. 
@@ -309,7 +309,7 @@ def list_metrics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_log_metrics( path, page_size=page_size, options=options) - metrics = [_log_metric_pb_to_mapping(log_metric_pb) + metrics = [MessageToDict(log_metric_pb) for log_metric_pb in page_iter.next()] token = page_iter.page_token or None return metrics, token @@ -366,7 +366,7 @@ def metric_get(self, project, metric_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise - return _log_metric_pb_to_mapping(metric_pb) + return MessageToDict(metric_pb) def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. @@ -398,7 +398,7 @@ def metric_update(self, project, metric_name, filter_, description): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise - return _log_metric_pb_to_mapping(metric_pb) + return MessageToDict(metric_pb) def metric_delete(self, project, metric_name): """API call: delete a metric resource. @@ -419,58 +419,6 @@ def metric_delete(self, project, metric_name): raise -def _value_pb_to_value(value_pb): - """Helper for :func:`_log_entry_pb_to_mapping`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - kind = value_pb.WhichOneof('kind') - - if kind is None: - result = None - - elif kind == 'string_value': - result = value_pb.string_value - - elif kind == 'bool_value': - result = value_pb.bool_value - - elif kind == 'number_value': - result = value_pb.number_value - - elif kind == 'list_value': - result = [_value_pb_to_value(element) - for element in value_pb.list_value.values] - - elif kind == 'struct_value': - result = _struct_pb_to_mapping(value_pb.struct_value) - - else: - raise ValueError('Value protobuf had unknown kind: %s' % (kind,)) - - return result - - -def _struct_pb_to_mapping(struct_pb): - """Helper for :func:`_log_entry_pb_to_mapping`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return {key: _value_pb_to_value(struct_pb.fields[key]) - for key in struct_pb.fields} - - -def _log_entry_pb_to_mapping(entry_pb): - """Helper for :meth:`list_entries`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return MessageToDict(entry_pb) - - def _log_entry_mapping_to_pb(mapping): """Helper for :meth:`write_entries`, et aliae @@ -482,21 +430,3 @@ def _log_entry_mapping_to_pb(mapping): mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp']) ParseDict(mapping, entry_pb) return entry_pb - - -def _log_sink_pb_to_mapping(sink_pb): - """Helper for :meth:`list_sinks`, et aliae - - Performs "impedance matching" between the protobuf attrs and - the keys expected in the JSON API. - """ - return MessageToDict(sink_pb) - - -def _log_metric_pb_to_mapping(metric_pb): - """Helper for :meth:`list_metrics`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. 
- """ - return MessageToDict(metric_pb) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index a3376780ee11..f6216ef4538c 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -995,75 +995,6 @@ def test_metric_delete_hit(self): self.assertIsNone(options) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') -class Test_value_pb_to_value(_Base, unittest.TestCase): - - def _callFUT(self, value_pb): - from google.cloud.logging._gax import _value_pb_to_value - return _value_pb_to_value(value_pb) - - def test_w_null_values(self): - from google.protobuf.struct_pb2 import Value - value_pb = Value() - self.assertIsNone(self._callFUT(value_pb)) - value_pb = Value(null_value=None) - self.assertIsNone(self._callFUT(value_pb)) - - def test_w_string_value(self): - from google.protobuf.struct_pb2 import Value - STRING = 'STRING' - value_pb = Value(string_value=STRING) - self.assertEqual(self._callFUT(value_pb), STRING) - - def test_w_bool_values(self): - from google.protobuf.struct_pb2 import Value - true_value_pb = Value(bool_value=True) - self.assertIs(self._callFUT(true_value_pb), True) - false_value_pb = Value(bool_value=False) - self.assertIs(self._callFUT(false_value_pb), False) - - def test_w_number_values(self): - from google.protobuf.struct_pb2 import Value - ANSWER = 42 - PI = 3.1415926 - int_value_pb = Value(number_value=ANSWER) - self.assertEqual(self._callFUT(int_value_pb), ANSWER) - float_value_pb = Value(number_value=PI) - self.assertEqual(self._callFUT(float_value_pb), PI) - - def test_w_list_value(self): - from google.protobuf.struct_pb2 import Value - STRING = 'STRING' - PI = 3.1415926 - value_pb = Value() - value_pb.list_value.values.add(string_value=STRING) - value_pb.list_value.values.add(bool_value=True) - value_pb.list_value.values.add(number_value=PI) - self.assertEqual(self._callFUT(value_pb), [STRING, True, PI]) - - def test_w_struct_value(self): - from google.protobuf.struct_pb2 import Value - STRING = 'STRING' - PI = 3.1415926 - value_pb = Value() - value_pb.struct_value.fields['string'].string_value = STRING - value_pb.struct_value.fields['bool'].bool_value = True - value_pb.struct_value.fields['number'].number_value = PI - self.assertEqual(self._callFUT(value_pb), - {'string': STRING, 'bool': True, 'number': PI}) - - def test_w_unknown_kind(self): - - class _Value(object): - - def WhichOneof(self, name): - assert name == 'kind' - return 'UNKNOWN' - - with self.assertRaises(ValueError): - self._callFUT(_Value()) - - class _GAXLoggingAPI(_GAXBaseAPI): _delete_not_found = False From db03a90a74d4fcc70f1e18b5c74620659fbcaf7c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 20:44:17 -0700 Subject: [PATCH 019/855] Removing un-needed PB mocks in GAX logging unit tests. 
--- .../unit_tests/test__gax.py | 28 ++++++------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index f6216ef4538c..f0c3ec6595d3 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -738,8 +738,10 @@ def test_sink_update_miss(self): def test_sink_update_hit(self): from google.logging.v2.logging_config_pb2 import LogSink - response = _LogSinkPB( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + response = LogSink(name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=self.FILTER) gax_api = _GAXSinksAPI(_update_sink_response=response) api = self._makeOne(gax_api) @@ -951,8 +953,10 @@ def test_metric_update_miss(self): def test_metric_update_hit(self): from google.logging.v2.logging_metrics_pb2 import LogMetric - response = _LogMetricPB( - self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + response = LogMetric(name=self.METRIC_NAME, + description=self.DESCRIPTION, + filter=self.FILTER) gax_api = _GAXMetricsAPI(_update_log_metric_response=response) api = self._makeOne(gax_api) @@ -1109,19 +1113,3 @@ def delete_log_metric(self, metric_name, options=None): raise GaxError('error') if self._log_metric_not_found: raise GaxError('notfound', self._make_grpc_not_found()) - - -class _LogSinkPB(object): - - def __init__(self, name, destination, filter_): - self.name = name - self.destination = destination - self.filter = filter_ - - -class _LogMetricPB(object): - - def __init__(self, name, description, filter_): - self.name = name - self.description = description - self.filter = filter_ From 1f72d76c9d35e54d08098ca2e5e01c23782a9aa8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:43:27 -0700 Subject: [PATCH 020/855] Adding GAXIterator._wrap_gax for wrapping the GAX iterator. Also updating the _GAXPageIterator mock to allow multiple pages. 
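A sketch of the updated mock's calling convention, assuming the
shared google.cloud._testing helper (item values made up):

    from google.cloud._testing import _GAXPageIterator

    # Each positional argument is one page of items; any
    # continuation token is now passed by keyword.
    response = _GAXPageIterator(['item1'], ['item2'], page_token='TOKEN')
    # A final page simply omits the token:
    response = _GAXPageIterator(['item'])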
--- .../google-cloud-logging/unit_tests/test__gax.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index f0c3ec6595d3..1fd5d2fc9a22 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -69,7 +69,7 @@ def test_list_entries_no_paging(self): resource=resource_pb, timestamp=timestamp_pb, text_payload=TEXT) - response = _GAXPageIterator([entry_pb], TOKEN) + response = _GAXPageIterator([entry_pb], page_token=TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -110,7 +110,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): resource=resource_pb, timestamp=timestamp_pb, json_payload=struct_pb) - response = _GAXPageIterator([entry_pb], NEW_TOKEN) + response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -237,7 +237,7 @@ def test_list_entries_with_extra_properties(self): entry_pb = self._make_log_entry_with_extras( LABELS, IID, bool_type_url, NOW) - response = _GAXPageIterator([entry_pb], NEW_TOKEN) + response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) api = self._makeOne(gax_api) @@ -598,7 +598,7 @@ def test_list_sinks_no_paging(self): sink_pb = LogSink(name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=self.FILTER) - response = _GAXPageIterator([sink_pb], TOKEN) + response = _GAXPageIterator([sink_pb], page_token=TOKEN) gax_api = _GAXSinksAPI(_list_sinks_response=response) api = self._makeOne(gax_api) @@ -626,7 +626,7 @@ def test_list_sinks_w_paging(self): sink_pb = LogSink(name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=self.FILTER) - response = _GAXPageIterator([sink_pb], None) + response = _GAXPageIterator([sink_pb]) gax_api = _GAXSinksAPI(_list_sinks_response=response) api = self._makeOne(gax_api) @@ -813,7 +813,7 @@ def test_list_metrics_no_paging(self): metric_pb = LogMetric(name=self.METRIC_PATH, description=self.DESCRIPTION, filter=self.FILTER) - response = _GAXPageIterator([metric_pb], TOKEN) + response = _GAXPageIterator([metric_pb], page_token=TOKEN) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) api = self._makeOne(gax_api) @@ -841,7 +841,7 @@ def test_list_metrics_w_paging(self): metric_pb = LogMetric(name=self.METRIC_PATH, description=self.DESCRIPTION, filter=self.FILTER) - response = _GAXPageIterator([metric_pb], None) + response = _GAXPageIterator([metric_pb]) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) api = self._makeOne(gax_api) From 32492349be1e88c894139665a32b8cb339f70d8f Mon Sep 17 00:00:00 2001 From: omaray Date: Wed, 26 Oct 2016 11:28:02 -0700 Subject: [PATCH 021/855] Change the version of the Logging API used from v2beta1 to v2 --- .../google-cloud-logging/google/cloud/logging/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 08a540671f5f..69e8cd7aee2d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -35,7 +35,7 @@ class Connection(base_connection.JSONConnection): API_BASE_URL = 
'https://logging.googleapis.com' """The base of the API call URL.""" - API_VERSION = 'v2beta1' + API_VERSION = 'v2' """The version of the API, used in building the API call's URL.""" API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' From 21ef9057a69f3add7c01ca3df8a8debb80bd9d7f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 27 Oct 2016 11:28:21 -0400 Subject: [PATCH 022/855] Fix up logging docs URLs after 'v2beta1' -> 'v2' transition. Follow-on to #2625. --- .../google/cloud/logging/_gax.py | 4 +-- .../google/cloud/logging/client.py | 14 ++++---- .../google/cloud/logging/connection.py | 34 +++++++++---------- .../google/cloud/logging/entries.py | 6 ++-- .../google/cloud/logging/logger.py | 12 +++---- .../google/cloud/logging/metric.py | 12 +++---- .../google/cloud/logging/sink.py | 12 +++---- 7 files changed, 47 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index cd5ad58cd1a4..57cd4981cc61 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -171,7 +171,7 @@ def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create :type project: str :param project: ID of the project in which to create the sink. @@ -318,7 +318,7 @@ def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create :type project: str :param project: ID of the project in which to create the metric. diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 3a44fd4658cf..998e02d4e0c7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -95,8 +95,8 @@ def logging_api(self): """Helper for logging-related API calls. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs """ if self._logging_api is None: if self._use_gax: @@ -111,7 +111,7 @@ def sinks_api(self): """Helper for log sink-related API calls. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks """ if self._sinks_api is None: if _USE_GAX: @@ -126,7 +126,7 @@ def metrics_api(self): """Helper for log metric-related API calls. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics """ if self._metrics_api is None: if _USE_GAX: @@ -177,7 +177,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, """Return a page of log entries. 
See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, @@ -244,7 +244,7 @@ def list_sinks(self, page_size=None, page_token=None): """List sinks for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/list :type page_size: int :param page_size: maximum number of sinks to return, If not passed, @@ -293,7 +293,7 @@ def list_metrics(self, page_size=None, page_token=None): """List metrics for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/list :type page_size: int :param page_size: maximum number of metrics to return, If not passed, diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 69e8cd7aee2d..9d442b29267b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -52,8 +52,8 @@ class _LoggingAPI(object): """Helper mapping logging-related APIs. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs :type connection: :class:`google.cloud.logging.connection.Connection` :param connection: the connection used to make API requests. @@ -66,7 +66,7 @@ def list_entries(self, projects, filter_=None, order_by=None, """Return a page of log entry resources. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, @@ -118,7 +118,7 @@ def write_entries(self, entries, logger_name=None, resource=None, """API call: log an entry resource via a POST request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write :type entries: sequence of mapping :param entries: the log entry resources to log. @@ -153,7 +153,7 @@ def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs/delete :type project: str :param project: ID of project containing the log entries to delete @@ -169,7 +169,7 @@ class _SinksAPI(object): """Helper mapping sink-related APIs. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks :type connection: :class:`google.cloud.logging.connection.Connection` :param connection: the connection used to make API requests. 
@@ -181,7 +181,7 @@ def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/list :type project: str :param project: ID of the project whose sinks are to be listed. @@ -218,7 +218,7 @@ def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create :type project: str :param project: ID of the project in which to create the sink. @@ -246,7 +246,7 @@ def sink_get(self, project, sink_name): """API call: retrieve a sink resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get :type project: str :param project: ID of the project containing the sink. @@ -264,7 +264,7 @@ def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/update :type project: str :param project: ID of the project containing the sink. @@ -292,7 +292,7 @@ def sink_delete(self, project, sink_name): """API call: delete a sink resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/delete :type project: str :param project: ID of the project containing the sink. @@ -308,7 +308,7 @@ class _MetricsAPI(object): """Helper mapping sink-related APIs. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics :type connection: :class:`google.cloud.logging.connection.Connection` :param connection: the connection used to make API requests. @@ -320,7 +320,7 @@ def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/list :type project: str :param project: ID of the project whose metrics are to be listed. @@ -357,7 +357,7 @@ def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create :type project: str :param project: ID of the project in which to create the metric. @@ -384,7 +384,7 @@ def metric_get(self, project, metric_name): """API call: retrieve a metric resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get :type project: str :param project: ID of the project containing the metric. @@ -402,7 +402,7 @@ def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. 
See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/update :type project: str :param project: ID of the project containing the metric. @@ -429,7 +429,7 @@ def metric_delete(self, project, metric_name): """API call: delete a metric resource. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/delete :type project: str :param project: ID of the project containing the metric. diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 0de34e68ed25..417d42cefdca 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -125,7 +125,7 @@ class TextEntry(_BaseEntry): """Entry created with ``textPayload``. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry """ _PAYLOAD_KEY = 'textPayload' @@ -134,7 +134,7 @@ class StructEntry(_BaseEntry): """Entry created with ``jsonPayload``. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry """ _PAYLOAD_KEY = 'jsonPayload' @@ -143,7 +143,7 @@ class ProtobufEntry(_BaseEntry): """Entry created with ``protoPayload``. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry """ _PAYLOAD_KEY = 'protoPayload' diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index d078dda29726..9ef9a2d3e887 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -23,7 +23,7 @@ class Logger(object): """Loggers represent named targets for log entries. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs :type name: str :param name: the name of the logger @@ -162,7 +162,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, """API call: log a text message via a POST request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write :type text: text :param text: the log message. 
@@ -196,7 +196,7 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, """API call: log a structured message via a POST request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write :type info: dict :param info: the log entry information @@ -230,7 +230,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, """API call: log a protobuf message via a POST request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write :type message: Protobuf message :param message: the message to be logged @@ -263,7 +263,7 @@ def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -278,7 +278,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, """Return a page of log entries. See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index 162eed548720..e0912583c8a9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -21,7 +21,7 @@ class Metric(object): """Metrics represent named filters for log entries. 
See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics :type name: str :param name: the name of the metric @@ -103,7 +103,7 @@ def create(self, client=None): """API call: create the metric via a PUT request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -118,7 +118,7 @@ def exists(self, client=None): """API call: test for the existence of the metric via a GET request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -141,7 +141,7 @@ def reload(self, client=None): """API call: sync local metric configuration via a GET request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -157,7 +157,7 @@ def update(self, client=None): """API call: update metric configuration via a PUT request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -172,7 +172,7 @@ def delete(self, client=None): """API call: delete a metric via a DELETE request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 52c9efaece61..aeadde05ed9a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -21,7 +21,7 @@ class Sink(object): """Sinks represent filtered exports for log entries. 
See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks :type name: str :param name: the name of the sink @@ -107,7 +107,7 @@ def create(self, client=None): """API call: create the sink via a PUT request See: - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -122,7 +122,7 @@ def exists(self, client=None): """API call: test for the existence of the sink via a GET request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -145,7 +145,7 @@ def reload(self, client=None): """API call: sync local sink configuration via a GET request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -161,7 +161,7 @@ def update(self, client=None): """API call: update sink configuration via a PUT request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -176,7 +176,7 @@ def delete(self, client=None): """API call: delete a sink via a DELETE request See - https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` From e4eb65a701816a861dd27312088c5d49175de60d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 21:53:21 -0700 Subject: [PATCH 023/855] Converting Logging client->list_entries to iterator. --- packages/google-cloud-logging/README.rst | 3 +- .../google/cloud/logging/_gax.py | 65 +++++++++++--- .../google/cloud/logging/_helpers.py | 48 ++++++++++ .../google/cloud/logging/client.py | 51 ++--------- .../google/cloud/logging/connection.py | 87 ++++++++++++++----- .../google/cloud/logging/logger.py | 13 ++- .../unit_tests/test__helpers.py | 62 +++++++++++++ .../unit_tests/test_client.py | 8 -- 8 files changed, 245 insertions(+), 92 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging/_helpers.py create mode 100644 packages/google-cloud-logging/unit_tests/test__helpers.py diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a325db025b07..78e301366e99 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -47,8 +47,7 @@ Example of fetching entries: .. 
code:: python - entries, token = logger.list_entries() - for entry in entries: + for entry in logger.list_entries(): print(entry.payload) See the ``google-cloud-python`` API `logging documentation`_ to learn how to diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 57cd4981cc61..365b85530432 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -14,6 +14,8 @@ """GAX wrapper for Logging API requests.""" +import functools + from google.gax import CallOptions from google.gax import INITIAL_PAGE from google.gax.errors import GaxError @@ -28,6 +30,8 @@ from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound +from google.cloud.iterator import GAXIterator +from google.cloud.logging._helpers import entry_from_resource class _LoggingAPI(object): @@ -36,9 +40,13 @@ class _LoggingAPI(object): :type gax_api: :class:`google.logging.v2.logging_service_v2_api.LoggingServiceV2Api` :param gax_api: API object used to make GAX requests. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that owns this API object. """ - def __init__(self, gax_api): + def __init__(self, gax_api, client): self._gax_api = gax_api + self._client = client def list_entries(self, projects, filter_='', order_by='', page_size=0, page_token=None): @@ -49,8 +57,9 @@ def list_entries(self, projects, filter_='', order_by='', defaults to the project bound to the API's client. :type filter_: str - :param filter_: a filter expression. See: - https://cloud.google.com/logging/docs/view/advanced_filters + :param filter_: + a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -65,10 +74,9 @@ def list_entries(self, projects, filter_='', order_by='', passed, the API will return the first page of entries. - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more entries can be retrieved - with another call (pass that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` + accessible to the current API. """ if page_token is None: page_token = INITIAL_PAGE @@ -76,10 +84,14 @@ def list_entries(self, projects, filter_='', order_by='', page_iter = self._gax_api.list_log_entries( projects, filter_=filter_, order_by=order_by, page_size=page_size, options=options) - entries = [MessageToDict(entry_pb) - for entry_pb in page_iter.next()] - token = page_iter.page_token or None - return entries, token + + # We attach a mutable loggers dictionary so that as Logger + # objects are created by entry_from_resource, they can be + # re-used by other log entries from the same logger. + loggers = {} + item_to_value = functools.partial( + _item_to_entry, loggers=loggers) + return GAXIterator(self._client, page_iter, item_to_value) def write_entries(self, entries, logger_name=None, resource=None, labels=None): @@ -430,3 +442,34 @@ def _log_entry_mapping_to_pb(mapping): mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp']) ParseDict(mapping, entry_pb) return entry_pb + + +def _item_to_entry(iterator, entry_pb, loggers): + """Convert a log entry protobuf to the native object. 
+ + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. It is intended to be + patched with a mutable ``loggers`` argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_LoggingAPI.list_entries`. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type entry_pb: :class:`~google.logging.v2.log_entry_pb2.LogEntry` + :param entry_pb: Log entry protobuf returned from the API. + + :type loggers: dict + :param loggers: + A mapping of logger fullnames -> loggers. If the logger + that owns the entry is not in ``loggers``, the entry + will have a newly-created logger. + + :rtype: :class:`~google.cloud.logging.entries._BaseEntry` + :returns: The next log entry in the page. + """ + resource = MessageToDict(entry_pb) + return entry_from_resource(resource, iterator.client, loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py new file mode 100644 index 000000000000..0e801cb66a0a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -0,0 +1,48 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Common logging helpers.""" + + +from google.cloud.logging.entries import ProtobufEntry +from google.cloud.logging.entries import StructEntry +from google.cloud.logging.entries import TextEntry + + +def entry_from_resource(resource, client, loggers): + """Detect correct entry type from resource and instantiate. + + :type resource: dict + :param resource: one entry resource from API response + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: Client that owns the log entry. + + :type loggers: dict + :param loggers: + A mapping of logger fullnames -> loggers. If the logger + that owns the entry is not in ``loggers``, the entry + will have a newly-created logger. 
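The ``.. note::`` above is the crux of this change: ``functools.partial`` pre-binds a mutable ``loggers`` dict so the three-argument helper matches the two-argument ``item_to_value`` signature while still sharing a single cache across calls. A self-contained sketch of that pattern, using stand-in names (not part of the patch):

.. code:: python

    # Stand-in demonstration of the partial-bound cache pattern
    # (not part of the patch; names and payloads are illustrative).
    import functools

    def _item_to_entry(iterator, resource, loggers):
        # ``loggers`` is the same dict object on every call, so the
        # value created for the first entry of a logger is re-used
        # by every later entry from that logger.
        return loggers.setdefault(resource['logName'], object())

    loggers = {}
    item_to_value = functools.partial(_item_to_entry, loggers=loggers)

    first = item_to_value(None, {'logName': 'projects/p/logs/a'})
    second = item_to_value(None, {'logName': 'projects/p/logs/a'})
    assert first is second  # the cached stand-in "logger" is shared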
+ + :rtype: :class:`~google.cloud.logging.entries._BaseEntry` + :returns: The entry instance, constructed via the resource + """ + if 'textPayload' in resource: + return TextEntry.from_api_repr(resource, client, loggers) + elif 'jsonPayload' in resource: + return StructEntry.from_api_repr(resource, client, loggers) + elif 'protoPayload' in resource: + return ProtobufEntry.from_api_repr(resource, client, loggers) + + raise ValueError('Cannot parse log entry resource') diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 998e02d4e0c7..ad20ae25711c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -40,9 +40,6 @@ from google.cloud.logging.connection import _LoggingAPI as JSONLoggingAPI from google.cloud.logging.connection import _MetricsAPI as JSONMetricsAPI from google.cloud.logging.connection import _SinksAPI as JSONSinksAPI -from google.cloud.logging.entries import ProtobufEntry -from google.cloud.logging.entries import StructEntry -from google.cloud.logging.entries import TextEntry from google.cloud.logging.logger import Logger from google.cloud.logging.metric import Metric from google.cloud.logging.sink import Sink @@ -101,9 +98,9 @@ def logging_api(self): if self._logging_api is None: if self._use_gax: generated = GeneratedLoggingAPI() - self._logging_api = GAXLoggingAPI(generated) + self._logging_api = GAXLoggingAPI(generated, self) else: - self._logging_api = JSONLoggingAPI(self.connection) + self._logging_api = JSONLoggingAPI(self) return self._logging_api @property @@ -147,31 +144,6 @@ def logger(self, name): """ return Logger(name, client=self) - def _entry_from_resource(self, resource, loggers): - """Detect correct entry type from resource and instantiate. - - :type resource: dict - :param resource: one entry resource from API response - - :type loggers: dict - :param loggers: - (Optional) A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. - - :rtype: One of: - :class:`google.cloud.logging.entries.TextEntry`, - :class:`google.cloud.logging.entries.StructEntry`, - :class:`google.cloud.logging.entries.ProtobufEntry` - :returns: the entry instance, constructed via the resource - """ - if 'textPayload' in resource: - return TextEntry.from_api_repr(resource, self, loggers) - elif 'jsonPayload' in resource: - return StructEntry.from_api_repr(resource, self, loggers) - elif 'protoPayload' in resource: - return ProtobufEntry.from_api_repr(resource, self, loggers) - raise ValueError('Cannot parse log entry resource') - def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): """Return a page of log entries. @@ -184,8 +156,9 @@ def list_entries(self, projects=None, filter_=None, order_by=None, defaults to the project bound to the client. :type filter_: str - :param filter_: a filter expression. See: - https://cloud.google.com/logging/docs/view/advanced_filters + :param filter_: + a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -200,22 +173,16 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. 
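The dispatch in ``entry_from_resource`` keys purely off which payload field the resource carries. Restated as a self-contained sketch (stand-in code, not part of the patch):

.. code:: python

    # Stand-in restatement of the payload dispatch (not part of the
    # patch): which entry class handles which resource key.
    _KEY_TO_CLASS_NAME = {
        'textPayload': 'TextEntry',
        'jsonPayload': 'StructEntry',
        'protoPayload': 'ProtobufEntry',
    }

    def classify(resource):
        # A well-formed resource carries exactly one payload key.
        for key, class_name in _KEY_TO_CLASS_NAME.items():
            if key in resource:
                return class_name
        raise ValueError('Cannot parse log entry resource')

    assert classify({'textPayload': 'hi'}) == 'TextEntry'
    assert classify({'jsonPayload': {}}) == 'StructEntry'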
- :rtype: tuple, (list, str) - :returns: list of :class:`google.cloud.logging.entry.TextEntry`, plus a - "next page token" string: if not None, indicates that - more entries can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` + accessible to the current client. """ if projects is None: projects = [self.project] - resources, token = self.logging_api.list_entries( + return self.logging_api.list_entries( projects=projects, filter_=filter_, order_by=order_by, page_size=page_size, page_token=page_token) - loggers = {} - entries = [self._entry_from_resource(resource, loggers) - for resource in resources] - return entries, token def sink(self, name, filter_=None, destination=None): """Creates a sink bound to the current client. diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 9d442b29267b..72ebc98a4f9b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -14,7 +14,11 @@ """Create / interact with Stackdriver Logging connections.""" +import functools + from google.cloud import connection as base_connection +from google.cloud.iterator import HTTPIterator +from google.cloud.logging._helpers import entry_from_resource class Connection(base_connection.JSONConnection): @@ -55,11 +59,13 @@ class _LoggingAPI(object): https://cloud.google.com/logging/docs/api/reference/rest/v2/entries https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs - :type connection: :class:`google.cloud.logging.connection.Connection` - :param connection: the connection used to make API requests. + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client used to make API requests. """ - def __init__(self, connection): - self._connection = connection + + def __init__(self, client): + self._client = client + self._connection = client.connection def list_entries(self, projects, filter_=None, order_by=None, page_size=None, page_token=None): @@ -73,8 +79,9 @@ def list_entries(self, projects, filter_=None, order_by=None, defaults to the project bound to the client. :type filter_: str - :param filter_: a filter expression. See: - https://cloud.google.com/logging/docs/view/advanced_filters + :param filter_: + a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -89,29 +96,35 @@ def list_entries(self, projects, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more entries can be retrieved - with another call (pass that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` + accessible to the current API. 
""" - params = {'projectIds': projects} + extra_params = {'projectIds': projects} if filter_ is not None: - params['filter'] = filter_ + extra_params['filter'] = filter_ if order_by is not None: - params['orderBy'] = order_by + extra_params['orderBy'] = order_by if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - - resp = self._connection.api_request( - method='POST', path='/entries:list', data=params) - - return resp.get('entries', ()), resp.get('nextPageToken') + extra_params['pageSize'] = page_size + + path = '/entries:list' + # We attach a mutable loggers dictionary so that as Logger + # objects are created by entry_from_resource, they can be + # re-used by other log entries from the same logger. + loggers = {} + item_to_value = functools.partial( + _item_to_entry, loggers=loggers) + iterator = HTTPIterator( + client=self._client, path=path, + item_to_value=item_to_value, items_key='entries', + page_token=page_token, extra_params=extra_params) + # This method uses POST to make a read-only request. + iterator._HTTP_METHOD = 'POST' + return iterator def write_entries(self, entries, logger_name=None, resource=None, labels=None): @@ -439,3 +452,33 @@ def metric_delete(self, project, metric_name): """ target = '/projects/%s/metrics/%s' % (project, metric_name) self._connection.api_request(method='DELETE', path=target) + + +def _item_to_entry(iterator, resource, loggers): + """Convert a log entry resource to the native object. + + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. It is intended to be + patched with a mutable ``loggers`` argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_LoggingAPI.list_entries`. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: Log entry JSON resource returned from the API. + + :type loggers: dict + :param loggers: + A mapping of logger fullnames -> loggers. If the logger + that owns the entry is not in ``loggers``, the entry + will have a newly-created logger. + + :rtype: :class:`~google.cloud.logging.entries._BaseEntry` + :returns: The next log entry in the page. + """ + return entry_from_resource(resource, iterator.client, loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 9ef9a2d3e887..842481af42da 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -285,8 +285,9 @@ def list_entries(self, projects=None, filter_=None, order_by=None, defaults to the project bound to the client. :type filter_: str - :param filter_: a filter expression. See: - https://cloud.google.com/logging/docs/view/advanced_filters + :param filter_: + a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -301,11 +302,9 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. 
- :rtype: tuple, (list, str) - :returns: list of :class:`google.cloud.logging.entry.TextEntry`, plus a - "next page token" string: if not None, indicates that - more entries can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` + accessible to the current logger. """ log_filter = 'logName=%s' % (self.full_name,) if filter_ is not None: diff --git a/packages/google-cloud-logging/unit_tests/test__helpers.py b/packages/google-cloud-logging/unit_tests/test__helpers.py new file mode 100644 index 000000000000..85d2dc3b846a --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/test__helpers.py @@ -0,0 +1,62 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import unittest + + +class Test_entry_from_resource(unittest.TestCase): + + @staticmethod + def _call_fut(resource, client, loggers): + from google.cloud.logging._helpers import entry_from_resource + return entry_from_resource(resource, client, loggers) + + def test_unknown_type(self): + with self.assertRaises(ValueError): + self._call_fut({}, None, {}) + + def _payload_helper(self, key, class_name): + from google.cloud._testing import _Monkey + import google.cloud.logging._helpers as MUT + + resource = {key: 'yup'} + client = object() + loggers = {} + mock_class = EntryMock() + with _Monkey(MUT, **{class_name: mock_class}): + result = self._call_fut(resource, client, loggers) + + self.assertIs(result, mock_class.sentinel) + self.assertEqual(mock_class.called, (resource, client, loggers)) + + def test_text_payload(self): + self._payload_helper('textPayload', 'TextEntry') + + def test_json_payload(self): + self._payload_helper('jsonPayload', 'StructEntry') + + def test_proto_payload(self): + self._payload_helper('protoPayload', 'ProtobufEntry') + + +class EntryMock(object): + + def __init__(self): + self.sentinel = object() + self.called = None + + def from_api_repr(self, resource, client, loggers): + self.called = (resource, client, loggers) + return self.sentinel diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 5becbd68b920..703ab56bc6bf 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -202,14 +202,6 @@ def test_logger(self): self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - def test__entry_from_resource_unknown_type(self): - PROJECT = 'PROJECT' - creds = _Credentials() - client = self._makeOne(PROJECT, creds) - loggers = {} - with self.assertRaises(ValueError): - client._entry_from_resource({'unknownPayload': {}}, loggers) - def test_list_entries_defaults(self): from google.cloud.logging.entries import TextEntry IID = 'IID' From e0546519cd17e37dc43e75fdf94a4d87de236af7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 28 Oct 2016 11:31:07 -0700 Subject: 
[PATCH 024/855] Adding unit tests for Logging list_entries() change. --- .../unit_tests/test__gax.py | 185 ++++++++++-------- .../unit_tests/test_client.py | 85 +++++--- .../unit_tests/test_connection.py | 124 +++++++++--- .../unit_tests/test_logger.py | 94 ++++++--- 4 files changed, 325 insertions(+), 163 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 1fd5d2fc9a22..78a51f4acdd3 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -39,6 +39,7 @@ def _makeOne(self, *args, **kw): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_LoggingAPI(_Base, unittest.TestCase): LOG_NAME = 'log_name' + LOG_PATH = 'projects/%s/logs/%s' % (_Base.PROJECT, LOG_NAME) def _getTargetClass(self): from google.cloud.logging._gax import _LoggingAPI @@ -46,8 +47,10 @@ def _getTargetClass(self): def test_ctor(self): gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api) + client = object() + api = self._makeOne(gax_api, client) self.assertIs(api._gax_api, gax_api) + self.assertIs(api._client, client) def test_list_entries_no_paging(self): import datetime @@ -57,32 +60,47 @@ def test_list_entries_no_paging(self): from google.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud._helpers import UTC from google.cloud._testing import _GAXPageIterator from google.cloud.logging import DESCENDING + from google.cloud.logging.client import Client + from google.cloud.logging.entries import TextEntry + from google.cloud.logging.logger import Logger TOKEN = 'TOKEN' TEXT = 'TEXT' resource_pb = MonitoredResource(type='global') - timestamp_pb = _datetime_to_pb_timestamp( - datetime.datetime.utcnow()) - entry_pb = LogEntry(log_name=self.LOG_NAME, + timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) + timestamp_pb = _datetime_to_pb_timestamp(timestamp) + entry_pb = LogEntry(log_name=self.LOG_PATH, resource=resource_pb, timestamp=timestamp_pb, text_payload=TEXT) response = _GAXPageIterator([entry_pb], page_token=TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - api = self._makeOne(gax_api) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=True) + api = self._makeOne(gax_api, client) - entries, next_token = api.list_entries( + iterator = api.list_entries( [self.PROJECT], self.FILTER, DESCENDING) + entries = list(iterator) + next_token = iterator.next_page_token + # First check the token. + self.assertEqual(next_token, TOKEN) + # Then check the entries returned. 
self.assertEqual(len(entries), 1) entry = entries[0] - self.assertIsInstance(entry, dict) - self.assertEqual(entry['logName'], self.LOG_NAME) - self.assertEqual(entry['resource'], {'type': 'global'}) - self.assertEqual(entry['textPayload'], TEXT) - self.assertEqual(next_token, TOKEN) + self.assertIsInstance(entry, TextEntry) + self.assertEqual(entry.payload, TEXT) + self.assertIsInstance(entry.logger, Logger) + self.assertEqual(entry.logger.name, self.LOG_NAME) + self.assertIsNone(entry.insert_id) + self.assertEqual(entry.timestamp, timestamp) + self.assertIsNone(entry.labels) + self.assertIsNone(entry.severity) + self.assertIsNone(entry.http_request) projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) @@ -97,33 +115,47 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): from google.api.monitored_resource_pb2 import MonitoredResource from google.logging.v2.log_entry_pb2 import LogEntry - from google.cloud._testing import _GAXPageIterator from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud._helpers import UTC + from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.client import Client + from google.cloud.logging.entries import StructEntry + from google.cloud.logging.logger import Logger SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' resource_pb = MonitoredResource(type='global') - timestamp_pb = _datetime_to_pb_timestamp( - datetime.datetime.utcnow()) - entry_pb = LogEntry(log_name=self.LOG_NAME, + timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) + timestamp_pb = _datetime_to_pb_timestamp(timestamp) + entry_pb = LogEntry(log_name=self.LOG_PATH, resource=resource_pb, timestamp=timestamp_pb, json_payload=struct_pb) response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - api = self._makeOne(gax_api) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=True) + api = self._makeOne(gax_api, client) - entries, next_token = api.list_entries( + iterator = api.list_entries( [self.PROJECT], page_size=SIZE, page_token=TOKEN) + entries = list(iterator) + next_token = iterator.next_page_token + # First check the token. 
+ self.assertEqual(next_token, NEW_TOKEN) self.assertEqual(len(entries), 1) entry = entries[0] - self.assertIsInstance(entry, dict) - self.assertEqual(entry['logName'], self.LOG_NAME) - self.assertEqual(entry['resource'], {'type': 'global'}) - self.assertEqual(entry['jsonPayload'], payload) - self.assertEqual(next_token, NEW_TOKEN) + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.payload, payload) + self.assertIsInstance(entry.logger, Logger) + self.assertEqual(entry.logger.name, self.LOG_NAME) + self.assertIsNone(entry.insert_id) + self.assertEqual(entry.timestamp, timestamp) + self.assertIsNone(entry.labels) + self.assertIsNone(entry.severity) + self.assertIsNone(entry.http_request) projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) @@ -201,7 +233,7 @@ def _make_log_entry_with_extras(self, labels, iid, type_url, now): last=True, id='OPID', ) - entry_pb = LogEntry(log_name=self.LOG_NAME, + entry_pb = LogEntry(log_name=self.LOG_PATH, resource=resource_pb, proto_payload=proto_payload, timestamp=timestamp_pb, @@ -213,7 +245,7 @@ def _make_log_entry_with_extras(self, labels, iid, type_url, now): return entry_pb def test_list_entries_with_extra_properties(self): - from datetime import datetime + import datetime # Import the wrappers to register the type URL for BoolValue # pylint: disable=unused-variable @@ -221,10 +253,12 @@ def test_list_entries_with_extra_properties(self): # pylint: enable=unused-variable from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.client import Client + from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging.logger import Logger - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' @@ -239,47 +273,42 @@ def test_list_entries_with_extra_properties(self): response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - api = self._makeOne(gax_api) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=True) + api = self._makeOne(gax_api, client) - entries, next_token = api.list_entries( + iterator = api.list_entries( [self.PROJECT], page_size=SIZE, page_token=TOKEN) + entries = list(iterator) + next_token = iterator.next_page_token + # First check the token. + self.assertEqual(next_token, NEW_TOKEN) + # Then check the entries returned. 
self.assertEqual(len(entries), 1) entry = entries[0] - self.assertIsInstance(entry, dict) - self.assertEqual(entry['logName'], self.LOG_NAME) - self.assertEqual(entry['resource'], - {'type': 'global', 'labels': {'foo': 'bar'}}) - self.assertEqual(entry['protoPayload'], { + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.payload, { '@type': bool_type_url, 'value': False, }) - self.assertEqual(entry['severity'], SEVERITY) - self.assertEqual(entry['labels'], LABELS) - self.assertEqual(entry['insertId'], IID) - self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW)) - request = entry_pb.http_request - EXPECTED_REQUEST = { - 'requestMethod': request.request_method, - 'requestUrl': request.request_url, - 'status': request.status, - 'requestSize': str(request.request_size), - 'responseSize': str(request.response_size), - 'referer': request.referer, - 'userAgent': request.user_agent, - 'remoteIp': request.remote_ip, - 'cacheHit': request.cache_hit, - } - self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST) - operation = entry_pb.operation - EXPECTED_OPERATION = { - 'producer': operation.producer, - 'id': operation.id, - 'first': operation.first, - 'last': operation.last, - } - self.assertEqual(entry['operation'], EXPECTED_OPERATION) - self.assertEqual(next_token, NEW_TOKEN) + self.assertIsInstance(entry.logger, Logger) + self.assertEqual(entry.logger.name, self.LOG_NAME) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, {'foo': 'bar'}) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request, { + 'requestMethod': entry_pb.http_request.request_method, + 'requestUrl': entry_pb.http_request.request_url, + 'status': entry_pb.http_request.status, + 'requestSize': str(entry_pb.http_request.request_size), + 'responseSize': str(entry_pb.http_request.response_size), + 'referer': entry_pb.http_request.referer, + 'userAgent': entry_pb.http_request.user_agent, + 'remoteIp': entry_pb.http_request.remote_ip, + 'cacheHit': entry_pb.http_request.cache_hit, + }) projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) @@ -292,14 +321,13 @@ def test_list_entries_with_extra_properties(self): def test_write_entries_single(self): from google.logging.v2.log_entry_pb2 import LogEntry TEXT = 'TEXT' - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) ENTRY = { - 'logName': LOG_PATH, + 'logName': self.LOG_PATH, 'resource': {'type': 'global'}, 'textPayload': TEXT, } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.write_entries([ENTRY]) @@ -309,7 +337,7 @@ def test_write_entries_single(self): entry = entries[0] self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.log_name, self.LOG_PATH) self.assertEqual(entry.resource.type, 'global') self.assertEqual(entry.labels, {}) self.assertEqual(entry.text_payload, TEXT) @@ -328,7 +356,6 @@ def test_write_entries_w_extra_properties(self): from google.cloud._helpers import UTC, _pb_timestamp_to_datetime NOW = datetime.utcnow().replace(tzinfo=UTC) TEXT = 'TEXT' - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) SEVERITY = 'WARNING' LABELS = { 'foo': 'bar', @@ -362,7 +389,7 @@ def test_write_entries_w_extra_properties(self): 'last': True, } ENTRY = { - 'logName': LOG_PATH, + 'logName': self.LOG_PATH, 'resource': {'type': 'global'}, 'textPayload': TEXT, 'severity': SEVERITY, @@ -373,7 +400,7 @@ def 
test_write_entries_w_extra_properties(self): 'operation': OPERATION, } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.write_entries([ENTRY]) @@ -383,7 +410,7 @@ def test_write_entries_w_extra_properties(self): entry = entries[0] self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.log_name, self.LOG_PATH) self.assertEqual(entry.resource.type, 'global') self.assertEqual(entry.text_payload, TEXT) self.assertEqual(entry.severity, WARNING) @@ -441,7 +468,6 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): {'protoPayload': PROTO, 'httpRequest': {'requestUrl': URL}}, ] - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) RESOURCE = { 'type': 'global', } @@ -449,9 +475,9 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): 'foo': 'bar', } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) - api.write_entries(ENTRIES, LOG_PATH, RESOURCE, LABELS) + api.write_entries(ENTRIES, self.LOG_PATH, RESOURCE, LABELS) entries, log_name, resource, labels, partial_success, options = ( gax_api._write_log_entries_called_with) @@ -486,7 +512,7 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): request = entry.http_request self.assertEqual(request.request_url, URL) - self.assertEqual(log_name, LOG_PATH) + self.assertEqual(log_name, self.LOG_PATH) self.assertEqual(resource, RESOURCE) self.assertEqual(labels, LABELS) self.assertEqual(partial_success, False) @@ -532,40 +558,39 @@ def test_write_entries_multiple_nested_payload(self): self._write_entries_multiple_helper(json_payload, json_struct_pb) def test_logger_delete(self): - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.logger_delete(self.PROJECT, self.LOG_NAME) log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, LOG_PATH) + self.assertEqual(log_name, self.LOG_PATH) self.assertIsNone(options) def test_logger_delete_not_found(self): from google.cloud.exceptions import NotFound - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI(_delete_not_found=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.logger_delete(self.PROJECT, self.LOG_NAME) log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, LOG_PATH) + self.assertEqual(log_name, self.LOG_PATH) self.assertIsNone(options) def test_logger_delete_error(self): from google.gax.errors import GaxError - LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.logger_delete(self.PROJECT, self.LOG_NAME) log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, LOG_PATH) + self.assertEqual(log_name, self.LOG_PATH) self.assertIsNone(options) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 703ab56bc6bf..5ab843f8521a 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -67,8 +67,9 @@ def _generated_api(*args, **kw): class _GaxLoggingAPI(object): - def __init__(self, _wrapped): 
+ def __init__(self, _wrapped, client): self._wrapped = _wrapped + self.client = client creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) @@ -81,6 +82,7 @@ def __init__(self, _wrapped): self.assertIsInstance(api, _GaxLoggingAPI) self.assertIs(api._wrapped, wrapped) + self.assertIs(api.client, client) # API instance is cached again = client.logging_api self.assertIs(again, api) @@ -203,7 +205,9 @@ def test_logger(self): self.assertEqual(logger.project, self.PROJECT) def test_list_entries_defaults(self): + import six from google.cloud.logging.entries import TextEntry + IID = 'IID' TEXT = 'TEXT' TOKEN = 'TOKEN' @@ -217,11 +221,18 @@ def test_list_entries_defaults(self): self.PROJECT, self.LOGGER_NAME), }] creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - api = client._logging_api = _DummyLoggingAPI() - api._list_entries_response = ENTRIES, TOKEN - - entries, token = client.list_entries() + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=False) + returned = { + 'entries': ENTRIES, + 'nextPageToken': TOKEN, + } + client.connection = _Connection(returned) + + iterator = client.list_entries() + page = six.next(iterator.pages) + entries = list(page) + token = iterator.next_page_token self.assertEqual(len(entries), 1) entry = entries[0] @@ -234,9 +245,12 @@ def test_list_entries_defaults(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(token, TOKEN) - self.assertEqual( - api._list_entries_called_with, - ([self.PROJECT], None, None, None, None)) + called_with = client.connection._called_with + self.assertEqual(called_with, { + 'path': '/entries:list', + 'method': 'POST', + 'data': {'projectIds': [self.PROJECT]}, + }) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING @@ -271,15 +285,21 @@ def test_list_entries_explicit(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - client = self._makeOne(self.PROJECT, credentials=_Credentials()) - api = client._logging_api = _DummyLoggingAPI() - api._list_entries_response = ENTRIES, None + client = self._makeOne(self.PROJECT, credentials=_Credentials(), + use_gax=False) + returned = {'entries': ENTRIES} + client.connection = _Connection(returned) - entries, token = client.list_entries( + iterator = client.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) - self.assertEqual(len(entries), 2) + entries = list(iterator) + token = iterator.next_page_token + # First, check the token. + self.assertIsNone(token) + # Then check the entries. 
+ self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) self.assertEqual(entry.insert_id, IID1) @@ -301,10 +321,18 @@ def test_list_entries_explicit(self): self.assertIs(entries[0].logger, entries[1].logger) - self.assertIsNone(token) - self.assertEqual( - api._list_entries_called_with, - ([PROJECT1, PROJECT2], FILTER, DESCENDING, PAGE_SIZE, TOKEN)) + called_with = client.connection._called_with + self.assertEqual(called_with, { + 'path': '/entries:list', + 'method': 'POST', + 'data': { + 'filter': FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + 'projectIds': [PROJECT1, PROJECT2], + }, + }) def test_sink_defaults(self): from google.cloud.logging.sink import Sink @@ -479,14 +507,6 @@ def create_scoped(self, scope): return self -class _DummyLoggingAPI(object): - - def list_entries(self, projects, filter_, order_by, page_size, page_token): - self._list_entries_called_with = ( - projects, filter_, order_by, page_size, page_token) - return self._list_entries_response - - class _DummySinksAPI(object): def list_sinks(self, project, page_size, page_token): @@ -499,3 +519,16 @@ class _DummyMetricsAPI(object): def list_metrics(self, project, page_size, page_token): self._list_metrics_called_with = (project, page_size, page_token) return self._list_metrics_response + + +class _Connection(object): + + _called_with = None + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + self._called_with = kw + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test_connection.py index 07d09ceb9a02..d994f6f66289 100644 --- a/packages/google-cloud-logging/unit_tests/test_connection.py +++ b/packages/google-cloud-logging/unit_tests/test_connection.py @@ -51,19 +51,26 @@ def _makeOne(self, *args, **kw): def test_ctor(self): connection = object() - api = self._makeOne(connection) + client = _Client(connection) + api = self._makeOne(client) self.assertIs(api._connection, connection) + self.assertIs(api._client, client) @staticmethod def _make_timestamp(): - from datetime import datetime + import datetime from google.cloud._helpers import UTC - NOW = datetime.utcnow().replace(tzinfo=UTC) - return _datetime_to_rfc3339_w_nanos(NOW) + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_no_paging(self): - TIMESTAMP = self._make_timestamp() + import six + from google.cloud.logging.client import Client + from google.cloud.logging.entries import TextEntry + from google.cloud.logging.logger import Logger + + NOW, TIMESTAMP = self._make_timestamp() IID = 'IID' TEXT = 'TEXT' SENT = { @@ -83,25 +90,49 @@ def test_list_entries_no_paging(self): }], 'nextPageToken': TOKEN, } - conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + client.connection = _Connection(RETURNED) + api = self._makeOne(client) - entries, token = api.list_entries([self.PROJECT]) + iterator = api.list_entries([self.PROJECT]) + page = six.next(iterator.pages) + entries = list(page) + token = iterator.next_page_token - self.assertEqual(entries, RETURNED['entries']) + # First check the token. 
self.assertEqual(token, TOKEN) - - self.assertEqual(conn._called_with['method'], 'POST') - path = '/%s' % self.LIST_ENTRIES_PATH - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + # Then check the entries returned. + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, TextEntry) + self.assertEqual(entry.payload, TEXT) + self.assertIsInstance(entry.logger, Logger) + self.assertEqual(entry.logger.name, self.LOGGER_NAME) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertIsNone(entry.labels) + self.assertIsNone(entry.severity) + self.assertIsNone(entry.http_request) + + called_with = client.connection._called_with + expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) + self.assertEqual(called_with, { + 'method': 'POST', + 'path': expected_path, + 'data': SENT, + }) def test_list_entries_w_paging(self): from google.cloud.logging import DESCENDING + from google.cloud.logging.client import Client + from google.cloud.logging.logger import Logger + from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging.entries import StructEntry PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' - TIMESTAMP = self._make_timestamp() + NOW, TIMESTAMP = self._make_timestamp() IID1 = 'IID1' IID2 = 'IID2' PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} @@ -137,20 +168,50 @@ def test_list_entries_w_paging(self): self.PROJECT, self.LOGGER_NAME), }], } - conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + client.connection = _Connection(RETURNED) + api = self._makeOne(client) - entries, token = api.list_entries( + iterator = api.list_entries( projects=[PROJECT1, PROJECT2], filter_=self.FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + entries = list(iterator) + token = iterator.next_page_token - self.assertEqual(entries, RETURNED['entries']) + # First check the token. self.assertIsNone(token) - - self.assertEqual(conn._called_with['method'], 'POST') - path = '/%s' % self.LIST_ENTRIES_PATH - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + # Then check the entries returned. 
+ self.assertEqual(len(entries), 2) + entry1 = entries[0] + self.assertIsInstance(entry1, StructEntry) + self.assertEqual(entry1.payload, PAYLOAD) + self.assertIsInstance(entry1.logger, Logger) + self.assertEqual(entry1.logger.name, self.LOGGER_NAME) + self.assertEqual(entry1.insert_id, IID1) + self.assertEqual(entry1.timestamp, NOW) + self.assertIsNone(entry1.labels) + self.assertIsNone(entry1.severity) + self.assertIsNone(entry1.http_request) + + entry2 = entries[1] + self.assertIsInstance(entry2, ProtobufEntry) + self.assertEqual(entry2.payload, PROTO_PAYLOAD) + self.assertIsInstance(entry2.logger, Logger) + self.assertEqual(entry2.logger.name, self.LOGGER_NAME) + self.assertEqual(entry2.insert_id, IID2) + self.assertEqual(entry2.timestamp, NOW) + self.assertIsNone(entry2.labels) + self.assertIsNone(entry2.severity) + self.assertIsNone(entry2.http_request) + + called_with = client.connection._called_with + expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) + self.assertEqual(called_with, { + 'method': 'POST', + 'path': expected_path, + 'data': SENT, + }) def test_write_entries_single(self): TEXT = 'TEXT' @@ -166,7 +227,8 @@ def test_write_entries_single(self): 'entries': [ENTRY], } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.write_entries([ENTRY]) @@ -198,7 +260,8 @@ def test_write_entries_multiple(self): 'entries': [ENTRY1, ENTRY2], } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) @@ -210,7 +273,8 @@ def test_write_entries_multiple(self): def test_logger_delete(self): path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.logger_delete(self.PROJECT, self.LOGGER_NAME) @@ -638,3 +702,9 @@ def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION no_fraction = value.strftime(_RFC3339_NO_FRACTION) return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 575b92a3af4d..0524dfddbc9d 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -346,47 +346,75 @@ def test_delete_w_alternate_client(self): (self.PROJECT, self.LOGGER_NAME)) def test_list_entries_defaults(self): - LISTED = { - 'projects': None, - 'filter_': 'logName=projects/%s/logs/%s' % - (self.PROJECT, self.LOGGER_NAME), - 'order_by': None, - 'page_size': None, - 'page_token': None, - } + import six + from google.cloud.logging.client import Client + TOKEN = 'TOKEN' - client = _Client(self.PROJECT) - client._token = TOKEN + + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + returned = { + 'nextPageToken': TOKEN, + } + client.connection = _Connection(returned) + logger = self._makeOne(self.LOGGER_NAME, client=client) - entries, token = logger.list_entries() + + iterator = logger.list_entries() + page = six.next(iterator.pages) + entries = list(page) + token = iterator.next_page_token + self.assertEqual(len(entries), 0) self.assertEqual(token, TOKEN) - self.assertEqual(client._listed, LISTED) + called_with = client.connection._called_with + FILTER = 
'logName=projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME) + self.assertEqual(called_with, { + 'method': 'POST', + 'path': '/entries:list', + 'data': { + 'filter': FILTER, + 'projectIds': [self.PROJECT], + }, + }) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING + from google.cloud.logging.client import Client PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' FILTER = 'resource.type:global' TOKEN = 'TOKEN' PAGE_SIZE = 42 - LISTED = { - 'projects': ['PROJECT1', 'PROJECT2'], - 'filter_': '%s AND logName=projects/%s/logs/%s' % - (FILTER, self.PROJECT, self.LOGGER_NAME), - 'order_by': DESCENDING, - 'page_size': PAGE_SIZE, - 'page_token': TOKEN, - } - client = _Client(self.PROJECT) + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + client.connection = _Connection({}) logger = self._makeOne(self.LOGGER_NAME, client=client) - entries, token = logger.list_entries( + iterator = logger.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + entries = list(iterator) + token = iterator.next_page_token + self.assertEqual(len(entries), 0) self.assertIsNone(token) - self.assertEqual(client._listed, LISTED) + called_with = client.connection._called_with + combined_filter = '%s AND logName=projects/%s/logs/%s' % ( + FILTER, self.PROJECT, self.LOGGER_NAME) + self.assertEqual(called_with, { + 'method': 'POST', + 'path': '/entries:list', + 'data': { + 'filter': combined_filter, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + 'projectIds': [PROJECT1, PROJECT2], + }, + }) class TestBatch(unittest.TestCase): @@ -689,17 +717,23 @@ def logger_delete(self, project, logger_name): class _Client(object): - _listed = _token = None - _entries = () - def __init__(self, project, connection=None): self.project = project self.connection = connection - def list_entries(self, **kw): - self._listed = kw - return self._entries, self._token - class _Bugout(Exception): pass + + +class _Connection(object): + + _called_with = None + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + self._called_with = kw + response, self._responses = self._responses[0], self._responses[1:] + return response From 732e304ab84072b89e19e41d00fddd95a406bb8d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 13:54:43 -0700 Subject: [PATCH 025/855] Passing Client through to low-level logging API objects. --- .../google/cloud/logging/_gax.py | 12 ++++++++++-- .../google/cloud/logging/client.py | 8 ++++---- .../google/cloud/logging/connection.py | 18 ++++++++++-------- 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 365b85530432..ee5da3a5e2a1 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -144,9 +144,13 @@ class _SinksAPI(object): :type gax_api: :class:`google.logging.v2.config_service_v2_api.ConfigServiceV2Api` :param gax_api: API object used to make GAX requests. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that owns this API object.
""" - def __init__(self, gax_api): + def __init__(self, gax_api, client): self._gax_api = gax_api + self._client = client def list_sinks(self, project, page_size=0, page_token=None): """List sinks for the project associated with this client. @@ -291,9 +295,13 @@ class _MetricsAPI(object): :type gax_api: :class:`google.logging.v2.metrics_service_v2_api.MetricsServiceV2Api` :param gax_api: API object used to make GAX requests. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that owns this API object. """ - def __init__(self, gax_api): + def __init__(self, gax_api, client): self._gax_api = gax_api + self._client = client def list_metrics(self, project, page_size=0, page_token=None): """List metrics for the project associated with this client. diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ad20ae25711c..f968668e57f7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -113,9 +113,9 @@ def sinks_api(self): if self._sinks_api is None: if _USE_GAX: generated = GeneratedSinksAPI() - self._sinks_api = GAXSinksAPI(generated) + self._sinks_api = GAXSinksAPI(generated, self) else: - self._sinks_api = JSONSinksAPI(self.connection) + self._sinks_api = JSONSinksAPI(self) return self._sinks_api @property @@ -128,9 +128,9 @@ def metrics_api(self): if self._metrics_api is None: if _USE_GAX: generated = GeneratedMetricsAPI() - self._metrics_api = GAXMetricsAPI(generated) + self._metrics_api = GAXMetricsAPI(generated, self) else: - self._metrics_api = JSONMetricsAPI(self.connection) + self._metrics_api = JSONMetricsAPI(self) return self._metrics_api def logger(self, name): diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 72ebc98a4f9b..2d9cac58075c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -184,11 +184,12 @@ class _SinksAPI(object): See: https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks - :type connection: :class:`google.cloud.logging.connection.Connection` - :param connection: the connection used to make API requests. + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client used to make API requests. """ - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self._client = client + self._connection = client.connection def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. @@ -323,11 +324,12 @@ class _MetricsAPI(object): See: https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics - :type connection: :class:`google.cloud.logging.connection.Connection` - :param connection: the connection used to make API requests. + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client used to make API requests. """ - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self._client = client + self._connection = client.connection def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. 
From 53fda3da7658a25fd9591d0a1e3ca605efbebde1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 14:15:04 -0700 Subject: [PATCH 026/855] Updating unit tests for logging API objects. Change needed to accommodate the changes to the constructor. --- .../unit_tests/test__gax.py | 62 +++++++++--------- .../unit_tests/test_client.py | 8 ++- .../unit_tests/test_connection.py | 64 +++++++++++++------ 3 files changed, 81 insertions(+), 53 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 78a51f4acdd3..e9656cdd024c 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -606,8 +606,10 @@ def _getTargetClass(self): def test_ctor(self): gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api) + client = object() + api = self._makeOne(gax_api, client) self.assertIs(api._gax_api, gax_api) + self.assertIs(api._client, client) def test_list_sinks_no_paging(self): from google.gax import INITIAL_PAGE @@ -625,7 +627,7 @@ def test_list_sinks_no_paging(self): filter=self.FILTER) response = _GAXPageIterator([sink_pb], page_token=TOKEN) gax_api = _GAXSinksAPI(_list_sinks_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) sinks, token = api.list_sinks(self.PROJECT) @@ -653,7 +655,7 @@ def test_list_sinks_w_paging(self): filter=self.FILTER) response = _GAXPageIterator([sink_pb]) gax_api = _GAXSinksAPI(_list_sinks_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) sinks, token = api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -669,7 +671,7 @@ def test_list_sinks_w_paging(self): def test_sink_create_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.sink_create( @@ -679,7 +681,7 @@ def test_sink_create_error(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict gax_api = _GAXSinksAPI(_create_sink_conflict=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(Conflict): api.sink_create( @@ -689,7 +691,7 @@ def test_sink_create_conflict(self): def test_sink_create_ok(self): from google.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -706,7 +708,7 @@ def test_sink_create_ok(self): def test_sink_get_error(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -714,7 +716,7 @@ def test_sink_get_error(self): def test_sink_get_miss(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -731,7 +733,7 @@ def test_sink_get_hit(self): destination=self.DESTINATION_URI, filter=self.FILTER) gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) response = api.sink_get(self.PROJECT, self.SINK_NAME) @@ -744,7 +746,7 @@ def test_sink_get_hit(self): 
def test_sink_update_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.sink_update( @@ -754,7 +756,7 @@ def test_sink_update_error(self): def test_sink_update_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.sink_update( @@ -768,7 +770,7 @@ def test_sink_update_hit(self): destination=self.DESTINATION_URI, filter=self.FILTER) gax_api = _GAXSinksAPI(_update_sink_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.sink_update( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -785,7 +787,7 @@ def test_sink_update_hit(self): def test_sink_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -793,14 +795,14 @@ def test_sink_delete_error(self): def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI(_sink_not_found=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.sink_delete(self.PROJECT, self.SINK_NAME) def test_sink_delete_hit(self): gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -821,7 +823,7 @@ def _getTargetClass(self): def test_ctor(self): gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) self.assertIs(api._gax_api, gax_api) def test_list_metrics_no_paging(self): @@ -840,7 +842,7 @@ def test_list_metrics_no_paging(self): filter=self.FILTER) response = _GAXPageIterator([metric_pb], page_token=TOKEN) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) metrics, token = api.list_metrics(self.PROJECT) @@ -868,7 +870,7 @@ def test_list_metrics_w_paging(self): filter=self.FILTER) response = _GAXPageIterator([metric_pb]) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) metrics, token = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -884,7 +886,7 @@ def test_list_metrics_w_paging(self): def test_metric_create_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.metric_create( @@ -894,7 +896,7 @@ def test_metric_create_error(self): def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(Conflict): api.metric_create( @@ -904,7 +906,7 @@ def test_metric_create_conflict(self): def test_metric_create_ok(self): from google.logging.v2.logging_metrics_pb2 import LogMetric gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.metric_create( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -921,7 +923,7 @@ def test_metric_create_ok(self): def 
test_metric_get_error(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -929,7 +931,7 @@ def test_metric_get_error(self): def test_metric_get_miss(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -946,7 +948,7 @@ def test_metric_get_hit(self): description=self.DESCRIPTION, filter=self.FILTER) gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) response = api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -959,7 +961,7 @@ def test_metric_get_hit(self): def test_metric_update_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.metric_update( @@ -969,7 +971,7 @@ def test_metric_update_error(self): def test_metric_update_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.metric_update( @@ -983,7 +985,7 @@ def test_metric_update_hit(self): description=self.DESCRIPTION, filter=self.FILTER) gax_api = _GAXMetricsAPI(_update_log_metric_response=response) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.metric_update( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -1000,7 +1002,7 @@ def test_metric_update_hit(self): def test_metric_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(GaxError): api.metric_delete(self.PROJECT, self.METRIC_NAME) @@ -1008,14 +1010,14 @@ def test_metric_delete_error(self): def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI(_log_metric_not_found=True) - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) with self.assertRaises(NotFound): api.metric_delete(self.PROJECT, self.METRIC_NAME) def test_metric_delete_hit(self): gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api) + api = self._makeOne(gax_api, None) api.metric_delete(self.PROJECT, self.METRIC_NAME) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 5ab843f8521a..c93be6ea8f24 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -129,8 +129,9 @@ def _generated_api(*args, **kw): class _GaxSinksAPI(object): - def __init__(self, _wrapped): + def __init__(self, _wrapped, client): self._wrapped = _wrapped + self.client = client creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) @@ -143,6 +144,7 @@ def __init__(self, _wrapped): self.assertIsInstance(api, _GaxSinksAPI) self.assertIs(api._wrapped, wrapped) + self.assertIs(api.client, client) # API instance is cached again = client.sinks_api self.assertIs(again, api) @@ -176,8 +178,9 @@ def _generated_api(*args, **kw): class _GaxMetricsAPI(object): - def __init__(self, _wrapped): + def 
__init__(self, _wrapped, client): self._wrapped = _wrapped + self.client = client creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) @@ -190,6 +193,7 @@ def __init__(self, _wrapped): self.assertIsInstance(api, _GaxMetricsAPI) self.assertIs(api._wrapped, wrapped) + self.assertIs(api.client, client) # API instance is cached again = client.metrics_api self.assertIs(again, api) diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test_connection.py index d994f6f66289..ccf5fb75653f 100644 --- a/packages/google-cloud-logging/unit_tests/test_connection.py +++ b/packages/google-cloud-logging/unit_tests/test_connection.py @@ -300,8 +300,10 @@ def _makeOne(self, *args, **kw): def test_ctor(self): connection = object() - api = self._makeOne(connection) + client = _Client(connection) + api = self._makeOne(client) self.assertIs(api._connection, connection) + self.assertIs(api._client, client) def test_list_sinks_no_paging(self): TOKEN = 'TOKEN' @@ -314,7 +316,8 @@ def test_list_sinks_no_paging(self): 'nextPageToken': TOKEN, } conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) sinks, token = api.list_sinks(self.PROJECT) @@ -337,7 +340,8 @@ def test_list_sinks_w_paging(self): }], } conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) sinks, token = api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -360,7 +364,8 @@ def test_sink_create_conflict(self): } conn = _Connection() conn._raise_conflict = True - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(Conflict): api.sink_create( @@ -379,7 +384,8 @@ def test_sink_create_ok(self): 'destination': self.DESTINATION_URI, } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -392,7 +398,8 @@ def test_sink_create_ok(self): def test_sink_get_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -408,7 +415,8 @@ def test_sink_get_hit(self): 'destination': self.DESTINATION_URI, } conn = _Connection(RESPONSE) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) response = api.sink_get(self.PROJECT, self.SINK_NAME) @@ -425,7 +433,8 @@ def test_sink_update_miss(self): 'destination': self.DESTINATION_URI, } conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.sink_update( @@ -444,7 +453,8 @@ def test_sink_update_hit(self): 'destination': self.DESTINATION_URI, } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.sink_update( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -457,7 +467,8 @@ def test_sink_update_hit(self): def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -468,7 +479,8 @@ def test_sink_delete_miss(self): def test_sink_delete_hit(self): conn = 
_Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -503,7 +515,8 @@ def test_list_metrics_no_paging(self): 'nextPageToken': TOKEN, } conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) metrics, token = api.list_metrics(self.PROJECT) @@ -524,7 +537,8 @@ def test_list_metrics_w_paging(self): }], } conn = _Connection(RETURNED) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) metrics, token = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -547,7 +561,8 @@ def test_metric_create_conflict(self): } conn = _Connection() conn._raise_conflict = True - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(Conflict): api.metric_create( @@ -566,7 +581,8 @@ def test_metric_create_ok(self): 'description': self.DESCRIPTION, } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.metric_create( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -579,7 +595,8 @@ def test_metric_create_ok(self): def test_metric_get_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -595,7 +612,8 @@ def test_metric_get_hit(self): 'description': self.DESCRIPTION, } conn = _Connection(RESPONSE) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) response = api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -612,7 +630,8 @@ def test_metric_update_miss(self): 'description': self.DESCRIPTION, } conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.metric_update( @@ -631,7 +650,8 @@ def test_metric_update_hit(self): 'description': self.DESCRIPTION, } conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.metric_update( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -644,7 +664,8 @@ def test_metric_update_hit(self): def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) with self.assertRaises(NotFound): api.metric_delete(self.PROJECT, self.METRIC_NAME) @@ -655,7 +676,8 @@ def test_metric_delete_miss(self): def test_metric_delete_hit(self): conn = _Connection({}) - api = self._makeOne(conn) + client = _Client(conn) + api = self._makeOne(client) api.metric_delete(self.PROJECT, self.METRIC_NAME) From b6fcd6bc9c4cbaf1faa8474bb00c187ac372de88 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 15:41:52 -0700 Subject: [PATCH 027/855] Using Logging client's use_gax attr. in API properties. 
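The sinks_api and metrics_api properties now consult the per-client
_use_gax flag rather than the module-level _USE_GAX constant, so the
transport choice follows each client instance. Roughly, the cached
property pattern now looks like this (a minimal sketch with stand-in
classes, not the real constructors):

    class _JSONSinksAPI(object):  # stand-in for the HTTP/JSON API class
        def __init__(self, client):
            self._client = client


    class _GAXSinksAPI(object):  # stand-in for the gRPC/GAX API class
        def __init__(self, client):
            self._client = client


    class Client(object):
        def __init__(self, use_gax=True):
            self._use_gax = use_gax  # per-client flag, not a module constant
            self._sinks_api = None

        @property
        def sinks_api(self):
            # Build the API object lazily, once per client, choosing the
            # transport from this client's own flag.
            if self._sinks_api is None:
                if self._use_gax:
                    self._sinks_api = _GAXSinksAPI(self)
                else:
                    self._sinks_api = _JSONSinksAPI(self)
            return self._sinks_api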
--- .../google/cloud/logging/client.py | 4 ++-- .../google-cloud-logging/unit_tests/test_client.py | 14 ++++++++------ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index f968668e57f7..4d05972b67d3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -111,7 +111,7 @@ def sinks_api(self): https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks """ if self._sinks_api is None: - if _USE_GAX: + if self._use_gax: generated = GeneratedSinksAPI() self._sinks_api = GAXSinksAPI(generated, self) else: @@ -126,7 +126,7 @@ def metrics_api(self): https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics """ if self._metrics_api is None: - if _USE_GAX: + if self._use_gax: generated = GeneratedMetricsAPI() self._metrics_api = GAXMetricsAPI(generated, self) else: diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index c93be6ea8f24..7b13164376c3 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -104,11 +104,12 @@ def test_sinks_api_wo_gax(self): from google.cloud.logging.connection import _SinksAPI from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - client = self._makeOne(self.PROJECT, credentials=_Credentials()) - conn = client.connection = object() with _Monkey(MUT, _USE_GAX=False): - api = client.sinks_api + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + + conn = client.connection = object() + api = client.sinks_api self.assertIsInstance(api, _SinksAPI) self.assertIs(api._connection, conn) @@ -153,11 +154,12 @@ def test_metrics_api_wo_gax(self): from google.cloud.logging.connection import _MetricsAPI from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - client = self._makeOne(self.PROJECT, credentials=_Credentials()) - conn = client.connection = object() with _Monkey(MUT, _USE_GAX=False): - api = client.metrics_api + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + + conn = client.connection = object() + api = client.metrics_api self.assertIsInstance(api, _MetricsAPI) self.assertIs(api._connection, conn) From 90e19b7ac1ca6e4453401d1af4cb4000e96fa1a1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 14:34:47 -0700 Subject: [PATCH 028/855] Updating list_sinks() to Iterator pattern. 
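Callers now get a lazy iterator instead of a (sinks, token) tuple;
pages are fetched on demand and the pagination token lives on the
iterator. A sketch of the new calling convention, mirroring the
updated unit tests (a client with a default project and credentials is
assumed):

    import six
    from google.cloud import logging

    client = logging.Client()  # assumes default project/credentials

    # Flat iteration; pages are fetched transparently.
    for sink in client.list_sinks():
        print(sink.name)

    # Or one page at a time, resuming later with the token.
    iterator = client.list_sinks(page_size=42)
    page = six.next(iterator.pages)
    sinks = list(page)
    token = iterator.next_page_token  # pass back as page_token to resume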
--- .../google/cloud/logging/_gax.py | 23 +++++++++-- .../google/cloud/logging/client.py | 14 +++---- .../google/cloud/logging/connection.py | 39 ++++++++++++------- 3 files changed, 50 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index ee5da3a5e2a1..1825e48cb94e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -32,6 +32,7 @@ from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator from google.cloud.logging._helpers import entry_from_resource +from google.cloud.logging.sink import Sink class _LoggingAPI(object): @@ -178,10 +179,7 @@ def list_sinks(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_sinks(path, page_size=page_size, options=options) - sinks = [MessageToDict(log_sink_pb) - for log_sink_pb in page_iter.next()] - token = page_iter.page_token or None - return sinks, token + return GAXIterator(self._client, page_iter, _item_to_sink) def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. @@ -481,3 +479,20 @@ def _item_to_entry(iterator, entry_pb, loggers): """ resource = MessageToDict(entry_pb) return entry_from_resource(resource, iterator.client, loggers) + + +def _item_to_sink(iterator, log_sink_pb): + """Convert a sink protobuf to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type log_sink_pb: + :class:`~google.logging.v2.logging_config_pb2.LogSink` + :param log_sink_pb: Sink protobuf returned from the API. + + :rtype: :class:`~google.cloud.logging.sink.Sink` + :returns: The next sink in the page. + """ + resource = MessageToDict(log_sink_pb) + return Sink.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 4d05972b67d3..692b3c6a0c12 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -222,17 +222,13 @@ def list_sinks(self, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: tuple, (list, str) - :returns: list of :class:`google.cloud.logging.sink.Sink`, plus a - "next page token" string: if not None, indicates that - more sinks can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.logging.sink.Sink` + accessible to the current client. """ - resources, token = self.sinks_api.list_sinks( + return self.sinks_api.list_sinks( self.project, page_size, page_token) - sinks = [Sink.from_api_repr(resource, self) - for resource in resources] - return sinks, token def metric(self, name, filter_=None, description=''): """Creates a metric bound to the current client. 
diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 2d9cac58075c..2f50eb988cbc 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -19,6 +19,7 @@ from google.cloud import connection as base_connection from google.cloud.iterator import HTTPIterator from google.cloud.logging._helpers import entry_from_resource +from google.cloud.logging.sink import Sink class Connection(base_connection.JSONConnection): @@ -209,24 +210,21 @@ def list_sinks(self, project, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more sinks can be retrieved - with another call (pass that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.logging.sink.Sink` + accessible to the current API. """ - params = {} + extra_params = {} if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token + extra_params['pageSize'] = page_size path = '/projects/%s/sinks' % (project,) - resp = self._connection.api_request( - method='GET', path=path, query_params=params) - sinks = resp.get('sinks', ()) - return sinks, resp.get('nextPageToken') + return HTTPIterator( + client=self._client, path=path, + item_to_value=_item_to_sink, items_key='sinks', + page_token=page_token, extra_params=extra_params) def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. @@ -484,3 +482,18 @@ def _item_to_entry(iterator, resource, loggers): :returns: The next log entry in the page. """ return entry_from_resource(resource, iterator.client, loggers) + + +def _item_to_sink(iterator, resource): + """Convert a sink resource to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: Sink JSON resource returned from the API. + + :rtype: :class:`~google.cloud.logging.sink.Sink` + :returns: The next sink in the page. + """ + return Sink.from_api_repr(resource, iterator.client) From be692482e1d710b4264924b53e70109c10afa2e8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 14:56:43 -0700 Subject: [PATCH 029/855] Docstring update follow-up from PR #2636. --- .../google-cloud-logging/google/cloud/logging/_helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index 0e801cb66a0a..8e17a9538e76 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -24,7 +24,7 @@ def entry_from_resource(resource, client, loggers): """Detect correct entry type from resource and instantiate. :type resource: dict - :param resource: one entry resource from API response + :param resource: One entry resource from API response. :type client: :class:`~google.cloud.logging.client.Client` :param client: Client that owns the log entry. 
@@ -45,4 +45,4 @@ def entry_from_resource(resource, client, loggers): elif 'protoPayload' in resource: return ProtobufEntry.from_api_repr(resource, client, loggers) - raise ValueError('Cannot parse log entry resource') + raise ValueError('Cannot parse log entry resource.') From 8b41e9692da5518513c7237d68a9c5c83861d205 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 15:37:49 -0700 Subject: [PATCH 030/855] Updating unit tests after list_sinks() iterator update. --- .../unit_tests/test__gax.py | 52 +++++++++----- .../unit_tests/test_client.py | 72 +++++++++++++------ .../unit_tests/test_connection.py | 60 ++++++++++++---- 3 files changed, 132 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index e9656cdd024c..ed5f1f12c9f0 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -612,27 +612,36 @@ def test_ctor(self): self.assertIs(api._client, client) def test_list_sinks_no_paging(self): + import six from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.sink import Sink TOKEN = 'TOKEN' - SINKS = [{ - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - }] sink_pb = LogSink(name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=self.FILTER) response = _GAXPageIterator([sink_pb], page_token=TOKEN) gax_api = _GAXSinksAPI(_list_sinks_response=response) - api = self._makeOne(gax_api, None) + client = object() + api = self._makeOne(gax_api, client) - sinks, token = api.list_sinks(self.PROJECT) + iterator = api.list_sinks(self.PROJECT) + page = six.next(iterator.pages) + sinks = list(page) + token = iterator.next_page_token - self.assertEqual(sinks, SINKS) + # First check the token. self.assertEqual(token, TOKEN) + # Then check the sinks returned. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) project, page_size, options = gax_api._list_sinks_called_with self.assertEqual(project, self.PROJECT_PATH) @@ -640,28 +649,35 @@ def test_list_sinks_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_sinks_w_paging(self): - from google.cloud._testing import _GAXPageIterator from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.sink import Sink TOKEN = 'TOKEN' PAGE_SIZE = 42 - SINKS = [{ - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - }] sink_pb = LogSink(name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=self.FILTER) response = _GAXPageIterator([sink_pb]) gax_api = _GAXSinksAPI(_list_sinks_response=response) - api = self._makeOne(gax_api, None) + client = object() + api = self._makeOne(gax_api, client) - sinks, token = api.list_sinks( + iterator = api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + sinks = list(iterator) + token = iterator.next_page_token - self.assertEqual(sinks, SINKS) + # First check the token. self.assertIsNone(token) + # Then check the sinks returned. 
+ self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) project, page_size, options = gax_api._list_sinks_called_with self.assertEqual(project, self.PROJECT_PATH) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 7b13164376c3..8c94a088bbcc 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -365,7 +365,9 @@ def test_sink_explicit(self): self.assertEqual(sink.project, self.PROJECT) def test_list_sinks_no_paging(self): + import six from google.cloud.logging.sink import Sink + PROJECT = 'PROJECT' TOKEN = 'TOKEN' SINK_NAME = 'sink_name' @@ -375,25 +377,42 @@ def test_list_sinks_no_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials()) - api = client._sinks_api = _DummySinksAPI() - api._list_sinks_response = SINKS, TOKEN + client = self._makeOne(project=PROJECT, credentials=_Credentials(), + use_gax=False) + returned = { + 'sinks': SINKS, + 'nextPageToken': TOKEN, + } + client.connection = _Connection(returned) - sinks, token = client.list_sinks() + iterator = client.list_sinks() + page = six.next(iterator.pages) + sinks = list(page) + token = iterator.next_page_token + # First check the token. + self.assertEqual(token, TOKEN) + # Then check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) self.assertEqual(sink.name, SINK_NAME) self.assertEqual(sink.filter_, FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) - self.assertEqual(token, TOKEN) - self.assertEqual(api._list_sinks_called_with, - (PROJECT, None, None)) + # Verify the mocked transport. + called_with = client.connection._called_with + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': {}, + }) def test_list_sinks_with_paging(self): from google.cloud.logging.sink import Sink + PROJECT = 'PROJECT' SINK_NAME = 'sink_name' FILTER = 'logName:syslog AND severity>=ERROR' @@ -404,21 +423,39 @@ def test_list_sinks_with_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials()) - api = client._sinks_api = _DummySinksAPI() - api._list_sinks_response = SINKS, None + client = self._makeOne(project=PROJECT, credentials=_Credentials(), + use_gax=False) + returned = { + 'sinks': SINKS, + } + client.connection = _Connection(returned) - sinks, token = client.list_sinks(PAGE_SIZE, TOKEN) + iterator = client.list_sinks(PAGE_SIZE, TOKEN) + sinks = list(iterator) + token = iterator.next_page_token + # First check the token. + self.assertIsNone(token) + # Then check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) self.assertEqual(sink.name, SINK_NAME) self.assertEqual(sink.filter_, FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIsNone(token) - self.assertEqual(api._list_sinks_called_with, - (PROJECT, PAGE_SIZE, TOKEN)) + self.assertIs(sink.client, client) + + # Verify the mocked transport. 
+ called_with = client.connection._called_with + path = '/projects/%s/sinks' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': { + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + }, + }) def test_metric_defaults(self): from google.cloud.logging.metric import Metric @@ -513,13 +550,6 @@ def create_scoped(self, scope): return self -class _DummySinksAPI(object): - - def list_sinks(self, project, page_size, page_token): - self._list_sinks_called_with = (project, page_size, page_token) - return self._list_sinks_response - - class _DummyMetricsAPI(object): def list_metrics(self, project, page_size, page_token): diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test_connection.py index ccf5fb75653f..ec25c185e759 100644 --- a/packages/google-cloud-logging/unit_tests/test_connection.py +++ b/packages/google-cloud-logging/unit_tests/test_connection.py @@ -306,6 +306,9 @@ def test_ctor(self): self.assertIs(api._client, client) def test_list_sinks_no_paging(self): + import six + from google.cloud.logging.sink import Sink + TOKEN = 'TOKEN' RETURNED = { 'sinks': [{ @@ -319,17 +322,33 @@ def test_list_sinks_no_paging(self): client = _Client(conn) api = self._makeOne(client) - sinks, token = api.list_sinks(self.PROJECT) + iterator = api.list_sinks(self.PROJECT) + page = six.next(iterator.pages) + sinks = list(page) + token = iterator.next_page_token - self.assertEqual(sinks, RETURNED['sinks']) + # First check the token. self.assertEqual(token, TOKEN) - - self.assertEqual(conn._called_with['method'], 'GET') + # Then check the sinks returned. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) + + called_with = conn._called_with path = '/%s' % (self.LIST_SINKS_PATH,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['query_params'], {}) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': {}, + }) def test_list_sinks_w_paging(self): + from google.cloud.logging.sink import Sink + TOKEN = 'TOKEN' PAGE_SIZE = 42 RETURNED = { @@ -343,17 +362,32 @@ def test_list_sinks_w_paging(self): client = _Client(conn) api = self._makeOne(client) - sinks, token = api.list_sinks( + iterator = api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + sinks = list(iterator) + token = iterator.next_page_token - self.assertEqual(sinks, RETURNED['sinks']) + # First check the token. self.assertIsNone(token) - - self.assertEqual(conn._called_with['method'], 'GET') + # Then check the sinks returned. 
+ self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertIsInstance(sink, Sink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIs(sink.client, client) + + called_with = conn._called_with path = '/%s' % (self.LIST_SINKS_PATH,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': { + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + }, + }) def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict From 497af5856efc3c77d09518262b59e81ec881d8e1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 17:01:13 -0700 Subject: [PATCH 031/855] Removing explicit doctest blocks from Sphinx docs. This way we can gradually turn them on with **doctest** and make sure they work piece by piece. Also converted some implicit code blocks (`::`) and some implicit doctest blocks (`:` followed by `>>>`) into explicit code blocks. --- .../google/cloud/logging/handlers/handlers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index a6bd083c9944..e3b6d5b30da4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -55,7 +55,7 @@ class CloudLoggingHandler(logging.StreamHandler): Example: - .. doctest:: + .. code-block:: python import google.cloud.logging from google.cloud.logging.handlers import CloudLoggingHandler @@ -109,7 +109,7 @@ def setup_logging(handler, excluded_loggers=EXCLUDE_LOGGER_DEFAULTS): Example: - .. doctest:: + .. code-block:: python import logging import google.cloud.logging From 9cb27006039108417fc5e2ebe32b0ecc15101470 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 14:46:43 -0700 Subject: [PATCH 032/855] Updating list_metrics() to Iterator pattern. --- .../google/cloud/logging/_gax.py | 32 +++++++++++---- .../google/cloud/logging/client.py | 13 ++----- .../google/cloud/logging/connection.py | 39 ++++++++++++------- 3 files changed, 54 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 1825e48cb94e..72685c3fca94 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -33,6 +33,7 @@ from google.cloud.iterator import GAXIterator from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink +from google.cloud.logging.metric import Metric class _LoggingAPI(object): @@ -316,10 +317,10 @@ def list_metrics(self, project, page_size=0, page_token=None): passed, the API will return the first page of metrics. - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more metrics can be retrieved - with another call (pass that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.logging.metric.Metric` + accessible to the current API. 
""" if page_token is None: page_token = INITIAL_PAGE @@ -327,10 +328,7 @@ def list_metrics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_log_metrics( path, page_size=page_size, options=options) - metrics = [MessageToDict(log_metric_pb) - for log_metric_pb in page_iter.next()] - token = page_iter.page_token or None - return metrics, token + return GAXIterator(self._client, page_iter, _item_to_metric) def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. @@ -496,3 +494,21 @@ def _item_to_sink(iterator, log_sink_pb): """ resource = MessageToDict(log_sink_pb) return Sink.from_api_repr(resource, iterator.client) + + + +def _item_to_metric(iterator, log_metric_pb): + """Convert a metric protobuf to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type log_metric_pb: + :class:`~google.logging.v2.logging_metrics_pb2.LogMetric` + :param log_metric_pb: Metric protobuf returned from the API. + + :rtype: :class:`~google.cloud.logging.metric.Metric` + :returns: The next metric in the page. + """ + resource = MessageToDict(log_metric_pb) + return Metric.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 692b3c6a0c12..ec3cf8174719 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -267,14 +267,9 @@ def list_metrics(self, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: tuple, (list, str) - :returns: list of :class:`google.cloud.logging.metric.Metric`, plus a - "next page token" string: if not None, indicates that - more metrics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` + accessible to the current client. """ - resources, token = self.metrics_api.list_metrics( + return self.metrics_api.list_metrics( self.project, page_size, page_token) - metrics = [Metric.from_api_repr(resource, self) - for resource in resources] - return metrics, token diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/connection.py index 2f50eb988cbc..a1fa388b0f09 100644 --- a/packages/google-cloud-logging/google/cloud/logging/connection.py +++ b/packages/google-cloud-logging/google/cloud/logging/connection.py @@ -20,6 +20,7 @@ from google.cloud.iterator import HTTPIterator from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink +from google.cloud.logging.metric import Metric class Connection(base_connection.JSONConnection): @@ -347,24 +348,21 @@ def list_metrics(self, project, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more metrics can be retrieved - with another call (pass that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.logging.metric.Metric` + accessible to the current API. 
""" - params = {} + extra_params = {} if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token + extra_params['pageSize'] = page_size path = '/projects/%s/metrics' % (project,) - resp = self._connection.api_request( - method='GET', path=path, query_params=params) - metrics = resp.get('metrics', ()) - return metrics, resp.get('nextPageToken') + return HTTPIterator( + client=self._client, path=path, + item_to_value=_item_to_metric, items_key='metrics', + page_token=page_token, extra_params=extra_params) def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. @@ -497,3 +495,18 @@ def _item_to_sink(iterator, resource): :returns: The next sink in the page. """ return Sink.from_api_repr(resource, iterator.client) + + +def _item_to_metric(iterator, resource): + """Convert a metric resource to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: Metric JSON resource returned from the API. + + :rtype: :class:`~google.cloud.logging.metric.Metric` + :returns: The next metric in the page. + """ + return Metric.from_api_repr(resource, iterator.client) From da734faac6efe5cbb22d06179cbcdc2ad8101f1c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 16:15:13 -0700 Subject: [PATCH 033/855] Updating unit tests are list_metrics() iterator update. --- .../google/cloud/logging/_gax.py | 1 - .../unit_tests/test__gax.py | 52 ++++++++----- .../unit_tests/test_client.py | 75 +++++++++++++------ .../unit_tests/test_connection.py | 55 +++++++++++--- 4 files changed, 132 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 72685c3fca94..5e096841324c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -496,7 +496,6 @@ def _item_to_sink(iterator, log_sink_pb): return Sink.from_api_repr(resource, iterator.client) - def _item_to_metric(iterator, log_metric_pb): """Convert a metric protobuf to the native object. 
diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index ed5f1f12c9f0..90206e26a388 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -843,27 +843,36 @@ def test_ctor(self): self.assertIs(api._gax_api, gax_api) def test_list_metrics_no_paging(self): + import six from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.metric import Metric TOKEN = 'TOKEN' - METRICS = [{ - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - }] metric_pb = LogMetric(name=self.METRIC_PATH, description=self.DESCRIPTION, filter=self.FILTER) response = _GAXPageIterator([metric_pb], page_token=TOKEN) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - api = self._makeOne(gax_api, None) + client = object() + api = self._makeOne(gax_api, client) - metrics, token = api.list_metrics(self.PROJECT) + iterator = api.list_metrics(self.PROJECT) + page = six.next(iterator.pages) + metrics = list(page) + token = iterator.next_page_token - self.assertEqual(metrics, METRICS) + # First check the token. self.assertEqual(token, TOKEN) + # Then check the metrics returned. + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIs(metric.client, client) project, page_size, options = gax_api._list_log_metrics_called_with self.assertEqual(project, self.PROJECT_PATH) @@ -871,28 +880,35 @@ def test_list_metrics_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_metrics_w_paging(self): - from google.cloud._testing import _GAXPageIterator from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud._testing import _GAXPageIterator + from google.cloud.logging.metric import Metric TOKEN = 'TOKEN' PAGE_SIZE = 42 - METRICS = [{ - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - }] metric_pb = LogMetric(name=self.METRIC_PATH, description=self.DESCRIPTION, filter=self.FILTER) response = _GAXPageIterator([metric_pb]) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - api = self._makeOne(gax_api, None) + client = object() + api = self._makeOne(gax_api, client) - metrics, token = api.list_metrics( + iterator = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + metrics = list(iterator) + token = iterator.next_page_token - self.assertEqual(metrics, METRICS) + # First check the token. self.assertIsNone(token) + # Then check the metrics returned. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertIs(metric.client, client) project, page_size, options = gax_api._list_log_metrics_called_with self.assertEqual(project, self.PROJECT_PATH) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 8c94a088bbcc..490372f03a96 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -485,7 +485,9 @@ def test_metric_explicit(self): self.assertEqual(metric.project, self.PROJECT) def test_list_metrics_no_paging(self): + import six from google.cloud.logging.metric import Metric + PROJECT = 'PROJECT' TOKEN = 'TOKEN' METRICS = [{ @@ -493,21 +495,39 @@ def test_list_metrics_no_paging(self): 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials()) - api = client._metrics_api = _DummyMetricsAPI() - api._list_metrics_response = METRICS, TOKEN + client = self._makeOne(project=PROJECT, credentials=_Credentials(), + use_gax=False) + returned = { + 'metrics': METRICS, + 'nextPageToken': TOKEN, + } + client.connection = _Connection(returned) - metrics, token = client.list_metrics() + # Execute request. + iterator = client.list_metrics() + page = six.next(iterator.pages) + metrics = list(page) + token = iterator.next_page_token + # First check the token. + self.assertEqual(token, TOKEN) + # Then check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) - self.assertEqual(token, TOKEN) - self.assertEqual(api._list_metrics_called_with, - (PROJECT, None, None)) + self.assertIs(metric.client, client) + + # Verify mocked transport. + called_with = client.connection._called_with + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': {}, + }) def test_list_metrics_with_paging(self): from google.cloud.logging.metric import Metric @@ -519,22 +539,40 @@ def test_list_metrics_with_paging(self): 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials()) - api = client._metrics_api = _DummyMetricsAPI() - api._list_metrics_response = METRICS, None + client = self._makeOne(project=PROJECT, credentials=_Credentials(), + use_gax=False) + returned = { + 'metrics': METRICS, + } + client.connection = _Connection(returned) # Execute request. - metrics, token = client.list_metrics(PAGE_SIZE, TOKEN) - # Test values are correct. + iterator = client.list_metrics(PAGE_SIZE, TOKEN) + metrics = list(iterator) + token = iterator.next_page_token + + # First check the token. + self.assertIsNone(token) + # Then check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIsNone(token) - self.assertEqual(api._list_metrics_called_with, - (PROJECT, PAGE_SIZE, TOKEN)) + self.assertIs(metric.client, client) + + # Verify mocked transport. + called_with = client.connection._called_with + path = '/projects/%s/metrics' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': { + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + }, + }) class _Credentials(object): @@ -550,13 +588,6 @@ def create_scoped(self, scope): return self -class _DummyMetricsAPI(object): - - def list_metrics(self, project, page_size, page_token): - self._list_metrics_called_with = (project, page_size, page_token) - return self._list_metrics_response - - class _Connection(object): _called_with = None diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test_connection.py index ec25c185e759..5d41877476d8 100644 --- a/packages/google-cloud-logging/unit_tests/test_connection.py +++ b/packages/google-cloud-logging/unit_tests/test_connection.py @@ -540,6 +540,9 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_list_metrics_no_paging(self): + import six + from google.cloud.logging.metric import Metric + TOKEN = 'TOKEN' RETURNED = { 'metrics': [{ @@ -552,16 +555,33 @@ def test_list_metrics_no_paging(self): client = _Client(conn) api = self._makeOne(client) - metrics, token = api.list_metrics(self.PROJECT) + iterator = api.list_metrics(self.PROJECT) + page = six.next(iterator.pages) + metrics = list(page) + token = iterator.next_page_token - self.assertEqual(metrics, RETURNED['metrics']) + # First check the token. self.assertEqual(token, TOKEN) + # Then check the metrics returned. + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertIs(metric.client, client) - self.assertEqual(conn._called_with['method'], 'GET') + called_with = conn._called_with path = '/%s' % (self.LIST_METRICS_PATH,) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': {}, + }) def test_list_metrics_w_paging(self): + from google.cloud.logging.metric import Metric + TOKEN = 'TOKEN' PAGE_SIZE = 42 RETURNED = { @@ -574,17 +594,32 @@ def test_list_metrics_w_paging(self): client = _Client(conn) api = self._makeOne(client) - metrics, token = api.list_metrics( + iterator = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + metrics = list(iterator) + token = iterator.next_page_token - self.assertEqual(metrics, RETURNED['metrics']) + # First check the token. self.assertIsNone(token) + # Then check the metrics returned. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertIsInstance(metric, Metric) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertIs(metric.client, client) - self.assertEqual(conn._called_with['method'], 'GET') + called_with = conn._called_with path = '/%s' % (self.LIST_METRICS_PATH,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': path, + 'query_params': { + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + }, + }) def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict From d53e488c329f7e7e4d35d2a9c29e12886f0ce2a0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 1 Nov 2016 09:23:49 -0700 Subject: [PATCH 034/855] Updating list_metrics() unit tests to reflect paging / non-paging. --- .../unit_tests/test_client.py | 51 +++++++++---------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 490372f03a96..609d8b40728e 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -485,33 +485,26 @@ def test_metric_explicit(self): self.assertEqual(metric.project, self.PROJECT) def test_list_metrics_no_paging(self): - import six from google.cloud.logging.metric import Metric - PROJECT = 'PROJECT' - TOKEN = 'TOKEN' - METRICS = [{ + metrics = [{ 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials(), - use_gax=False) + client = self._makeOne( + project=self.PROJECT, credentials=_Credentials(), + use_gax=False) returned = { - 'metrics': METRICS, - 'nextPageToken': TOKEN, + 'metrics': metrics, } client.connection = _Connection(returned) # Execute request. iterator = client.list_metrics() - page = six.next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token + metrics = list(iterator) - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. + # Check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) @@ -530,29 +523,33 @@ def test_list_metrics_no_paging(self): }) def test_list_metrics_with_paging(self): + import six from google.cloud.logging.metric import Metric - PROJECT = 'PROJECT' - TOKEN = 'TOKEN' - PAGE_SIZE = 42 - METRICS = [{ + + token = 'TOKEN' + next_token = 'T00KEN' + page_size = 42 + metrics = [{ 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials(), - use_gax=False) + client = self._makeOne( + project=self.PROJECT, credentials=_Credentials(), + use_gax=False) returned = { - 'metrics': METRICS, + 'metrics': metrics, + 'nextPageToken': next_token, } client.connection = _Connection(returned) # Execute request. - iterator = client.list_metrics(PAGE_SIZE, TOKEN) - metrics = list(iterator) - token = iterator.next_page_token + iterator = client.list_metrics(page_size, token) + page = six.next(iterator.pages) + metrics = list(page) # First check the token. 
- self.assertIsNone(token) + self.assertEqual(iterator.next_page_token, next_token) # Then check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) self.assertIs(metric.client, client) # Verify mocked transport. called_with = client.connection._called_with path = '/projects/%s/metrics' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', 'path': path, 'query_params': { 'pageSize': page_size, 'pageToken': token, }, }) From e1cf447f130cbdb8d67633ffb18b989d42dbd3b5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 10:12:12 -0700 Subject: [PATCH 035/855] Adding PyPI badges to package READMEs. --- packages/google-cloud-logging/README.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 78e301366e99..6dd9f74e62a3 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -5,6 +5,8 @@ Python Client for Stackdriver Logging .. _Stackdriver Logging: https://cloud.google.com/logging/ +|pypi| |versions| + - `Documentation`_ .. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html @@ -54,3 +56,8 @@ See the ``google-cloud-python`` API `logging documentation`_ to learn how to connect to Stackdriver Logging using this Client Library. .. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg + :target: https://pypi.python.org/pypi/google-cloud-logging +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg + :target: https://pypi.python.org/pypi/google-cloud-logging From 90c2320b7f159828a8522df77ec26bb396f0c97b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 01:18:11 -0700 Subject: [PATCH 036/855] Making a gRPC channel with the Client's credentials. This overrides the default behavior within GAX, which uses application default credentials. 
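Each make_gax_*_api factory follows the same shape: build a secure
channel from the client's stored credentials, then hand it to the
generated API class. For example, for the logging API (sketch; an
already-constructed client whose connection carries the desired
credentials is assumed):

    from google.cloud._helpers import make_secure_channel
    from google.cloud.connection import DEFAULT_USER_AGENT
    from google.cloud.gapic.logging.v2.logging_service_v2_api import (
        LoggingServiceV2Api)

    # `client` is an existing logging Client; its credentials, not the
    # application default credentials, are baked into the channel.
    channel = make_secure_channel(
        client.connection.credentials, DEFAULT_USER_AGENT,
        LoggingServiceV2Api.SERVICE_ADDRESS)
    generated = LoggingServiceV2Api(channel=channel)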
--- .../google/cloud/logging/_gax.py | 56 +++++++ .../google/cloud/logging/client.py | 27 ++-- .../unit_tests/test__gax.py | 138 ++++++++++++++++++ .../unit_tests/test_client.py | 93 ++++-------- 4 files changed, 235 insertions(+), 79 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 5e096841324c..1df2be5daf70 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -16,6 +16,12 @@ import functools +from google.cloud.gapic.logging.v2.config_service_v2_api import ( + ConfigServiceV2Api) +from google.cloud.gapic.logging.v2.logging_service_v2_api import ( + LoggingServiceV2Api) +from google.cloud.gapic.logging.v2.metrics_service_v2_api import ( + MetricsServiceV2Api) from google.gax import CallOptions from google.gax import INITIAL_PAGE from google.gax.errors import GaxError @@ -28,6 +34,8 @@ from grpc import StatusCode from google.cloud._helpers import _datetime_to_rfc3339 +from google.cloud._helpers import make_secure_channel +from google.cloud.connection import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator @@ -511,3 +519,51 @@ def _item_to_metric(iterator, log_metric_pb): """ resource = MessageToDict(log_metric_pb) return Metric.from_api_repr(resource, iterator.client) + + +def make_gax_logging_api(client): + """Create an instance of the GAX Logging API. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that holds configuration details. + + :rtype: :class:`_LoggingAPI` + :returns: A metrics API instance with the proper credentials. + """ + channel = make_secure_channel( + client.connection.credentials, DEFAULT_USER_AGENT, + LoggingServiceV2Api.SERVICE_ADDRESS) + generated = LoggingServiceV2Api(channel=channel) + return _LoggingAPI(generated, client) + + +def make_gax_metrics_api(client): + """Create an instance of the GAX Metrics API. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that holds configuration details. + + :rtype: :class:`_MetricsAPI` + :returns: A metrics API instance with the proper credentials. + """ + channel = make_secure_channel( + client.connection.credentials, DEFAULT_USER_AGENT, + MetricsServiceV2Api.SERVICE_ADDRESS) + generated = MetricsServiceV2Api(channel=channel) + return _MetricsAPI(generated, client) + + +def make_gax_sinks_api(client): + """Create an instance of the GAX Sinks API. + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that holds configuration details. + + :rtype: :class:`_SinksAPI` + :returns: A metrics API instance with the proper credentials. 
+ """ + channel = make_secure_channel( + client.connection.credentials, DEFAULT_USER_AGENT, + ConfigServiceV2Api.SERVICE_ADDRESS) + generated = ConfigServiceV2Api(channel=channel) + return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ec3cf8174719..801d4ec63dc8 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -17,20 +17,14 @@ import os try: - from google.cloud.gapic.logging.v2.config_service_v2_api import ( - ConfigServiceV2Api as GeneratedSinksAPI) - from google.cloud.gapic.logging.v2.logging_service_v2_api import ( - LoggingServiceV2Api as GeneratedLoggingAPI) - from google.cloud.gapic.logging.v2.metrics_service_v2_api import ( - MetricsServiceV2Api as GeneratedMetricsAPI) - from google.cloud.logging._gax import _LoggingAPI as GAXLoggingAPI - from google.cloud.logging._gax import _MetricsAPI as GAXMetricsAPI - from google.cloud.logging._gax import _SinksAPI as GAXSinksAPI + from google.cloud.logging._gax import make_gax_logging_api + from google.cloud.logging._gax import make_gax_metrics_api + from google.cloud.logging._gax import make_gax_sinks_api except ImportError: # pragma: NO COVER _HAVE_GAX = False - GeneratedLoggingAPI = GAXLoggingAPI = None - GeneratedMetricsAPI = GAXMetricsAPI = None - GeneratedSinksAPI = GAXSinksAPI = None + make_gax_logging_api = None + make_gax_metrics_api = None + make_gax_sinks_api = None else: _HAVE_GAX = True @@ -97,8 +91,7 @@ def logging_api(self): """ if self._logging_api is None: if self._use_gax: - generated = GeneratedLoggingAPI() - self._logging_api = GAXLoggingAPI(generated, self) + self._logging_api = make_gax_logging_api(self) else: self._logging_api = JSONLoggingAPI(self) return self._logging_api @@ -112,8 +105,7 @@ def sinks_api(self): """ if self._sinks_api is None: if self._use_gax: - generated = GeneratedSinksAPI() - self._sinks_api = GAXSinksAPI(generated, self) + self._sinks_api = make_gax_sinks_api(self) else: self._sinks_api = JSONSinksAPI(self) return self._sinks_api @@ -127,8 +119,7 @@ def metrics_api(self): """ if self._metrics_api is None: if self._use_gax: - generated = GeneratedMetricsAPI() - self._metrics_api = GAXMetricsAPI(generated, self) + self._metrics_api = make_gax_metrics_api(self) else: self._metrics_api = JSONMetricsAPI(self) return self._metrics_api diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 90206e26a388..7c79b2c1c10b 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1058,6 +1058,132 @@ def test_metric_delete_hit(self): self.assertIsNone(options) +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_make_gax_logging_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.logging._gax import make_gax_logging_api + return make_gax_logging_api(client) + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud.logging import _gax as MUT + + creds = object() + client = _Client(creds) + channels = [] + channel_args = [] + channel_obj = object() + generated = object() + + def make_channel(*args): + channel_args.append(args) + return channel_obj + + def generated_api(channel=None): + channels.append(channel) + return generated + + host = 'foo.apis.invalid' + generated_api.SERVICE_ADDRESS = 
host + + with _Monkey(MUT, LoggingServiceV2Api=generated_api, + make_secure_channel=make_channel): + logging_api = self._call_fut(client) + + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(creds, MUT.DEFAULT_USER_AGENT, host)]) + + self.assertIsInstance(logging_api, MUT._LoggingAPI) + self.assertIs(logging_api._gax_api, generated) + self.assertIs(logging_api._client, client) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_make_gax_metrics_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.logging._gax import make_gax_metrics_api + return make_gax_metrics_api(client) + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud.logging import _gax as MUT + + creds = object() + client = _Client(creds) + channels = [] + channel_args = [] + channel_obj = object() + generated = object() + + def make_channel(*args): + channel_args.append(args) + return channel_obj + + def generated_api(channel=None): + channels.append(channel) + return generated + + host = 'foo.apis.invalid' + generated_api.SERVICE_ADDRESS = host + + with _Monkey(MUT, MetricsServiceV2Api=generated_api, + make_secure_channel=make_channel): + metrics_api = self._call_fut(client) + + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(creds, MUT.DEFAULT_USER_AGENT, host)]) + + self.assertIsInstance(metrics_api, MUT._MetricsAPI) + self.assertIs(metrics_api._gax_api, generated) + self.assertIs(metrics_api._client, client) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_make_gax_sinks_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.logging._gax import make_gax_sinks_api + return make_gax_sinks_api(client) + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud.logging import _gax as MUT + + creds = object() + client = _Client(creds) + channels = [] + channel_args = [] + channel_obj = object() + generated = object() + + def make_channel(*args): + channel_args.append(args) + return channel_obj + + def generated_api(channel=None): + channels.append(channel) + return generated + + host = 'foo.apis.invalid' + generated_api.SERVICE_ADDRESS = host + + with _Monkey(MUT, ConfigServiceV2Api=generated_api, + make_secure_channel=make_channel): + sinks_api = self._call_fut(client) + + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(creds, MUT.DEFAULT_USER_AGENT, host)]) + + self.assertIsInstance(sinks_api, MUT._SinksAPI) + self.assertIs(sinks_api._gax_api, generated) + self.assertIs(sinks_api._client, client) + + class _GAXLoggingAPI(_GAXBaseAPI): _delete_not_found = False @@ -1172,3 +1298,15 @@ def delete_log_metric(self, metric_name, options=None): raise GaxError('error') if self._log_metric_not_found: raise GaxError('notfound', self._make_grpc_not_found()) + + +class _Connection(object): + + def __init__(self, credentials): + self.credentials = credentials + + +class _Client(object): + + def __init__(self, credentials): + self.connection = _Connection(credentials) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 609d8b40728e..1b8daa10f9fb 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -39,12 +39,10 @@ def test_ctor(self): self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): - from google.cloud._testing import 
_Monkey - from google.cloud.logging import client as MUT from google.cloud.logging.connection import _LoggingAPI - with _Monkey(MUT, _USE_GAX=False): - client = self._makeOne(self.PROJECT, credentials=_Credentials()) + client = self._makeOne(self.PROJECT, credentials=_Credentials(), + use_gax=False) conn = client.connection = object() api = client.logging_api @@ -58,31 +56,22 @@ def test_logging_api_w_gax(self): from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped + clients = [] + api_obj = object() - class _GaxLoggingAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self.client = client + def make_api(client_obj): + clients.append(client_obj) + return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=True) - with _Monkey(MUT, - _USE_GAX=True, - GeneratedLoggingAPI=_generated_api, - GAXLoggingAPI=_GaxLoggingAPI): + with _Monkey(MUT, make_gax_logging_api=make_api): api = client.logging_api - self.assertIsInstance(api, _GaxLoggingAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api.client, client) + self.assertIs(api, api_obj) + self.assertEqual(clients, [client]) # API instance is cached again = client.logging_api self.assertIs(again, api) @@ -121,31 +110,22 @@ def test_sinks_api_w_gax(self): from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxSinksAPI(object): + clients = [] + api_obj = object() - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self.client = client + def make_api(client_obj): + clients.append(client_obj) + return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=True) - with _Monkey(MUT, - _USE_GAX=True, - GeneratedSinksAPI=_generated_api, - GAXSinksAPI=_GaxSinksAPI): + with _Monkey(MUT, make_gax_sinks_api=make_api): api = client.sinks_api - self.assertIsInstance(api, _GaxSinksAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api.client, client) + self.assertIs(api, api_obj) + self.assertEqual(clients, [client]) # API instance is cached again = client.sinks_api self.assertIs(again, api) @@ -171,31 +151,22 @@ def test_metrics_api_w_gax(self): from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey - wrapped = object() - _called_with = [] + clients = [] + api_obj = object() - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxMetricsAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self.client = client + def make_api(client_obj): + clients.append(client_obj) + return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=True) - with _Monkey(MUT, - _USE_GAX=True, - GeneratedMetricsAPI=_generated_api, - GAXMetricsAPI=_GaxMetricsAPI): + with _Monkey(MUT, make_gax_metrics_api=make_api): api = client.metrics_api - self.assertIsInstance(api, _GaxMetricsAPI) - self.assertIs(api._wrapped, wrapped) - 
self.assertIs(api.client, client) + self.assertIs(api, api_obj) + self.assertEqual(clients, [client]) # API instance is cached again = client.metrics_api self.assertIs(again, api) From 51b5dcb769ac6b22ae37e20e2cb591afee1449e6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 12:49:02 -0700 Subject: [PATCH 037/855] Renaming connection module to _http in 5 packages. The packages are BigQuery, Datastore, Logging, Pub/Sub and Storage. The rename is in advance of a larger refactor, but so long as the connections are not public, the refactor can happen without user-facing implications. --- .../google/cloud/logging/{connection.py => _http.py} | 0 .../unit_tests/{test_connection.py => test__http.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-logging/google/cloud/logging/{connection.py => _http.py} (100%) rename packages/google-cloud-logging/unit_tests/{test_connection.py => test__http.py} (100%) diff --git a/packages/google-cloud-logging/google/cloud/logging/connection.py b/packages/google-cloud-logging/google/cloud/logging/_http.py similarity index 100% rename from packages/google-cloud-logging/google/cloud/logging/connection.py rename to packages/google-cloud-logging/google/cloud/logging/_http.py diff --git a/packages/google-cloud-logging/unit_tests/test_connection.py b/packages/google-cloud-logging/unit_tests/test__http.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_connection.py rename to packages/google-cloud-logging/unit_tests/test__http.py From 0cc14a63ab7f48683231019a6098e1cff8543441 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 13:22:29 -0700 Subject: [PATCH 038/855] Updating imports to reflect connection->_http module rename. --- .../google-cloud-logging/google/cloud/logging/__init__.py | 2 -- .../google-cloud-logging/google/cloud/logging/client.py | 8 ++++---- packages/google-cloud-logging/unit_tests/test__http.py | 8 ++++---- packages/google-cloud-logging/unit_tests/test_client.py | 8 ++++---- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index ea29393f6292..d83ea4798949 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -16,10 +16,8 @@ from google.cloud.logging.client import Client -from google.cloud.logging.connection import Connection -SCOPE = Connection.SCOPE ASCENDING = 'timestamp asc' """Query string to order by ascending timestamps.""" DESCENDING = 'timestamp desc' diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 801d4ec63dc8..b84fc9c6a736 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -30,10 +30,10 @@ from google.cloud.client import JSONClient from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.logging.connection import Connection -from google.cloud.logging.connection import _LoggingAPI as JSONLoggingAPI -from google.cloud.logging.connection import _MetricsAPI as JSONMetricsAPI -from google.cloud.logging.connection import _SinksAPI as JSONSinksAPI +from google.cloud.logging._http import Connection +from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI +from google.cloud.logging._http import _MetricsAPI as
JSONMetricsAPI +from google.cloud.logging._http import _SinksAPI as JSONSinksAPI from google.cloud.logging.logger import Logger from google.cloud.logging.metric import Metric from google.cloud.logging.sink import Sink diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 5d41877476d8..942450063487 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -21,7 +21,7 @@ class TestConnection(unittest.TestCase): FILTER = 'logName:syslog AND severity>=ERROR' def _getTargetClass(self): - from google.cloud.logging.connection import Connection + from google.cloud.logging._http import Connection return Connection def _makeOne(self, *args, **kw): @@ -43,7 +43,7 @@ class Test_LoggingAPI(unittest.TestCase): FILTER = 'logName:syslog AND severity>=ERROR' def _getTargetClass(self): - from google.cloud.logging.connection import _LoggingAPI + from google.cloud.logging._http import _LoggingAPI return _LoggingAPI def _makeOne(self, *args, **kw): @@ -292,7 +292,7 @@ class Test_SinksAPI(unittest.TestCase): DESTINATION_URI = 'faux.googleapis.com/destination' def _getTargetClass(self): - from google.cloud.logging.connection import _SinksAPI + from google.cloud.logging._http import _SinksAPI return _SinksAPI def _makeOne(self, *args, **kw): @@ -533,7 +533,7 @@ class Test_MetricsAPI(unittest.TestCase): DESCRIPTION = 'DESCRIPTION' def _getTargetClass(self): - from google.cloud.logging.connection import _MetricsAPI + from google.cloud.logging._http import _MetricsAPI return _MetricsAPI def _makeOne(self, *args, **kw): diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 1b8daa10f9fb..b498c13df656 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -39,7 +39,7 @@ def test_ctor(self): self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): - from google.cloud.logging.connection import _LoggingAPI + from google.cloud.logging._http import _LoggingAPI client = self._makeOne(self.PROJECT, credentials=_Credentials(), use_gax=False) @@ -79,7 +79,7 @@ def make_api(client_obj): def test_no_gax_ctor(self): from google.cloud._testing import _Monkey from google.cloud.logging import client as MUT - from google.cloud.logging.connection import _LoggingAPI + from google.cloud.logging._http import _LoggingAPI creds = _Credentials() with _Monkey(MUT, _USE_GAX=True): @@ -90,7 +90,7 @@ def test_no_gax_ctor(self): self.assertIsInstance(api, _LoggingAPI) def test_sinks_api_wo_gax(self): - from google.cloud.logging.connection import _SinksAPI + from google.cloud.logging._http import _SinksAPI from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey @@ -131,7 +131,7 @@ def make_api(client_obj): self.assertIs(again, api) def test_metrics_api_wo_gax(self): - from google.cloud.logging.connection import _MetricsAPI + from google.cloud.logging._http import _MetricsAPI from google.cloud.logging import client as MUT from google.cloud._testing import _Monkey From 238cfb8b0876c082d89d7d9b120d9c8c5a8ae199 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 21:26:07 -0800 Subject: [PATCH 039/855] Avoiding using filesystem deps in package tox.ini configs. 
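
Presumably the motivation is that tox installs a testenv's deps only when it
first creates the virtualenv, so a sibling package declared as a filesystem
dep can go stale between runs; moving the install into an explicit command
re-installs it on every invocation. A minimal sketch of the resulting
pattern, mirroring the tox.ini change below (assuming the local core
checkout lives at ../core, as in this repo layout):

    [testing]
    localdeps =
        pip install --upgrade {toxinidir}/../core

    [testenv]
    commands =
        {[testing]localdeps}
        py.test --quiet {posargs} unit_tests

The coverage env gains the same {[testing]localdeps} line before its
covercmd.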
--- packages/google-cloud-logging/tox.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini index abfe42a6d8e2..5dd0b038e806 100644 --- a/packages/google-cloud-logging/tox.ini +++ b/packages/google-cloud-logging/tox.ini @@ -3,8 +3,9 @@ envlist = py27,py34,py35,cover [testing] +localdeps = + pip install --upgrade {toxinidir}/../core deps = - {toxinidir}/../core pytest covercmd = py.test --quiet \ @@ -15,6 +16,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -23,6 +25,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From c5369e0acf1f1bc2837de5c9801948750792a1d0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:20:59 -0800 Subject: [PATCH 040/855] Renaming _getTargetClass to _get_target_class. Done via: $ git grep -l 'def _getTargetClass(self)' | \ > xargs sed -i s/'def _getTargetClass(self)'/'@staticmethod\n def _get_target_class()'/g --- .../unit_tests/handlers/test_handlers.py | 3 ++- .../handlers/transports/test_background_thread.py | 6 ++++-- .../unit_tests/handlers/transports/test_base.py | 3 ++- .../unit_tests/handlers/transports/test_sync.py | 3 ++- .../google-cloud-logging/unit_tests/test__gax.py | 9 ++++++--- .../google-cloud-logging/unit_tests/test__http.py | 12 ++++++++---- .../google-cloud-logging/unit_tests/test_client.py | 3 ++- .../google-cloud-logging/unit_tests/test_entries.py | 6 ++++-- .../google-cloud-logging/unit_tests/test_logger.py | 6 ++++-- .../google-cloud-logging/unit_tests/test_metric.py | 3 ++- .../google-cloud-logging/unit_tests/test_sink.py | 3 ++- 11 files changed, 38 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index f836e5a335d5..ca6a43fb821d 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -20,7 +20,8 @@ class TestCloudLoggingHandler(unittest.TestCase): PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.handlers.handlers import CloudLoggingHandler return CloudLoggingHandler diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index d9ae8297ec22..480c20610b3a 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -21,7 +21,8 @@ class TestBackgroundThreadHandler(unittest.TestCase): PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.handlers.transports import ( BackgroundThreadTransport) return BackgroundThreadTransport @@ -57,7 +58,8 @@ def test_send(self): class TestWorker(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.handlers.transports import background_thread return background_thread._Worker diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py index 9e3324e3ba0c..bba952004517 100644 --- 
a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py @@ -19,7 +19,8 @@ class TestBaseHandler(unittest.TestCase): PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.handlers.transports import Transport return Transport diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py index 7639a8f77787..f659f5da7e34 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -20,7 +20,8 @@ class TestSyncHandler(unittest.TestCase): PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.handlers.transports import SyncTransport return SyncTransport diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 7c79b2c1c10b..154b0b50880d 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -41,7 +41,8 @@ class Test_LoggingAPI(_Base, unittest.TestCase): LOG_NAME = 'log_name' LOG_PATH = 'projects/%s/logs/%s' % (_Base.PROJECT, LOG_NAME) - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._gax import _LoggingAPI return _LoggingAPI @@ -600,7 +601,8 @@ class Test_SinksAPI(_Base, unittest.TestCase): SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) DESTINATION_URI = 'faux.googleapis.com/destination' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._gax import _SinksAPI return _SinksAPI @@ -833,7 +835,8 @@ class Test_MetricsAPI(_Base, unittest.TestCase): METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) DESCRIPTION = 'Description' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._gax import _MetricsAPI return _MetricsAPI diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 942450063487..0d73e53148bf 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -20,7 +20,8 @@ class TestConnection(unittest.TestCase): PROJECT = 'project' FILTER = 'logName:syslog AND severity>=ERROR' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._http import Connection return Connection @@ -42,7 +43,8 @@ class Test_LoggingAPI(unittest.TestCase): LOGGER_NAME = 'LOGGER_NAME' FILTER = 'logName:syslog AND severity>=ERROR' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._http import _LoggingAPI return _LoggingAPI @@ -291,7 +293,8 @@ class Test_SinksAPI(unittest.TestCase): SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) DESTINATION_URI = 'faux.googleapis.com/destination' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging._http import _SinksAPI return _SinksAPI @@ -532,7 +535,8 @@ class Test_MetricsAPI(unittest.TestCase): METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) DESCRIPTION = 'DESCRIPTION' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): 
from google.cloud.logging._http import _MetricsAPI return _MetricsAPI diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index b498c13df656..164102564668 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -26,7 +26,8 @@ class TestClient(unittest.TestCase): FILTER = 'logName:syslog AND severity>=ERROR' DESCRIPTION = 'DESCRIPTION' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.client import Client return Client diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index 5a78243b1336..b207cdbb8008 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -41,7 +41,8 @@ class Test_BaseEntry(unittest.TestCase): PROJECT = 'PROJECT' LOGGER_NAME = 'LOGGER_NAME' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.entries import _BaseEntry class _Dummy(_BaseEntry): @@ -193,7 +194,8 @@ class TestProtobufEntry(unittest.TestCase): PROJECT = 'PROJECT' LOGGER_NAME = 'LOGGER_NAME' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.entries import ProtobufEntry return ProtobufEntry diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 0524dfddbc9d..0a11cdedabc3 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -20,7 +20,8 @@ class TestLogger(unittest.TestCase): PROJECT = 'test-project' LOGGER_NAME = 'logger-name' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.logger import Logger return Logger @@ -421,7 +422,8 @@ class TestBatch(unittest.TestCase): PROJECT = 'test-project' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.logger import Batch return Batch diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/unit_tests/test_metric.py index 8f777ec0e33e..45c54768c401 100644 --- a/packages/google-cloud-logging/unit_tests/test_metric.py +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -22,7 +22,8 @@ class TestMetric(unittest.TestCase): FILTER = 'logName:syslog AND severity>=ERROR' DESCRIPTION = 'DESCRIPTION' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.metric import Metric return Metric diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py index 64245e66db72..8bb6be7a4589 100644 --- a/packages/google-cloud-logging/unit_tests/test_sink.py +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -22,7 +22,8 @@ class TestSink(unittest.TestCase): FILTER = 'logName:syslog AND severity>=INFO' DESTINATION_URI = 'faux.googleapis.com/destination' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.logging.sink import Sink return Sink From f73fd83ebcbdfdeeb7ab601d40635bc143bcf97a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:22:12 -0800 Subject: [PATCH 041/855] Changing uses of _getTargetClass to _get_target_class. 
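
This is the mechanical follow-up to the previous commit, which converted
the definitions into static methods; here the remaining call sites are
updated. A typical helper changes like so (sketch of the pattern repeated
in the hunks below):

    -        return self._getTargetClass()(*args, **kw)
    +        return self._get_target_class()(*args, **kw)
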
Done via: $ git grep -l _getTargetClass | \ > xargs sed -i s/_getTargetClass/_get_target_class/g --- .../unit_tests/handlers/test_handlers.py | 2 +- .../handlers/transports/test_background_thread.py | 4 ++-- .../unit_tests/handlers/transports/test_base.py | 2 +- .../unit_tests/handlers/transports/test_sync.py | 2 +- packages/google-cloud-logging/unit_tests/test__gax.py | 2 +- packages/google-cloud-logging/unit_tests/test__http.py | 10 +++++----- .../google-cloud-logging/unit_tests/test_client.py | 2 +- .../google-cloud-logging/unit_tests/test_entries.py | 10 +++++----- .../google-cloud-logging/unit_tests/test_logger.py | 4 ++-- .../google-cloud-logging/unit_tests/test_metric.py | 6 +++--- packages/google-cloud-logging/unit_tests/test_sink.py | 6 +++--- 11 files changed, 25 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index ca6a43fb821d..0deb5647592c 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -26,7 +26,7 @@ def _get_target_class(): return CloudLoggingHandler def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index 480c20610b3a..c5b9fe37b0a7 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -28,7 +28,7 @@ def _get_target_class(): return BackgroundThreadTransport def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) @@ -64,7 +64,7 @@ def _get_target_class(): return background_thread._Worker def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): NAME = 'python_logger' diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py index bba952004517..aec7160943d8 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py @@ -25,7 +25,7 @@ def _get_target_class(): return Transport def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_send_is_abstract(self): target = self._makeOne() diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py index f659f5da7e34..c415c2b47782 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -26,7 +26,7 @@ def _get_target_class(): return SyncTransport def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) diff --git 
a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 154b0b50880d..ee3b5cccef08 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -33,7 +33,7 @@ class _Base(object): FILTER = 'logName:syslog AND severity>=ERROR' def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) @unittest.skipUnless(_HAVE_GAX, 'No gax-python') diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 0d73e53148bf..a087de503599 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -26,12 +26,12 @@ def _get_target_class(): return Connection def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_default_url(self): creds = _Credentials() conn = self._makeOne(creds) - klass = self._getTargetClass() + klass = self._get_target_class() self.assertEqual(conn.credentials._scopes, klass.SCOPE) @@ -49,7 +49,7 @@ def _get_target_class(): return _LoggingAPI def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): connection = object() @@ -299,7 +299,7 @@ def _get_target_class(): return _SinksAPI def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): connection = object() @@ -541,7 +541,7 @@ def _get_target_class(): return _MetricsAPI def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_list_metrics_no_paging(self): import six diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 164102564668..f46194f04af4 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -32,7 +32,7 @@ def _get_target_class(): return Client def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): creds = _Credentials() diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index b207cdbb8008..cf365ad1d419 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -51,7 +51,7 @@ class _Dummy(_BaseEntry): return _Dummy def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): PAYLOAD = 'PAYLOAD' @@ -105,7 +105,7 @@ def test_from_api_repr_missing_data_no_loggers(self): 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, } - klass = self._getTargetClass() + klass = self._get_target_class() entry = klass.from_api_repr(API_REPR, client) self.assertEqual(entry.payload, PAYLOAD) self.assertIsNone(entry.insert_id) @@ -120,7 +120,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC - klass = self._getTargetClass() + klass = self._get_target_class() client = 
_Client(self.PROJECT) PAYLOAD = 'PAYLOAD' SEVERITY = 'CRITICAL' @@ -180,7 +180,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): } LOGGER = object() loggers = {LOG_NAME: LOGGER} - klass = self._getTargetClass() + klass = self._get_target_class() entry = klass.from_api_repr(API_REPR, client, loggers=loggers) self.assertEqual(entry.payload, PAYLOAD) self.assertEqual(entry.insert_id, IID) @@ -200,7 +200,7 @@ def _get_target_class(): return ProtobufEntry def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_parse_message(self): import json diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 0a11cdedabc3..f4bf83a10375 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -26,7 +26,7 @@ def _get_target_class(): return Logger def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): conn = object() @@ -428,7 +428,7 @@ def _get_target_class(): return Batch def _makeOne(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def test_ctor_defaults(self): logger = _Logger() diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/unit_tests/test_metric.py index 45c54768c401..60c251398620 100644 --- a/packages/google-cloud-logging/unit_tests/test_metric.py +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -28,7 +28,7 @@ def _get_target_class(): return Metric def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) @@ -62,7 +62,7 @@ def test_from_api_repr_minimal(self): 'name': self.METRIC_NAME, 'filter': self.FILTER, } - klass = self._getTargetClass() + klass = self._get_target_class() metric = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) @@ -80,7 +80,7 @@ def test_from_api_repr_w_description(self): 'filter': self.FILTER, 'description': DESCRIPTION, } - klass = self._getTargetClass() + klass = self._get_target_class() metric = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py index 8bb6be7a4589..90d60faecf9d 100644 --- a/packages/google-cloud-logging/unit_tests/test_sink.py +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -28,7 +28,7 @@ def _get_target_class(): return Sink def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) @@ -63,7 +63,7 @@ def test_from_api_repr_minimal(self): 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - klass = self._getTargetClass() + klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) @@ -80,7 +80,7 @@ def 
test_from_api_repr_w_description(self): 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - klass = self._getTargetClass() + klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) From 90b98ac0ad95b11ddfeea6494554b70332a5920d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:05:35 -0800 Subject: [PATCH 042/855] Changing all instances of _makeOne to _make_one. Done via: $ git grep -l _makeOne | \ > xargs sed -i s/_makeOne/_make_one/g --- .../unit_tests/handlers/test_handlers.py | 6 +- .../transports/test_background_thread.py | 16 ++-- .../handlers/transports/test_base.py | 4 +- .../handlers/transports/test_sync.py | 6 +- .../unit_tests/test__gax.py | 82 +++++++++---------- .../unit_tests/test__http.py | 64 +++++++-------- .../unit_tests/test_client.py | 40 ++++----- .../unit_tests/test_entries.py | 10 +-- .../unit_tests/test_logger.py | 62 +++++++------- .../unit_tests/test_metric.py | 26 +++--- .../unit_tests/test_sink.py | 26 +++--- 11 files changed, 171 insertions(+), 171 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index 0deb5647592c..a144efe66f3a 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -25,17 +25,17 @@ def _get_target_class(): from google.cloud.logging.handlers.handlers import CloudLoggingHandler return CloudLoggingHandler - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) - handler = self._makeOne(client, transport=_Transport) + handler = self._make_one(client, transport=_Transport) self.assertEqual(handler.client, client) def test_emit(self): client = _Client(self.PROJECT) - handler = self._makeOne(client, transport=_Transport) + handler = self._make_one(client, transport=_Transport) LOGNAME = 'loggername' MESSAGE = 'hello world' record = _Record(LOGNAME, logging.INFO, MESSAGE) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index c5b9fe37b0a7..a7ef4fc43190 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -27,19 +27,19 @@ def _get_target_class(): BackgroundThreadTransport) return BackgroundThreadTransport - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) NAME = 'python_logger' - transport = self._makeOne(client, NAME) + transport = self._make_one(client, NAME) self.assertEquals(transport.worker.logger.name, NAME) def test_send(self): client = _Client(self.PROJECT) NAME = 'python_logger' - transport = self._makeOne(client, NAME) + transport = self._make_one(client, NAME) transport.worker.batch = client.logger(NAME).batch() PYTHON_LOGGER_NAME = 'mylogger' @@ -63,19 +63,19 @@ def _get_target_class(): from google.cloud.logging.handlers.transports import background_thread return background_thread._Worker - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return 
self._get_target_class()(*args, **kw) def test_ctor(self): NAME = 'python_logger' logger = _Logger(NAME) - worker = self._makeOne(logger) + worker = self._make_one(logger) self.assertEquals(worker.batch, logger._batch) def test_run(self): NAME = 'python_logger' logger = _Logger(NAME) - worker = self._makeOne(logger) + worker = self._make_one(logger) PYTHON_LOGGER_NAME = 'mylogger' MESSAGE = 'hello world' @@ -101,7 +101,7 @@ def test_run_after_stopped(self): # No-op NAME = 'python_logger' logger = _Logger(NAME) - worker = self._makeOne(logger) + worker = self._make_one(logger) PYTHON_LOGGER_NAME = 'mylogger' MESSAGE = 'hello world' @@ -120,7 +120,7 @@ def test_run_enqueue_early(self): # No-op NAME = 'python_logger' logger = _Logger(NAME) - worker = self._makeOne(logger) + worker = self._make_one(logger) PYTHON_LOGGER_NAME = 'mylogger' MESSAGE = 'hello world' diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py index aec7160943d8..0fd673fc2a1b 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py @@ -24,10 +24,10 @@ def _get_target_class(): from google.cloud.logging.handlers.transports import Transport return Transport - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_send_is_abstract(self): - target = self._makeOne() + target = self._make_one() with self.assertRaises(NotImplementedError): target.send(None, None) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py index c415c2b47782..54e14dcbdfff 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -25,20 +25,20 @@ def _get_target_class(): from google.cloud.logging.handlers.transports import SyncTransport return SyncTransport - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): client = _Client(self.PROJECT) NAME = 'python_logger' - transport = self._makeOne(client, NAME) + transport = self._make_one(client, NAME) self.assertEqual(transport.logger.name, 'python_logger') def test_send(self): client = _Client(self.PROJECT) STACKDRIVER_LOGGER_NAME = 'python' PYTHON_LOGGER_NAME = 'mylogger' - transport = self._makeOne(client, STACKDRIVER_LOGGER_NAME) + transport = self._make_one(client, STACKDRIVER_LOGGER_NAME) MESSAGE = 'hello world' record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index ee3b5cccef08..7497a07efc7d 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -32,7 +32,7 @@ class _Base(object): PROJECT_PATH = 'projects/%s' % (PROJECT,) FILTER = 'logName:syslog AND severity>=ERROR' - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -49,7 +49,7 @@ def _get_target_class(): def test_ctor(self): gax_api = _GAXLoggingAPI() client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) self.assertIs(api._gax_api, gax_api) 
self.assertIs(api._client, client) @@ -81,7 +81,7 @@ def test_list_entries_no_paging(self): gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=object(), use_gax=True) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_entries( [self.PROJECT], self.FILTER, DESCENDING) @@ -137,7 +137,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=object(), use_gax=True) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_entries( [self.PROJECT], page_size=SIZE, page_token=TOKEN) @@ -276,7 +276,7 @@ def test_list_entries_with_extra_properties(self): gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=object(), use_gax=True) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_entries( [self.PROJECT], page_size=SIZE, page_token=TOKEN) @@ -328,7 +328,7 @@ def test_write_entries_single(self): 'textPayload': TEXT, } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.write_entries([ENTRY]) @@ -401,7 +401,7 @@ def test_write_entries_w_extra_properties(self): 'operation': OPERATION, } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.write_entries([ENTRY]) @@ -476,7 +476,7 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): 'foo': 'bar', } gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.write_entries(ENTRIES, self.LOG_PATH, RESOURCE, LABELS) @@ -560,7 +560,7 @@ def test_write_entries_multiple_nested_payload(self): def test_logger_delete(self): gax_api = _GAXLoggingAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.logger_delete(self.PROJECT, self.LOG_NAME) @@ -572,7 +572,7 @@ def test_logger_delete_not_found(self): from google.cloud.exceptions import NotFound gax_api = _GAXLoggingAPI(_delete_not_found=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.logger_delete(self.PROJECT, self.LOG_NAME) @@ -585,7 +585,7 @@ def test_logger_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXLoggingAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.logger_delete(self.PROJECT, self.LOG_NAME) @@ -609,7 +609,7 @@ def _get_target_class(): def test_ctor(self): gax_api = _GAXSinksAPI() client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) self.assertIs(api._gax_api, gax_api) self.assertIs(api._client, client) @@ -627,7 +627,7 @@ def test_list_sinks_no_paging(self): response = _GAXPageIterator([sink_pb], page_token=TOKEN) gax_api = _GAXSinksAPI(_list_sinks_response=response) client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_sinks(self.PROJECT) page = six.next(iterator.pages) @@ -663,7 +663,7 @@ def test_list_sinks_w_paging(self): response = _GAXPageIterator([sink_pb]) gax_api = _GAXSinksAPI(_list_sinks_response=response) client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = 
api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -689,7 +689,7 @@ def test_list_sinks_w_paging(self): def test_sink_create_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.sink_create( @@ -699,7 +699,7 @@ def test_sink_create_error(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict gax_api = _GAXSinksAPI(_create_sink_conflict=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(Conflict): api.sink_create( @@ -709,7 +709,7 @@ def test_sink_create_conflict(self): def test_sink_create_ok(self): from google.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -726,7 +726,7 @@ def test_sink_create_ok(self): def test_sink_get_error(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -734,7 +734,7 @@ def test_sink_get_error(self): def test_sink_get_miss(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -751,7 +751,7 @@ def test_sink_get_hit(self): destination=self.DESTINATION_URI, filter=self.FILTER) gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) response = api.sink_get(self.PROJECT, self.SINK_NAME) @@ -764,7 +764,7 @@ def test_sink_get_hit(self): def test_sink_update_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.sink_update( @@ -774,7 +774,7 @@ def test_sink_update_error(self): def test_sink_update_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.sink_update( @@ -788,7 +788,7 @@ def test_sink_update_hit(self): destination=self.DESTINATION_URI, filter=self.FILTER) gax_api = _GAXSinksAPI(_update_sink_response=response) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.sink_update( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -805,7 +805,7 @@ def test_sink_update_hit(self): def test_sink_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -813,14 +813,14 @@ def test_sink_delete_error(self): def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSinksAPI(_sink_not_found=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.sink_delete(self.PROJECT, self.SINK_NAME) def test_sink_delete_hit(self): gax_api = _GAXSinksAPI() - api = 
self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -842,7 +842,7 @@ def _get_target_class(): def test_ctor(self): gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) self.assertIs(api._gax_api, gax_api) def test_list_metrics_no_paging(self): @@ -859,7 +859,7 @@ def test_list_metrics_no_paging(self): response = _GAXPageIterator([metric_pb], page_token=TOKEN) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_metrics(self.PROJECT) page = six.next(iterator.pages) @@ -895,7 +895,7 @@ def test_list_metrics_w_paging(self): response = _GAXPageIterator([metric_pb]) gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) client = object() - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -921,7 +921,7 @@ def test_list_metrics_w_paging(self): def test_metric_create_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.metric_create( @@ -931,7 +931,7 @@ def test_metric_create_error(self): def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(Conflict): api.metric_create( @@ -941,7 +941,7 @@ def test_metric_create_conflict(self): def test_metric_create_ok(self): from google.logging.v2.logging_metrics_pb2 import LogMetric gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.metric_create( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -958,7 +958,7 @@ def test_metric_create_ok(self): def test_metric_get_error(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -966,7 +966,7 @@ def test_metric_get_error(self): def test_metric_get_miss(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -983,7 +983,7 @@ def test_metric_get_hit(self): description=self.DESCRIPTION, filter=self.FILTER) gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) response = api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -996,7 +996,7 @@ def test_metric_get_hit(self): def test_metric_update_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.metric_update( @@ -1006,7 +1006,7 @@ def test_metric_update_error(self): def test_metric_update_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.metric_update( 
@@ -1020,7 +1020,7 @@ def test_metric_update_hit(self): description=self.DESCRIPTION, filter=self.FILTER) gax_api = _GAXMetricsAPI(_update_log_metric_response=response) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.metric_update( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -1037,7 +1037,7 @@ def test_metric_update_hit(self): def test_metric_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(GaxError): api.metric_delete(self.PROJECT, self.METRIC_NAME) @@ -1045,14 +1045,14 @@ def test_metric_delete_error(self): def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXMetricsAPI(_log_metric_not_found=True) - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) with self.assertRaises(NotFound): api.metric_delete(self.PROJECT, self.METRIC_NAME) def test_metric_delete_hit(self): gax_api = _GAXMetricsAPI() - api = self._makeOne(gax_api, None) + api = self._make_one(gax_api, None) api.metric_delete(self.PROJECT, self.METRIC_NAME) diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index a087de503599..1a6d5cd5d9f3 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -25,12 +25,12 @@ def _get_target_class(): from google.cloud.logging._http import Connection return Connection - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_default_url(self): creds = _Credentials() - conn = self._makeOne(creds) + conn = self._make_one(creds) klass = self._get_target_class() self.assertEqual(conn.credentials._scopes, klass.SCOPE) @@ -48,13 +48,13 @@ def _get_target_class(): from google.cloud.logging._http import _LoggingAPI return _LoggingAPI - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): connection = object() client = _Client(connection) - api = self._makeOne(client) + api = self._make_one(client) self.assertIs(api._connection, connection) self.assertIs(api._client, client) @@ -95,7 +95,7 @@ def test_list_entries_no_paging(self): client = Client(project=self.PROJECT, credentials=object(), use_gax=False) client.connection = _Connection(RETURNED) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_entries([self.PROJECT]) page = six.next(iterator.pages) @@ -173,7 +173,7 @@ def test_list_entries_w_paging(self): client = Client(project=self.PROJECT, credentials=object(), use_gax=False) client.connection = _Connection(RETURNED) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_entries( projects=[PROJECT1, PROJECT2], filter_=self.FILTER, @@ -230,7 +230,7 @@ def test_write_entries_single(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.write_entries([ENTRY]) @@ -263,7 +263,7 @@ def test_write_entries_multiple(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) @@ -276,7 +276,7 @@ def test_logger_delete(self): path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) conn = _Connection({}) 
client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.logger_delete(self.PROJECT, self.LOGGER_NAME) @@ -298,13 +298,13 @@ def _get_target_class(): from google.cloud.logging._http import _SinksAPI return _SinksAPI - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): connection = object() client = _Client(connection) - api = self._makeOne(client) + api = self._make_one(client) self.assertIs(api._connection, connection) self.assertIs(api._client, client) @@ -323,7 +323,7 @@ def test_list_sinks_no_paging(self): } conn = _Connection(RETURNED) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_sinks(self.PROJECT) page = six.next(iterator.pages) @@ -363,7 +363,7 @@ def test_list_sinks_w_paging(self): } conn = _Connection(RETURNED) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_sinks( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -402,7 +402,7 @@ def test_sink_create_conflict(self): conn = _Connection() conn._raise_conflict = True client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(Conflict): api.sink_create( @@ -422,7 +422,7 @@ def test_sink_create_ok(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -436,7 +436,7 @@ def test_sink_get_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.sink_get(self.PROJECT, self.SINK_NAME) @@ -453,7 +453,7 @@ def test_sink_get_hit(self): } conn = _Connection(RESPONSE) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) response = api.sink_get(self.PROJECT, self.SINK_NAME) @@ -471,7 +471,7 @@ def test_sink_update_miss(self): } conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.sink_update( @@ -491,7 +491,7 @@ def test_sink_update_hit(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.sink_update( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -505,7 +505,7 @@ def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -517,7 +517,7 @@ def test_sink_delete_miss(self): def test_sink_delete_hit(self): conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.sink_delete(self.PROJECT, self.SINK_NAME) @@ -540,7 +540,7 @@ def _get_target_class(): from google.cloud.logging._http import _MetricsAPI return _MetricsAPI - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_list_metrics_no_paging(self): @@ -557,7 +557,7 @@ def test_list_metrics_no_paging(self): } conn = _Connection(RETURNED) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_metrics(self.PROJECT) page = six.next(iterator.pages) @@ -596,7 
+596,7 @@ def test_list_metrics_w_paging(self): } conn = _Connection(RETURNED) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_metrics( self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) @@ -635,7 +635,7 @@ def test_metric_create_conflict(self): conn = _Connection() conn._raise_conflict = True client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(Conflict): api.metric_create( @@ -655,7 +655,7 @@ def test_metric_create_ok(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.metric_create( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -669,7 +669,7 @@ def test_metric_get_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -686,7 +686,7 @@ def test_metric_get_hit(self): } conn = _Connection(RESPONSE) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) response = api.metric_get(self.PROJECT, self.METRIC_NAME) @@ -704,7 +704,7 @@ def test_metric_update_miss(self): } conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.metric_update( @@ -724,7 +724,7 @@ def test_metric_update_hit(self): } conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.metric_update( self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) @@ -738,7 +738,7 @@ def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound conn = _Connection() client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.metric_delete(self.PROJECT, self.METRIC_NAME) @@ -750,7 +750,7 @@ def test_metric_delete_miss(self): def test_metric_delete_hit(self): conn = _Connection({}) client = _Client(conn) - api = self._makeOne(client) + api = self._make_one(client) api.metric_delete(self.PROJECT, self.METRIC_NAME) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index f46194f04af4..711ce9a8a23a 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -31,18 +31,18 @@ def _get_target_class(): from google.cloud.logging.client import Client return Client - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): from google.cloud.logging._http import _LoggingAPI - client = self._makeOne(self.PROJECT, credentials=_Credentials(), + client = self._make_one(self.PROJECT, credentials=_Credentials(), use_gax=False) conn = client.connection = object() api = client.logging_api @@ -65,7 +65,7 @@ def make_api(client_obj): return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) with _Monkey(MUT, make_gax_logging_api=make_api): @@ 
-84,7 +84,7 @@ def test_no_gax_ctor(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=True): - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) api = client.logging_api @@ -96,7 +96,7 @@ def test_sinks_api_wo_gax(self): from google.cloud._testing import _Monkey with _Monkey(MUT, _USE_GAX=False): - client = self._makeOne(self.PROJECT, credentials=_Credentials()) + client = self._make_one(self.PROJECT, credentials=_Credentials()) conn = client.connection = object() api = client.sinks_api @@ -119,7 +119,7 @@ def make_api(client_obj): return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) with _Monkey(MUT, make_gax_sinks_api=make_api): @@ -137,7 +137,7 @@ def test_metrics_api_wo_gax(self): from google.cloud._testing import _Monkey with _Monkey(MUT, _USE_GAX=False): - client = self._makeOne(self.PROJECT, credentials=_Credentials()) + client = self._make_one(self.PROJECT, credentials=_Credentials()) conn = client.connection = object() api = client.metrics_api @@ -160,7 +160,7 @@ def make_api(client_obj): return api_obj creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) with _Monkey(MUT, make_gax_metrics_api=make_api): @@ -175,7 +175,7 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging.logger import Logger creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) logger = client.logger(self.LOGGER_NAME) self.assertIsInstance(logger, Logger) self.assertEqual(logger.name, self.LOGGER_NAME) @@ -199,7 +199,7 @@ def test_list_entries_defaults(self): self.PROJECT, self.LOGGER_NAME), }] creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = { 'entries': ENTRIES, @@ -263,7 +263,7 @@ def test_list_entries_explicit(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - client = self._makeOne(self.PROJECT, credentials=_Credentials(), + client = self._make_one(self.PROJECT, credentials=_Credentials(), use_gax=False) returned = {'entries': ENTRIES} client.connection = _Connection(returned) @@ -315,7 +315,7 @@ def test_list_entries_explicit(self): def test_sink_defaults(self): from google.cloud.logging.sink import Sink creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME) self.assertIsInstance(sink, Sink) self.assertEqual(sink.name, self.SINK_NAME) @@ -327,7 +327,7 @@ def test_sink_defaults(self): def test_sink_explicit(self): from google.cloud.logging.sink import Sink creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) self.assertIsInstance(sink, Sink) self.assertEqual(sink.name, self.SINK_NAME) @@ -349,7 +349,7 @@ def test_list_sinks_no_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials(), + 
client = self._make_one(project=PROJECT, credentials=_Credentials(), use_gax=False) returned = { 'sinks': SINKS, @@ -395,7 +395,7 @@ def test_list_sinks_with_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._makeOne(project=PROJECT, credentials=_Credentials(), + client = self._make_one(project=PROJECT, credentials=_Credentials(), use_gax=False) returned = { 'sinks': SINKS, @@ -433,7 +433,7 @@ def test_metric_defaults(self): from google.cloud.logging.metric import Metric creds = _Credentials() - client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME) self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) @@ -446,7 +446,7 @@ def test_metric_explicit(self): from google.cloud.logging.metric import Metric creds = _Credentials() - client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME, self.FILTER, description=self.DESCRIPTION) self.assertIsInstance(metric, Metric) @@ -464,7 +464,7 @@ def test_list_metrics_no_paging(self): 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne( + client = self._make_one( project=self.PROJECT, credentials=_Credentials(), use_gax=False) returned = { @@ -506,7 +506,7 @@ def test_list_metrics_with_paging(self): 'filter': self.FILTER, 'description': self.DESCRIPTION, }] - client = self._makeOne( + client = self._make_one( project=self.PROJECT, credentials=_Credentials(), use_gax=False) returned = { diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index cf365ad1d419..8628e363b910 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -50,13 +50,13 @@ class _Dummy(_BaseEntry): return _Dummy - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): PAYLOAD = 'PAYLOAD' logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger) + entry = self._make_one(PAYLOAD, logger) self.assertEqual(entry.payload, PAYLOAD) self.assertIs(entry.logger, logger) self.assertIsNone(entry.insert_id) @@ -81,7 +81,7 @@ def test_ctor_explicit(self): 'status': STATUS, } logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger, + entry = self._make_one(PAYLOAD, logger, insert_id=IID, timestamp=TIMESTAMP, labels=LABELS, @@ -199,7 +199,7 @@ def _get_target_class(): from google.cloud.logging.entries import ProtobufEntry return ProtobufEntry - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_parse_message(self): @@ -210,7 +210,7 @@ def test_parse_message(self): message = Struct(fields={'foo': Value(bool_value=False)}) with_true = Struct(fields={'foo': Value(bool_value=True)}) PAYLOAD = json.loads(MessageToJson(with_true)) - entry = self._makeOne(PAYLOAD, LOGGER) + entry = self._make_one(PAYLOAD, LOGGER) entry.parse_message(message) self.assertTrue(message.fields['foo']) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index f4bf83a10375..35a155655189 100644 --- 
a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -25,13 +25,13 @@ def _get_target_class(): from google.cloud.logging.logger import Logger return Logger - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): conn = object() client = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) @@ -45,7 +45,7 @@ def test_ctor_explicit(self): LABELS = {'foo': 'bar', 'baz': 'qux'} conn = object() client = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client, labels=LABELS) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) @@ -59,7 +59,7 @@ def test_batch_w_bound_client(self): from google.cloud.logging.logger import Batch conn = object() client = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) batch = logger.batch() self.assertIsInstance(batch, Batch) self.assertIs(batch.logger, logger) @@ -71,7 +71,7 @@ def test_batch_w_alternate_client(self): conn2 = object() client1 = _Client(self.PROJECT, conn1) client2 = _Client(self.PROJECT, conn2) - logger = self._makeOne(self.LOGGER_NAME, client=client1) + logger = self._make_one(self.LOGGER_NAME, client=client1) batch = logger.batch(client2) self.assertIsInstance(batch, Batch) self.assertIs(batch.logger, logger) @@ -89,7 +89,7 @@ def test_log_text_w_str_implicit_client(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) logger.log_text(TEXT) @@ -110,7 +110,7 @@ def test_log_text_w_default_labels(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client, + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_text(TEXT) @@ -147,7 +147,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client1, + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_text(TEXT, client=client2, labels=LABELS, @@ -168,7 +168,7 @@ def test_log_struct_w_implicit_client(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) logger.log_struct(STRUCT) @@ -189,7 +189,7 @@ def test_log_struct_w_default_labels(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client, + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_struct(STRUCT) @@ -226,7 +226,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): client1 = _Client(self.PROJECT) client2 = 
_Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client1, + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_struct(STRUCT, client=client2, labels=LABELS, @@ -251,7 +251,7 @@ def test_log_proto_w_implicit_client(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) logger.log_proto(message) @@ -275,7 +275,7 @@ def test_log_proto_w_default_labels(self): }] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client, + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_proto(message) @@ -315,7 +315,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client1, + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_proto(message, client=client2, labels=LABELS, @@ -328,7 +328,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) logger.delete() @@ -339,7 +339,7 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._makeOne(self.LOGGER_NAME, client=client1) + logger = self._make_one(self.LOGGER_NAME, client=client1) logger.delete(client=client2) @@ -359,7 +359,7 @@ def test_list_entries_defaults(self): } client.connection = _Connection(returned) - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries() page = six.next(iterator.pages) @@ -392,7 +392,7 @@ def test_list_entries_explicit(self): client = Client(project=self.PROJECT, credentials=object(), use_gax=False) client.connection = _Connection({}) - logger = self._makeOne(self.LOGGER_NAME, client=client) + logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) @@ -427,13 +427,13 @@ def _get_target_class(): from google.cloud.logging.logger import Batch return Batch - def _makeOne(self, *args, **kwargs): + def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_ctor_defaults(self): logger = _Logger() client = _Client(project=self.PROJECT) - batch = self._makeOne(logger, client) + batch = self._make_one(logger, client) self.assertIs(batch.logger, logger) self.assertIs(batch.client, client) self.assertEqual(len(batch.entries), 0) @@ -442,7 +442,7 @@ def test_log_text_defaults(self): TEXT = 'This is the entry text' client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_text(TEXT) self.assertEqual(batch.entries, [('text', TEXT, None, None, None, None)]) @@ -462,7 +462,7 @@ def 
test_log_text_explicit(self): } client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) self.assertEqual(batch.entries, @@ -472,7 +472,7 @@ def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) self.assertEqual(batch.entries, [('struct', STRUCT, None, None, None, None)]) @@ -492,7 +492,7 @@ def test_log_struct_explicit(self): } client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) self.assertEqual(batch.entries, @@ -503,7 +503,7 @@ def test_log_proto_defaults(self): message = Struct(fields={'foo': Value(bool_value=True)}) client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_proto(message) self.assertEqual(batch.entries, [('proto', message, None, None, None, None)]) @@ -524,7 +524,7 @@ def test_log_proto_explicit(self): } client = _Client(project=self.PROJECT, connection=object()) logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) self.assertEqual(batch.entries, @@ -533,7 +533,7 @@ def test_log_proto_explicit(self): def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=object()) - batch = self._makeOne(logger, client) + batch = self._make_one(logger, client) batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) with self.assertRaises(ValueError): batch.commit() @@ -560,7 +560,7 @@ def test_commit_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) batch.log_text(TEXT, insert_id=IID1) batch.log_struct(STRUCT, insert_id=IID2) @@ -604,7 +604,7 @@ def test_commit_w_alternate_client(self): {'protoPayload': json.loads(MessageToJson(message)), 'httpRequest': REQUEST}, ] - batch = self._makeOne(logger, client=client1) + batch = self._make_one(logger, client=client1) batch.log_text(TEXT, labels=LABELS) batch.log_struct(STRUCT, severity=SEVERITY) @@ -646,7 +646,7 @@ def test_context_mgr_success(self): {'protoPayload': json.loads(MessageToJson(message)), 'severity': SEVERITY}, ] - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) with batch as other: other.log_text(TEXT, http_request=REQUEST) @@ -681,7 +681,7 @@ def test_context_mgr_failure(self): ('struct', STRUCT, None, None, SEVERITY, None), ('proto', message, LABELS, None, None, REQUEST), ] - batch = self._makeOne(logger, client=client) + batch = self._make_one(logger, client=client) try: with batch as other: diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py 
b/packages/google-cloud-logging/unit_tests/test_metric.py index 60c251398620..4c88fc83bc26 100644 --- a/packages/google-cloud-logging/unit_tests/test_metric.py +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -27,13 +27,13 @@ def _get_target_class(): from google.cloud.logging.metric import Metric return Metric - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) - metric = self._makeOne(self.METRIC_NAME, client=client) + metric = self._make_one(self.METRIC_NAME, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertIsNone(metric.filter_) self.assertEqual(metric.description, '') @@ -45,7 +45,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) - metric = self._makeOne(self.METRIC_NAME, self.FILTER, + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) @@ -92,7 +92,7 @@ def test_from_api_repr_w_description(self): def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) metric.create() @@ -104,7 +104,7 @@ def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION) metric.create(client=client2) @@ -116,7 +116,7 @@ def test_create_w_alternate_client(self): def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) self.assertFalse(metric.exists()) @@ -132,7 +132,7 @@ def test_exists_hit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) self.assertTrue(metric.exists(client=client2)) @@ -148,7 +148,7 @@ def test_reload_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client, + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION) metric.reload() @@ -169,7 +169,7 @@ def test_reload_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) metric.reload(client=client2) @@ -181,7 +181,7 @@ def test_reload_w_alternate_client(self): def test_update_w_bound_client(self): 
client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) metric.update() @@ -193,7 +193,7 @@ def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION) metric.update(client=client2) @@ -205,7 +205,7 @@ def test_update_w_alternate_client(self): def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) metric.delete() @@ -216,7 +216,7 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) metric.delete(client=client2) diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py index 90d60faecf9d..20b6fcded9f5 100644 --- a/packages/google-cloud-logging/unit_tests/test_sink.py +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -27,13 +27,13 @@ def _get_target_class(): from google.cloud.logging.sink import Sink return Sink - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) - sink = self._makeOne(self.SINK_NAME, client=client) + sink = self._make_one(self.SINK_NAME, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertIsNone(sink.filter_) self.assertIsNone(sink.destination) @@ -45,7 +45,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) @@ -92,7 +92,7 @@ def test_from_api_repr_w_description(self): def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.create() @@ -104,7 +104,7 @@ def test_create_w_bound_client(self): def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) api = client2.sinks_api = _DummySinksAPI() @@ -117,7 +117,7 @@ def test_create_w_alternate_client(self): def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink 
= self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) self.assertFalse(sink.exists()) @@ -135,7 +135,7 @@ def test_exists_hit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) self.assertTrue(sink.exists(client=client2)) @@ -154,7 +154,7 @@ def test_reload_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.reload() @@ -176,7 +176,7 @@ def test_reload_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) sink.reload(client=client2) @@ -189,7 +189,7 @@ def test_reload_w_alternate_client(self): def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.update() @@ -202,7 +202,7 @@ def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) sink.update(client=client2) @@ -214,7 +214,7 @@ def test_update_w_alternate_client(self): def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.delete() @@ -226,7 +226,7 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() - sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) sink.delete(client=client2) From 358f442b67e697ab8c573d18dbe62739f0c1a964 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:06:21 -0800 Subject: [PATCH 043/855] Changing all instances of _callFUT to _call_fut. 
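For context (an illustrative sketch, not part of this diff): _callFUT is the per-test-class shim through which each test invokes the function under test, so after the rename the test_entries.py helper has roughly this shape:

    import unittest

    class Test_logger_name_from_path(unittest.TestCase):

        def _call_fut(self, path):
            # Deferred import: the dependency is localized to the tests
            # that actually exercise it, and a rename touches one place.
            from google.cloud.logging.entries import logger_name_from_path
            return logger_name_from_path(path)
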
Done via: $ git grep -l _callFUT | \ > xargs sed -i s/_callFUT/_call_fut/g --- .../unit_tests/handlers/test_handlers.py | 6 +++--- packages/google-cloud-logging/unit_tests/test_entries.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index a144efe66f3a..54c38f9b82cb 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -46,7 +46,7 @@ def test_emit(self): class TestSetupLogging(unittest.TestCase): - def _callFUT(self, handler, excludes=None): + def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers.handlers import setup_logging if excludes: return setup_logging(handler, excluded_loggers=excludes) @@ -55,7 +55,7 @@ def _callFUT(self, handler, excludes=None): def test_setup_logging(self): handler = _Handler(logging.INFO) - self._callFUT(handler) + self._call_fut(handler) root_handlers = logging.getLogger().handlers self.assertIn(handler, root_handlers) @@ -65,7 +65,7 @@ def test_setup_logging_excludes(self): EXCLUDED_LOGGER_NAME = 'excludeme' handler = _Handler(logging.INFO) - self._callFUT(handler, (EXCLUDED_LOGGER_NAME,)) + self._call_fut(handler, (EXCLUDED_LOGGER_NAME,)) included_logger = logging.getLogger(INCLUDED_LOGGER_NAME) self.assertTrue(included_logger.propagate) diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index 8628e363b910..12c3063a8b8c 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -17,7 +17,7 @@ class Test_logger_name_from_path(unittest.TestCase): - def _callFUT(self, path): + def _call_fut(self, path): from google.cloud.logging.entries import logger_name_from_path return logger_name_from_path(path) @@ -25,14 +25,14 @@ def test_w_simple_name(self): LOGGER_NAME = 'LOGGER_NAME' PROJECT = 'my-project-1234' PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) - logger_name = self._callFUT(PATH) + logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) def test_w_name_w_all_extras(self): LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' PROJECT = 'my-project-1234' PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) - logger_name = self._callFUT(PATH) + logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) From c8540ca776a08f18db06ad4cb1c517b250409373 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 21:02:17 -0800 Subject: [PATCH 044/855] Manually fixing up bad indents / long lines after renames. 
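_make_one is one character longer than _makeOne, so the mechanical sed rename left hanging indents one column short of the opening parenthesis and pushed several already-long lines past the lint limit. Roughly (a fragment excerpted from the fixes below, not runnable on its own):

    # After the rename, a continuation line still aligned under the
    # old, shorter name is off by one column:
    client = self._make_one(self.PROJECT, credentials=_Credentials(),
                           use_gax=False)

    # Re-aligned with the opening parenthesis:
    client = self._make_one(self.PROJECT, credentials=_Credentials(),
                            use_gax=False)
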
--- .../unit_tests/test_client.py | 18 +++--- .../unit_tests/test_entries.py | 10 ++-- .../unit_tests/test_logger.py | 12 ++-- .../unit_tests/test_metric.py | 8 +-- .../unit_tests/test_sink.py | 55 +++++++++++-------- 5 files changed, 57 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 711ce9a8a23a..ea6bd89fb961 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -43,7 +43,7 @@ def test_logging_api_wo_gax(self): from google.cloud.logging._http import _LoggingAPI client = self._make_one(self.PROJECT, credentials=_Credentials(), - use_gax=False) + use_gax=False) conn = client.connection = object() api = client.logging_api @@ -66,7 +66,7 @@ def make_api(client_obj): creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + use_gax=True) with _Monkey(MUT, make_gax_logging_api=make_api): api = client.logging_api @@ -85,7 +85,7 @@ def test_no_gax_ctor(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=True): client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + use_gax=False) api = client.logging_api self.assertIsInstance(api, _LoggingAPI) @@ -120,7 +120,7 @@ def make_api(client_obj): creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + use_gax=True) with _Monkey(MUT, make_gax_sinks_api=make_api): api = client.sinks_api @@ -161,7 +161,7 @@ def make_api(client_obj): creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + use_gax=True) with _Monkey(MUT, make_gax_metrics_api=make_api): api = client.metrics_api @@ -200,7 +200,7 @@ def test_list_entries_defaults(self): }] creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + use_gax=False) returned = { 'entries': ENTRIES, 'nextPageToken': TOKEN, @@ -264,7 +264,7 @@ def test_list_entries_explicit(self): self.PROJECT, self.LOGGER_NAME), }] client = self._make_one(self.PROJECT, credentials=_Credentials(), - use_gax=False) + use_gax=False) returned = {'entries': ENTRIES} client.connection = _Connection(returned) @@ -350,7 +350,7 @@ def test_list_sinks_no_paging(self): 'destination': self.DESTINATION_URI, }] client = self._make_one(project=PROJECT, credentials=_Credentials(), - use_gax=False) + use_gax=False) returned = { 'sinks': SINKS, 'nextPageToken': TOKEN, @@ -396,7 +396,7 @@ def test_list_sinks_with_paging(self): 'destination': self.DESTINATION_URI, }] client = self._make_one(project=PROJECT, credentials=_Credentials(), - use_gax=False) + use_gax=False) returned = { 'sinks': SINKS, } diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index 12c3063a8b8c..0ae4a5e112f9 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -82,11 +82,11 @@ def test_ctor_explicit(self): } logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._make_one(PAYLOAD, logger, - insert_id=IID, - timestamp=TIMESTAMP, - labels=LABELS, - severity=SEVERITY, - http_request=REQUEST) + insert_id=IID, + timestamp=TIMESTAMP, + labels=LABELS, + severity=SEVERITY, + http_request=REQUEST) self.assertEqual(entry.payload, PAYLOAD) self.assertIs(entry.logger, logger) self.assertEqual(entry.insert_id, 
IID) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 35a155655189..c2770f83d9f3 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -111,7 +111,7 @@ def test_log_text_w_default_labels(self): client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_text(TEXT) @@ -148,7 +148,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_text(TEXT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST) @@ -190,7 +190,7 @@ def test_log_struct_w_default_labels(self): client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_struct(STRUCT) @@ -227,7 +227,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_struct(STRUCT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, @@ -276,7 +276,7 @@ def test_log_proto_w_default_labels(self): client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_proto(message) @@ -316,7 +316,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + labels=DEFAULT_LABELS) logger.log_proto(message, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/unit_tests/test_metric.py index 4c88fc83bc26..5f4b05c054d2 100644 --- a/packages/google-cloud-logging/unit_tests/test_metric.py +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -46,7 +46,7 @@ def test_ctor_explicit(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) metric = self._make_one(self.METRIC_NAME, self.FILTER, - client=client, description=self.DESCRIPTION) + client=client, description=self.DESCRIPTION) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) @@ -105,7 +105,7 @@ def test_create_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, - description=self.DESCRIPTION) + description=self.DESCRIPTION) metric.create(client=client2) @@ -149,7 +149,7 @@ def test_reload_w_bound_client(self): api = client.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client, - description=self.DESCRIPTION) + 
description=self.DESCRIPTION) metric.reload() @@ -194,7 +194,7 @@ def test_update_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, - description=self.DESCRIPTION) + description=self.DESCRIPTION) metric.update(client=client2) diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py index 20b6fcded9f5..ca2a7a6df9c4 100644 --- a/packages/google-cloud-logging/unit_tests/test_sink.py +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -45,8 +45,9 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) @@ -92,8 +93,9 @@ def test_from_api_repr_w_description(self): def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) sink.create() @@ -104,8 +106,9 @@ def test_create_w_bound_client(self): def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client1) api = client2.sinks_api = _DummySinksAPI() sink.create(client=client2) @@ -117,8 +120,9 @@ def test_create_w_alternate_client(self): def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) self.assertFalse(sink.exists()) @@ -135,8 +139,9 @@ def test_exists_hit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client1) self.assertTrue(sink.exists(client=client2)) @@ -154,8 +159,9 @@ def test_reload_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) sink.reload() @@ -176,8 +182,9 @@ def test_reload_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client1) sink.reload(client=client2) 
@@ -189,8 +196,9 @@ def test_reload_w_alternate_client(self): def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) sink.update() @@ -202,8 +210,9 @@ def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client1) sink.update(client=client2) @@ -214,8 +223,9 @@ def test_update_w_alternate_client(self): def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client) sink.delete() @@ -226,8 +236,9 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, self.FILTER, + self.DESTINATION_URI, + client=client1) sink.delete(client=client2) From f5f797ad0a6c79d026f16bb5360e816a769b185d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 21:17:51 -0800 Subject: [PATCH 045/855] Adding quiet flag to pip command for local deps. --- packages/google-cloud-logging/tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini index 5dd0b038e806..3019c5083252 100644 --- a/packages/google-cloud-logging/tox.ini +++ b/packages/google-cloud-logging/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] localdeps = - pip install --upgrade {toxinidir}/../core + pip install --quiet --upgrade {toxinidir}/../core deps = pytest covercmd = From ac4d87adc09a26890cd41b67ab27ec759f51485d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 19:22:59 -0800 Subject: [PATCH 046/855] Updating connection -> _connection attribute in some packages. In particular: dns, language, logging, monitoring. Transitively, error reporting has also been covered (by logging). 
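This is a visibility convention rather than a behavior change: the connection moves from the public client.connection attribute to client._connection, signalling that callers should go through the client's methods instead of poking at the transport. A minimal sketch of the resulting shape (it mirrors the _Client test doubles below, not the full client):

    class Client(object):

        def __init__(self, project, connection=None):
            self.project = project
            # Was self.connection; the leading underscore marks the
            # transport object as an implementation detail.
            self._connection = connection
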
--- .../google/cloud/logging/_gax.py | 8 ++--- .../google/cloud/logging/_http.py | 10 +++---- .../handlers/transports/background_thread.py | 6 ++-- .../transports/test_background_thread.py | 2 +- .../unit_tests/test__gax.py | 2 +- .../unit_tests/test__http.py | 10 +++---- .../unit_tests/test_client.py | 30 +++++++++---------- .../unit_tests/test_logger.py | 10 +++---- 8 files changed, 39 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 1df2be5daf70..259b8e20e55a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -35,7 +35,7 @@ from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import make_secure_channel -from google.cloud.connection import DEFAULT_USER_AGENT +from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator @@ -531,7 +531,7 @@ def make_gax_logging_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client.connection.credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, LoggingServiceV2Api.SERVICE_ADDRESS) generated = LoggingServiceV2Api(channel=channel) return _LoggingAPI(generated, client) @@ -547,7 +547,7 @@ def make_gax_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client.connection.credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, MetricsServiceV2Api.SERVICE_ADDRESS) generated = MetricsServiceV2Api(channel=channel) return _MetricsAPI(generated, client) @@ -563,7 +563,7 @@ def make_gax_sinks_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client.connection.credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, ConfigServiceV2Api.SERVICE_ADDRESS) generated = ConfigServiceV2Api(channel=channel) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index a1fa388b0f09..8d9eccc819d5 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -16,14 +16,14 @@ import functools -from google.cloud import connection as base_connection +from google.cloud import _http from google.cloud.iterator import HTTPIterator from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric -class Connection(base_connection.JSONConnection): +class Connection(_http.JSONConnection): """A connection to Google Stackdriver Logging via the JSON REST API. 
:type credentials: :class:`oauth2client.client.OAuth2Credentials` @@ -67,7 +67,7 @@ class _LoggingAPI(object): def __init__(self, client): self._client = client - self._connection = client.connection + self._connection = client._connection def list_entries(self, projects, filter_=None, order_by=None, page_size=None, page_token=None): @@ -191,7 +191,7 @@ class _SinksAPI(object): """ def __init__(self, client): self._client = client - self._connection = client.connection + self._connection = client._connection def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. @@ -328,7 +328,7 @@ class _MetricsAPI(object): """ def __init__(self, client): self._client = client - self._connection = client.connection + self._connection = client._connection def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 3c1e76872985..144bccafc838 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -150,10 +150,10 @@ class BackgroundThreadTransport(Transport): """ def __init__(self, client, name): - http = copy.deepcopy(client.connection.http) - http = client.connection.credentials.authorize(http) + http = copy.deepcopy(client._connection.http) + http = client._connection.credentials.authorize(http) self.client = Client(client.project, - client.connection.credentials, + client._connection.credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index a7ef4fc43190..3695c591288c 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -188,7 +188,7 @@ class _Client(object): def __init__(self, project): self.project = project - self.connection = _Connection() + self._connection = _Connection() def logger(self, name): # pylint: disable=unused-argument self._logger = _Logger(name) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 7497a07efc7d..79d0568f899f 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1312,4 +1312,4 @@ def __init__(self, credentials): class _Client(object): def __init__(self, credentials): - self.connection = _Connection(credentials) + self._connection = _Connection(credentials) diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 1a6d5cd5d9f3..1a6e2d548a06 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -94,7 +94,7 @@ def test_list_entries_no_paging(self): } client = Client(project=self.PROJECT, credentials=object(), use_gax=False) - client.connection = _Connection(RETURNED) + client._connection = _Connection(RETURNED) api = self._make_one(client) 
iterator = api.list_entries([self.PROJECT]) @@ -117,7 +117,7 @@ def test_list_entries_no_paging(self): self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) - called_with = client.connection._called_with + called_with = client._connection._called_with expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) self.assertEqual(called_with, { 'method': 'POST', @@ -172,7 +172,7 @@ def test_list_entries_w_paging(self): } client = Client(project=self.PROJECT, credentials=object(), use_gax=False) - client.connection = _Connection(RETURNED) + client._connection = _Connection(RETURNED) api = self._make_one(client) iterator = api.list_entries( @@ -207,7 +207,7 @@ def test_list_entries_w_paging(self): self.assertIsNone(entry2.severity) self.assertIsNone(entry2.http_request) - called_with = client.connection._called_with + called_with = client._connection._called_with expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) self.assertEqual(called_with, { 'method': 'POST', @@ -802,4 +802,4 @@ def _datetime_to_rfc3339_w_nanos(value): class _Client(object): def __init__(self, connection): - self.connection = connection + self._connection = connection diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index ea6bd89fb961..2cc1cf4ff328 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -44,7 +44,7 @@ def test_logging_api_wo_gax(self): client = self._make_one(self.PROJECT, credentials=_Credentials(), use_gax=False) - conn = client.connection = object() + conn = client._connection = object() api = client.logging_api self.assertIsInstance(api, _LoggingAPI) @@ -98,7 +98,7 @@ def test_sinks_api_wo_gax(self): with _Monkey(MUT, _USE_GAX=False): client = self._make_one(self.PROJECT, credentials=_Credentials()) - conn = client.connection = object() + conn = client._connection = object() api = client.sinks_api self.assertIsInstance(api, _SinksAPI) @@ -139,7 +139,7 @@ def test_metrics_api_wo_gax(self): with _Monkey(MUT, _USE_GAX=False): client = self._make_one(self.PROJECT, credentials=_Credentials()) - conn = client.connection = object() + conn = client._connection = object() api = client.metrics_api self.assertIsInstance(api, _MetricsAPI) @@ -205,7 +205,7 @@ def test_list_entries_defaults(self): 'entries': ENTRIES, 'nextPageToken': TOKEN, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_entries() page = six.next(iterator.pages) @@ -223,7 +223,7 @@ def test_list_entries_defaults(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(token, TOKEN) - called_with = client.connection._called_with + called_with = client._connection._called_with self.assertEqual(called_with, { 'path': '/entries:list', 'method': 'POST', @@ -266,7 +266,7 @@ def test_list_entries_explicit(self): client = self._make_one(self.PROJECT, credentials=_Credentials(), use_gax=False) returned = {'entries': ENTRIES} - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, @@ -299,7 +299,7 @@ def test_list_entries_explicit(self): self.assertIs(entries[0].logger, entries[1].logger) - called_with = client.connection._called_with + called_with = client._connection._called_with self.assertEqual(called_with, { 'path': '/entries:list', 'method': 'POST', @@ -355,7 +355,7 @@ def 
test_list_sinks_no_paging(self): 'sinks': SINKS, 'nextPageToken': TOKEN, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_sinks() page = six.next(iterator.pages) @@ -374,7 +374,7 @@ def test_list_sinks_no_paging(self): self.assertIs(sink.client, client) # Verify the mocked transport. - called_with = client.connection._called_with + called_with = client._connection._called_with path = '/projects/%s/sinks' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', @@ -400,7 +400,7 @@ def test_list_sinks_with_paging(self): returned = { 'sinks': SINKS, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_sinks(PAGE_SIZE, TOKEN) sinks = list(iterator) @@ -418,7 +418,7 @@ def test_list_sinks_with_paging(self): self.assertIs(sink.client, client) # Verify the mocked transport. - called_with = client.connection._called_with + called_with = client._connection._called_with path = '/projects/%s/sinks' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', @@ -470,7 +470,7 @@ def test_list_metrics_no_paging(self): returned = { 'metrics': metrics, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) # Execute request. iterator = client.list_metrics() @@ -486,7 +486,7 @@ def test_list_metrics_no_paging(self): self.assertIs(metric.client, client) # Verify mocked transport. - called_with = client.connection._called_with + called_with = client._connection._called_with path = '/projects/%s/metrics' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', @@ -513,7 +513,7 @@ def test_list_metrics_with_paging(self): 'metrics': metrics, 'nextPageToken': next_token, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) # Execute request. iterator = client.list_metrics(page_size, token) @@ -532,7 +532,7 @@ def test_list_metrics_with_paging(self): self.assertIs(metric.client, client) # Verify mocked transport. 
- called_with = client.connection._called_with + called_with = client._connection._called_with path = '/projects/%s/metrics' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index c2770f83d9f3..cbe149102445 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -357,7 +357,7 @@ def test_list_entries_defaults(self): returned = { 'nextPageToken': TOKEN, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) logger = self._make_one(self.LOGGER_NAME, client=client) @@ -368,7 +368,7 @@ def test_list_entries_defaults(self): self.assertEqual(len(entries), 0) self.assertEqual(token, TOKEN) - called_with = client.connection._called_with + called_with = client._connection._called_with FILTER = 'logName=projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME) self.assertEqual(called_with, { @@ -391,7 +391,7 @@ def test_list_entries_explicit(self): PAGE_SIZE = 42 client = Client(project=self.PROJECT, credentials=object(), use_gax=False) - client.connection = _Connection({}) + client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, @@ -402,7 +402,7 @@ def test_list_entries_explicit(self): self.assertEqual(len(entries), 0) self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) - called_with = client.connection._called_with + called_with = client._connection._called_with combined_filter = '%s AND logName=projects/%s/logs/%s' % ( FILTER, self.PROJECT, self.LOGGER_NAME) self.assertEqual(called_with, { @@ -721,7 +721,7 @@ class _Client(object): def __init__(self, project, connection=None): self.project = project - self.connection = connection + self._connection = connection class _Bugout(Exception): From f17a01656646aa53aa7c01d61c52ab66da44efe8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 11 Nov 2016 14:16:26 -0800 Subject: [PATCH 047/855] Removing use of _Monkey in pubsub and logging. 
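For context on the mechanics: _Monkey (from google.cloud._testing) swapped attributes on an imported module object, while the standard mock library patches an attribute by its dotted path, so tests no longer need to import the module under test. A minimal sketch of the before/after pattern (illustrative only; make_api stands in for whatever test double a given test supplies):

    # Before: _Monkey replaces an attribute on the module object itself.
    from google.cloud._testing import _Monkey
    from google.cloud.logging import client as MUT

    with _Monkey(MUT, make_gax_logging_api=make_api):
        api = client.logging_api

    # After: mock.patch targets the same attribute by its dotted path.
    import mock

    patch = mock.patch(
        'google.cloud.logging.client.make_gax_logging_api', new=make_api)
    with patch:
        api = client.logging_api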
--- packages/google-cloud-logging/tox.ini | 1 + .../unit_tests/test__gax.py | 48 ++++++++++++------- .../unit_tests/test__helpers.py | 7 +-- .../unit_tests/test_client.py | 46 ++++++++++-------- 4 files changed, 61 insertions(+), 41 deletions(-) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini index 3019c5083252..dd8c14186e5b 100644 --- a/packages/google-cloud-logging/tox.ini +++ b/packages/google-cloud-logging/tox.ini @@ -7,6 +7,7 @@ localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = pytest + mock covercmd = py.test --quiet \ --cov=google.cloud.logging \ diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 7497a07efc7d..54cac6712daf 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1069,8 +1069,9 @@ def _call_fut(self, client): return make_gax_logging_api(client) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.logging import _gax as MUT + import mock + from google.cloud.logging._gax import _LoggingAPI + from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() client = _Client(creds) @@ -1090,15 +1091,18 @@ def generated_api(channel=None): host = 'foo.apis.invalid' generated_api.SERVICE_ADDRESS = host - with _Monkey(MUT, LoggingServiceV2Api=generated_api, - make_secure_channel=make_channel): + patch = mock.patch.multiple( + 'google.cloud.logging._gax', + LoggingServiceV2Api=generated_api, + make_secure_channel=make_channel) + with patch: logging_api = self._call_fut(client) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(creds, MUT.DEFAULT_USER_AGENT, host)]) + [(creds, DEFAULT_USER_AGENT, host)]) - self.assertIsInstance(logging_api, MUT._LoggingAPI) + self.assertIsInstance(logging_api, _LoggingAPI) self.assertIs(logging_api._gax_api, generated) self.assertIs(logging_api._client, client) @@ -1111,8 +1115,9 @@ def _call_fut(self, client): return make_gax_metrics_api(client) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.logging import _gax as MUT + import mock + from google.cloud.logging._gax import _MetricsAPI + from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() client = _Client(creds) @@ -1132,15 +1137,18 @@ def generated_api(channel=None): host = 'foo.apis.invalid' generated_api.SERVICE_ADDRESS = host - with _Monkey(MUT, MetricsServiceV2Api=generated_api, - make_secure_channel=make_channel): + patch = mock.patch.multiple( + 'google.cloud.logging._gax', + MetricsServiceV2Api=generated_api, + make_secure_channel=make_channel) + with patch: metrics_api = self._call_fut(client) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(creds, MUT.DEFAULT_USER_AGENT, host)]) + [(creds, DEFAULT_USER_AGENT, host)]) - self.assertIsInstance(metrics_api, MUT._MetricsAPI) + self.assertIsInstance(metrics_api, _MetricsAPI) self.assertIs(metrics_api._gax_api, generated) self.assertIs(metrics_api._client, client) @@ -1153,8 +1161,9 @@ def _call_fut(self, client): return make_gax_sinks_api(client) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.logging import _gax as MUT + import mock + from google.cloud.logging._gax import _SinksAPI + from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() client = _Client(creds) @@ -1174,15 +1183,18 @@ def generated_api(channel=None): host = 
'foo.apis.invalid' generated_api.SERVICE_ADDRESS = host - with _Monkey(MUT, ConfigServiceV2Api=generated_api, - make_secure_channel=make_channel): + patch = mock.patch.multiple( + 'google.cloud.logging._gax', + ConfigServiceV2Api=generated_api, + make_secure_channel=make_channel) + with patch: sinks_api = self._call_fut(client) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(creds, MUT.DEFAULT_USER_AGENT, host)]) + [(creds, DEFAULT_USER_AGENT, host)]) - self.assertIsInstance(sinks_api, MUT._SinksAPI) + self.assertIsInstance(sinks_api, _SinksAPI) self.assertIs(sinks_api._gax_api, generated) self.assertIs(sinks_api._client, client) diff --git a/packages/google-cloud-logging/unit_tests/test__helpers.py b/packages/google-cloud-logging/unit_tests/test__helpers.py index 85d2dc3b846a..8f8e43f36734 100644 --- a/packages/google-cloud-logging/unit_tests/test__helpers.py +++ b/packages/google-cloud-logging/unit_tests/test__helpers.py @@ -28,14 +28,15 @@ def test_unknown_type(self): self._call_fut({}, None, {}) def _payload_helper(self, key, class_name): - from google.cloud._testing import _Monkey - import google.cloud.logging._helpers as MUT + import mock resource = {key: 'yup'} client = object() loggers = {} mock_class = EntryMock() - with _Monkey(MUT, **{class_name: mock_class}): + + name = 'google.cloud.logging._helpers.' + class_name + with mock.patch(name, new=mock_class): result = self._call_fut(resource, client, loggers) self.assertIs(result, mock_class.sentinel) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index ea6bd89fb961..a65e621db2bd 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -54,8 +54,7 @@ def test_logging_api_wo_gax(self): self.assertIs(again, api) def test_logging_api_w_gax(self): - from google.cloud.logging import client as MUT - from google.cloud._testing import _Monkey + import mock clients = [] api_obj = object() @@ -68,7 +67,10 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) - with _Monkey(MUT, make_gax_logging_api=make_api): + patch = mock.patch( + 'google.cloud.logging.client.make_gax_logging_api', + new=make_api) + with patch: api = client.logging_api self.assertIs(api, api_obj) @@ -78,12 +80,14 @@ def make_api(client_obj): self.assertIs(again, api) def test_no_gax_ctor(self): - from google.cloud._testing import _Monkey - from google.cloud.logging import client as MUT + import mock from google.cloud.logging._http import _LoggingAPI creds = _Credentials() - with _Monkey(MUT, _USE_GAX=True): + patch = mock.patch( + 'google.cloud.logging.client._USE_GAX', + new=True) + with patch: client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) @@ -92,11 +96,10 @@ def test_no_gax_ctor(self): def test_sinks_api_wo_gax(self): from google.cloud.logging._http import _SinksAPI - from google.cloud.logging import client as MUT - from google.cloud._testing import _Monkey - with _Monkey(MUT, _USE_GAX=False): - client = self._make_one(self.PROJECT, credentials=_Credentials()) + client = self._make_one( + self.PROJECT, credentials=_Credentials(), + use_gax=False) conn = client.connection = object() api = client.sinks_api @@ -108,8 +111,7 @@ def test_sinks_api_wo_gax(self): self.assertIs(again, api) def test_sinks_api_w_gax(self): - from google.cloud.logging import client as MUT - from google.cloud._testing import 
_Monkey + import mock clients = [] api_obj = object() @@ -122,7 +124,10 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) - with _Monkey(MUT, make_gax_sinks_api=make_api): + patch = mock.patch( + 'google.cloud.logging.client.make_gax_sinks_api', + new=make_api) + with patch: api = client.sinks_api self.assertIs(api, api_obj) @@ -133,11 +138,10 @@ def test_metrics_api_wo_gax(self): from google.cloud.logging._http import _MetricsAPI - from google.cloud.logging import client as MUT - from google.cloud._testing import _Monkey - with _Monkey(MUT, _USE_GAX=False): - client = self._make_one(self.PROJECT, credentials=_Credentials()) + client = self._make_one( + self.PROJECT, credentials=_Credentials(), + use_gax=False) conn = client.connection = object() api = client.metrics_api @@ -149,8 +153,7 @@ def test_metrics_api_wo_gax(self): self.assertIs(again, api) def test_metrics_api_w_gax(self): - from google.cloud.logging import client as MUT - from google.cloud._testing import _Monkey + import mock clients = [] api_obj = object() @@ -163,7 +166,10 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) - with _Monkey(MUT, make_gax_metrics_api=make_api): + patch = mock.patch( + 'google.cloud.logging.client.make_gax_metrics_api', + new=make_api) + with patch: api = client.metrics_api self.assertIs(api, api_obj) From 4256e4cecec5805e84986fd0466f3510f4bf1663 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 12:44:19 -0800 Subject: [PATCH 048/855] Upgrading core to version 0.21.0. As a result, also upgrading the umbrella package and all packages to 0.21.0 (since they all depend on core). --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 0a0255553e24..6ab0c59b6ca0 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.20.0', + 'google-cloud-core >= 0.21.0', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-logging-v2 >= 0.10.1, < 0.11dev', @@ -59,7 +59,7 @@ setup( name='google-cloud-logging', - version='0.20.0', + version='0.21.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 1a16e6c782b31b746594d72b627b210c976c3c66 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:11:34 -0800 Subject: [PATCH 049/855] Need to install from local deps first. The `pip install --upgrade` is still needed to ensure freshness, but by removing the filesystem paths from deps we made the initial install grab from PyPI (by mistake). This way, all local package deps are grabbed from the local filesystem.
--- packages/google-cloud-logging/tox.ini | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini index dd8c14186e5b..ea41e5413c43 100644 --- a/packages/google-cloud-logging/tox.ini +++ b/packages/google-cloud-logging/tox.ini @@ -6,8 +6,9 @@ envlist = localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = - pytest + {toxinidir}/../core mock + pytest covercmd = py.test --quiet \ --cov=google.cloud.logging \ @@ -17,7 +18,6 @@ covercmd = [testenv] commands = - {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,7 +26,6 @@ deps = basepython = python2.7 commands = - {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From 6175f6dd1960d17a5d97ddb6901113fd5cfa4e91 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:58:42 -0800 Subject: [PATCH 050/855] Fixing accidental removal of {localdeps} Also - adding RTD dependency for runtimeconfig. - adding local paths to umbrella tox config "deps" as was done in #2733. --- packages/google-cloud-logging/tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini index ea41e5413c43..db3e8a654954 100644 --- a/packages/google-cloud-logging/tox.ini +++ b/packages/google-cloud-logging/tox.ini @@ -18,6 +18,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,6 +27,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From 9bf8557c4387cb24b7b0f3c8c6dd20db4ff9b850 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 16 Nov 2016 11:09:27 -0500 Subject: [PATCH 051/855] Set a compatible core version specifier in packages. --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6ab0c59b6ca0..1a335fcf6760 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0', + 'google-cloud-core >= 0.21.0, < 0.22dev', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-logging-v2 >= 0.10.1, < 0.11dev', From 558c3881391e7b633ce561e2f9b02a7eff31a3cd Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Mon, 31 Oct 2016 10:56:23 -0700 Subject: [PATCH 052/855] Add GAE and GKE fluentd Handlers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit On GAE, and on GKE clusters with the plugin installed, a fluentd agent collects logs from files. However, without the right formatting, metadata like log_level is lost. Furthermore, the fluentd agents are configured to set the correct resource types; this could be done in the main handler as well, but it’s easier to rely on the fluentd configurations. This adds two new handlers and some helper functions to detect when they should be used.
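For reference, a minimal usage sketch of the new surface (illustrative only; it assumes application default credentials, 'my-project' is a placeholder, and get_default_handler/setup_logging are the client methods added below):

    import logging

    from google.cloud.logging.client import Client

    client = Client(project='my-project')  # 'my-project' is a placeholder

    # get_default_handler() inspects environment variables: it returns an
    # AppEngineHandler on GAE Flexible, a ContainerEngineHandler on GKE,
    # and a CloudLoggingHandler (direct API calls) everywhere else.
    handler = client.get_default_handler()

    # Or attach the detected handler to the root logger in one call.
    client.setup_logging(log_level=logging.INFO)

    logging.info('routed through the environment-appropriate handler')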
--- .../google/cloud/logging/client.py | 53 +++++++++++++ .../google/cloud/logging/handlers/__init__.py | 3 + .../google/cloud/logging/handlers/_helpers.py | 39 ++++++++++ .../cloud/logging/handlers/app_engine.py | 73 ++++++++++++++++++ .../logging/handlers/container_engine.py | 44 +++++++++++ .../google/cloud/logging/handlers/handlers.py | 39 +++++----- .../handlers/transports/background_thread.py | 11 ++- .../unit_tests/handlers/test_app_engine.py | 57 ++++++++++++++ .../handlers/test_container_engine.py | 51 +++++++++++++ .../unit_tests/handlers/test_handlers.py | 23 ++---- .../transports/test_background_thread.py | 64 ++++++++-------- .../handlers/transports/test_sync.py | 31 +++----- .../unit_tests/test_client.py | 76 +++++++++++++++++++ 13 files changed, 468 insertions(+), 96 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py create mode 100644 packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index b84fc9c6a736..c92f177eaac6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -14,6 +14,7 @@ """Client for interacting with the Google Stackdriver Logging API.""" +import logging import os try: @@ -34,6 +35,12 @@ from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI from google.cloud.logging._http import _MetricsAPI as JSONMetricsAPI from google.cloud.logging._http import _SinksAPI as JSONSinksAPI +from google.cloud.logging.handlers import CloudLoggingHandler +from google.cloud.logging.handlers import AppEngineHandler +from google.cloud.logging.handlers import ContainerEngineHandler +from google.cloud.logging.handlers import setup_logging +from google.cloud.logging.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS + from google.cloud.logging.logger import Logger from google.cloud.logging.metric import Metric from google.cloud.logging.sink import Sink @@ -42,6 +49,15 @@ _DISABLE_GAX = os.getenv(DISABLE_GRPC, False) _USE_GAX = _HAVE_GAX and not _DISABLE_GAX +_APPENGINE_FLEXIBLE_ENV_VM = 'GAE_APPENGINE_HOSTNAME' +"""Environment variable set in App Engine when vm:true is set.""" + +_APPENGINE_FLEXIBLE_ENV_FLEX = 'GAE_INSTANCE' +"""Environment variable set in App Engine when env:flex is set.""" + +_CONTAINER_ENGINE_ENV = 'KUBERNETES_SERVICE' +"""Environment variable set in a Google Container Engine environment.""" + class Client(JSONClient): """Client to bundle configuration needed for API requests. @@ -264,3 +280,40 @@ def list_metrics(self, page_size=None, page_token=None): """ return self.metrics_api.list_metrics( self.project, page_size, page_token) + + def get_default_handler(self): + """Return the default logging handler based on the local environment. 
+ + :rtype: :class:`logging.Handler` + :returns: The default log handler based on the environment + """ + if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or + _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ): + return AppEngineHandler() + elif _CONTAINER_ENGINE_ENV in os.environ: + return ContainerEngineHandler() + else: + return CloudLoggingHandler(self) + + def setup_logging(self, log_level=logging.INFO, + excluded_loggers=EXCLUDED_LOGGER_DEFAULTS): + """Attach the default Stackdriver logging handler to the root logger. + + This method uses the default log handler, obtained by + :meth:`~get_default_handler`, and attaches it to the root Python + logger, so that a call such as ``logging.warn``, as well as all child + loggers, will report to Stackdriver Logging. + + :type log_level: int + :param log_level: (Optional) Python logging log level. Defaults to + :const:`logging.INFO`. + + :type excluded_loggers: tuple + :param excluded_loggers: (Optional) The loggers to not attach the + handler to. This will always include the + loggers in the path of the logging client + itself. + """ + handler = self.get_default_handler() + setup_logging(handler, log_level=log_level, + excluded_loggers=excluded_loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py index 57d08af8637f..9745296e9782 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -14,5 +14,8 @@ """Python :mod:`logging` handlers for Google Cloud Logging.""" +from google.cloud.logging.handlers.app_engine import AppEngineHandler +from google.cloud.logging.handlers.container_engine import ( + ContainerEngineHandler) from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.handlers import setup_logging diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py new file mode 100644 index 000000000000..81adcf0eb545 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -0,0 +1,39 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for logging handlers.""" + +import math +import json + + +def format_stackdriver_json(record, message): + """Helper to format a LogRecord in Stackdriver fluentd format. + + :type record: :class:`logging.LogRecord` + :param record: the log record + + :type message: str + :param message: the formatted log message + + :rtype: str + :returns: JSON str to be written to the log file.
+ """ + subsecond, second = math.modf(record.created) + + payload = { + 'message': message, + 'timestamp': { + 'seconds': int(second), + 'nanos': int(subsecond * 1e9), + }, + 'thread': record.thread, + 'severity': record.levelname, + } + + return json.dumps(payload) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py new file mode 100644 index 000000000000..4184c2054b1a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -0,0 +1,73 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Logging handler for App Engine Flexible + +Logs to the well-known file that the fluentd sidecar container on App Engine +Flexible is configured to read from and send to Stackdriver Logging. + +See the fluentd configuration here: + +https://github.com/GoogleCloudPlatform/appengine-sidecars-docker/tree/master/fluentd_logger +""" + +# This file is largely copied from: +# https://github.com/GoogleCloudPlatform/python-compat-runtime/blob/master +# /appengine-vmruntime/vmruntime/cloud_logging.py + +import logging.handlers +import os + +from google.cloud.logging.handlers._helpers import format_stackdriver_json + +_LOG_PATH_TEMPLATE = '/var/log/app_engine/app.{pid}.json' +_MAX_LOG_BYTES = 128 * 1024 * 1024 +_LOG_FILE_COUNT = 3 + + +class AppEngineHandler(logging.handlers.RotatingFileHandler): + """A handler that writes to the App Engine fluentd Stackdriver log file. + + Writes to the file that the fluentd agent on App Engine Flexible is + configured to discover logs and send them to Stackdriver Logging. + Log entries are wrapped in JSON and with appropriate metadata. The + process of converting the user's formatted logs into a JSON payload for + Stackdriver Logging consumption is implemented as part of the handler + itself, and not as a formatting step, so as not to interfere with + user-defined logging formats. + """ + + def __init__(self): + """Construct the handler + + Large log entries will get mangled if multiple workers write to the + same file simultaneously, so we'll use the worker's PID to pick a log + filename. + """ + self.filename = _LOG_PATH_TEMPLATE.format(pid=os.getpid()) + super(AppEngineHandler, self).__init__(self.filename, + maxBytes=_MAX_LOG_BYTES, + backupCount=_LOG_FILE_COUNT) + + def format(self, record): + """Format the specified record into the expected JSON structure. 
+ + :type record: :class:`~logging.LogRecord` + :param record: the log record + + :rtype: str + :returns: JSON str to be written to the log file + """ + message = super(AppEngineHandler, self).format(record) + return format_stackdriver_json(record, message) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py new file mode 100644 index 000000000000..8beb7d076a4b --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py @@ -0,0 +1,44 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Logging handler for Google Container Engine (GKE). + +Formats log messages as JSON, so that on Kubernetes clusters with the +fluentd Google Cloud plugin installed, metadata such as log level is +properly captured. +""" + +import logging.handlers + +from google.cloud.logging.handlers._helpers import format_stackdriver_json + + +class ContainerEngineHandler(logging.StreamHandler): + """Handler to format log messages in the format expected by GKE fluentd. + + This handler is written to format messages for the Google Container Engine + (GKE) fluentd plugin, so that metadata such as log level is properly set. + """ + + def format(self, record): + """Format the message into JSON expected by fluentd. + + :type record: :class:`~logging.LogRecord` + :param record: the log record + + :rtype: str + :returns: A JSON string formatted for GKE fluentd. + """ + message = super(ContainerEngineHandler, self).format(record) + return format_stackdriver_json(record, message) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index e3b6d5b30da4..4cf3f0cb20e9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -12,30 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Python :mod:`logging` handlers for Google Cloud Logging.""" +"""Python :mod:`logging` handlers for Stackdriver Logging.""" import logging from google.cloud.logging.handlers.transports import BackgroundThreadTransport -EXCLUDE_LOGGER_DEFAULTS = ( - 'google.cloud', - 'oauth2client' -) - DEFAULT_LOGGER_NAME = 'python' +EXCLUDED_LOGGER_DEFAULTS = ('google.cloud', 'oauth2client') + class CloudLoggingHandler(logging.StreamHandler): - """Python standard ``logging`` handler. + """Handler that directly makes Stackdriver logging API calls. - This handler can be used to route Python standard logging messages - directly to the Stackdriver Logging API. + This is a Python standard ``logging`` handler that can be used to + route Python standard logging messages directly to the Stackdriver + Logging API.
- Note that this handler currently only supports a synchronous API call, - which means each logging statement that uses this handler will require - an API call. + This handler supports both asynchronous and synchronous transports. :type client: :class:`google.cloud.logging.client` :param client: the authenticated Google Cloud Logging client for this @@ -93,8 +88,9 @@ def emit(self, record): self.transport.send(record, message) -def setup_logging(handler, excluded_loggers=EXCLUDE_LOGGER_DEFAULTS): - """Attach the ``CloudLogging`` handler to the Python root logger +def setup_logging(handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, + log_level=logging.INFO): + """Attach a logging handler to the Python root logger Excludes loggers that this library itself uses to avoid infinite recursion. @@ -103,9 +99,13 @@ :param handler: the handler to attach to the global handler :type excluded_loggers: tuple - :param excluded_loggers: The loggers to not attach the handler to. This - will always include the loggers in the path of - the logging client itself. + :param excluded_loggers: (Optional) The loggers to not attach the handler + to. This will always include the loggers in the + path of the logging client itself. + + :type log_level: int + :param log_level: (Optional) Python logging log level. Defaults to + :const:`logging.INFO`. Example: @@ -123,8 +123,9 @@ logging.error('bad news') # API call """ - all_excluded_loggers = set(excluded_loggers + EXCLUDE_LOGGER_DEFAULTS) + all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) logger = logging.getLogger() + logger.setLevel(log_level) logger.addHandler(handler) logger.addHandler(logging.StreamHandler()) for logger_name in all_excluded_loggers: diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 144bccafc838..aa50e0d3ffc1 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -21,9 +21,10 @@ import copy import threading -from google.cloud.logging.client import Client from google.cloud.logging.handlers.transports.base import Transport +_WORKER_THREAD_NAME = 'google.cloud.logging.handlers.transport.Worker' + class _Worker(object): """A threaded worker that writes batches of log entries @@ -96,8 +97,7 @@ def _start(self): try: self._entries_condition.acquire() self._thread = threading.Thread( - target=self._run, - name='google.cloud.logging.handlers.transport.Worker') + target=self._run, name=_WORKER_THREAD_NAME) self._thread.setDaemon(True) self._thread.start() finally: @@ -150,10 +152,8 @@ class BackgroundThreadTransport(Transport): def __init__(self, client, name): http = copy.deepcopy(client._connection.http) http = client._connection.credentials.authorize(http) - self.client = Client(client.project, - client._connection.credentials, - http) + self.client = client.__class__(client.project, + client._connection.credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py b/packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py new file mode 100644 index 000000000000..9be8a2bec9b3
--- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py @@ -0,0 +1,57 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestAppEngineHandlerHandler(unittest.TestCase): + PROJECT = 'PROJECT' + + def _get_target_class(self): + from google.cloud.logging.handlers.app_engine import AppEngineHandler + + return AppEngineHandler + + def _make_one(self, *args, **kw): + import tempfile + + from google.cloud._testing import _Monkey + from google.cloud.logging.handlers import app_engine as _MUT + + tmpdir = tempfile.mktemp() + with _Monkey(_MUT, _LOG_PATH_TEMPLATE=tmpdir): + return self._get_target_class()(*args, **kw) + + def test_format(self): + import json + import logging + + handler = self._make_one() + logname = 'loggername' + message = 'hello world' + record = logging.LogRecord(logname, logging.INFO, None, + None, message, None, None) + record.created = 5.03 + expected_payload = { + 'message': message, + 'timestamp': { + 'seconds': 5, + 'nanos': int(.03 * 1e9), + }, + 'thread': record.thread, + 'severity': record.levelname, + } + payload = handler.format(record) + + self.assertEqual(payload, json.dumps(expected_payload)) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py b/packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py new file mode 100644 index 000000000000..b8ce0dc436f3 --- /dev/null +++ b/packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py @@ -0,0 +1,51 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +class TestContainerEngineHandler(unittest.TestCase): + PROJECT = 'PROJECT' + + def _get_target_class(self): + from google.cloud.logging.handlers.container_engine import ( + ContainerEngineHandler) + + return ContainerEngineHandler + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_format(self): + import logging + import json + + handler = self._make_one() + logname = 'loggername' + message = 'hello world' + record = logging.LogRecord(logname, logging.INFO, None, None, + message, None, None) + record.created = 5.03 + expected_payload = { + 'message': message, + 'timestamp': { + 'seconds': 5, + 'nanos': int(.03 * 1e9) + }, + 'thread': record.thread, + 'severity': record.levelname, + } + payload = handler.format(record) + + self.assertEqual(payload, json.dumps(expected_payload)) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index 54c38f9b82cb..234b2991df45 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -36,12 +36,13 @@ def test_ctor(self): def test_emit(self): client = _Client(self.PROJECT) handler = self._make_one(client, transport=_Transport) - LOGNAME = 'loggername' - MESSAGE = 'hello world' - record = _Record(LOGNAME, logging.INFO, MESSAGE) + logname = 'loggername' + message = 'hello world' + record = logging.LogRecord(logname, logging.INFO, None, None, message, + None, None) handler.emit(record) - self.assertEqual(handler.transport.send_called_with, (record, MESSAGE)) + self.assertEqual(handler.transport.send_called_with, (record, message)) class TestSetupLogging(unittest.TestCase): @@ -100,20 +101,6 @@ def __init__(self, project): self.project = project -class _Record(object): - - def __init__(self, name, level, message): - self.name = name - self.levelname = level - self.message = message - self.exc_info = None - self.exc_text = None - self.stack_info = None - - def getMessage(self): - return self.message - - class _Transport(object): def __init__(self, client, name): diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index 3695c591288c..eb9204b4e2ae 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -42,16 +42,17 @@ def test_send(self): transport = self._make_one(client, NAME) transport.worker.batch = client.logger(NAME).batch() - PYTHON_LOGGER_NAME = 'mylogger' - MESSAGE = 'hello world' - record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) - transport.send(record, MESSAGE) + python_logger_name = 'mylogger' + message = 'hello world' + record = logging.LogRecord(python_logger_name, logging.INFO, + None, None, message, None, None) + transport.send(record, message) EXPECTED_STRUCT = { - 'message': MESSAGE, - 'python_logger': PYTHON_LOGGER_NAME + 'message': message, + 'python_logger': python_logger_name } - EXPECTED_SENT = (EXPECTED_STRUCT, logging.INFO) + EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO') self.assertEqual(transport.worker.batch.log_struct_called_with, EXPECTED_SENT) @@ -77,9 +78,11 @@ def test_run(self): logger = _Logger(NAME) worker = self._make_one(logger) - PYTHON_LOGGER_NAME = 'mylogger' - MESSAGE = 'hello world' -
record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) + python_logger_name = 'mylogger' + message = 'hello world' + record = logging.LogRecord(python_logger_name, + logging.INFO, None, None, + message, None, None) worker._start() @@ -91,7 +94,7 @@ def test_run(self): while not worker.started: time.sleep(1) # pragma: NO COVER - worker.enqueue(record, MESSAGE) + worker.enqueue(record, message) # Set timeout to none so worker thread finishes worker._stop_timeout = None worker._stop() @@ -99,20 +102,22 @@ def test_run(self): def test_run_after_stopped(self): # No-op - NAME = 'python_logger' - logger = _Logger(NAME) + name = 'python_logger' + logger = _Logger(name) worker = self._make_one(logger) - PYTHON_LOGGER_NAME = 'mylogger' - MESSAGE = 'hello world' - record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) + python_logger_name = 'mylogger' + message = 'hello world' + record = logging.LogRecord(python_logger_name, + logging.INFO, None, None, + message, None, None) worker._start() while not worker.started: time.sleep(1) # pragma: NO COVER worker._stop_timeout = None worker._stop() - worker.enqueue(record, MESSAGE) + worker.enqueue(record, message) self.assertFalse(worker.batch.commit_called) worker._stop() @@ -122,11 +127,13 @@ def test_run_enqueue_early(self): logger = _Logger(NAME) worker = self._make_one(logger) - PYTHON_LOGGER_NAME = 'mylogger' - MESSAGE = 'hello world' - record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) + python_logger_name = 'mylogger' + message = 'hello world' + record = logging.LogRecord(python_logger_name, + logging.INFO, None, None, + message, None, None) - worker.enqueue(record, MESSAGE) + worker.enqueue(record, message) worker._start() while not worker.started: time.sleep(1) # pragma: NO COVER @@ -135,17 +142,6 @@ def test_run_enqueue_early(self): self.assertTrue(worker.stopped) -class _Record(object): - - def __init__(self, name, level, message): - self.name = name - self.levelname = level - self.message = message - self.exc_info = None - self.exc_text = None - self.stack_info = None - - class _Batch(object): def __init__(self): @@ -186,8 +182,10 @@ def batch(self): class _Client(object): - def __init__(self, project): + def __init__(self, project, http=None, credentials=None): self.project = project + self.http = http + self.credentials = credentials self._connection = _Connection() def logger(self, name): # pylint: disable=unused-argument diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py index 54e14dcbdfff..6650eb8a9d2e 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -36,33 +36,24 @@ def test_ctor(self): def test_send(self): client = _Client(self.PROJECT) - STACKDRIVER_LOGGER_NAME = 'python' - PYTHON_LOGGER_NAME = 'mylogger' - transport = self._make_one(client, STACKDRIVER_LOGGER_NAME) - MESSAGE = 'hello world' - record = _Record(PYTHON_LOGGER_NAME, logging.INFO, MESSAGE) - transport.send(record, MESSAGE) + stackdriver_logger_name = 'python' + python_logger_name = 'mylogger' + transport = self._make_one(client, stackdriver_logger_name) + message = 'hello world' + record = logging.LogRecord(python_logger_name, logging.INFO, + None, None, message, None, None) + + transport.send(record, message) EXPECTED_STRUCT = { - 'message': MESSAGE, - 'python_logger': PYTHON_LOGGER_NAME + 'message': message, + 
'python_logger': python_logger_name, } - EXPECTED_SENT = (EXPECTED_STRUCT, logging.INFO) + EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO') self.assertEqual( transport.logger.log_struct_called_with, EXPECTED_SENT) -class _Record(object): - - def __init__(self, name, level, message): - self.name = name - self.levelname = level - self.message = message - self.exc_info = None - self.exc_text = None - self.stack_info = None - - class _Logger(object): def __init__(self, name): diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 7e5173932ff1..6e7fc8f80f56 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -549,6 +549,79 @@ def test_list_metrics_with_paging(self): }, }) + def test_get_default_handler_app_engine(self): + import os + from google.cloud._testing import _Monkey + from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM + from google.cloud.logging.handlers import app_engine as _MUT + from google.cloud.logging.handlers import AppEngineHandler + + client = self._make_one(project=self.PROJECT, + credentials=_Credentials(), + use_gax=False) + + with _Monkey(_MUT, _LOG_PATH_TEMPLATE='{pid}'): + with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): + handler = client.get_default_handler() + + self.assertIsInstance(handler, AppEngineHandler) + + def test_get_default_handler_container_engine(self): + import os + from google.cloud._testing import _Monkey + from google.cloud.logging.client import _CONTAINER_ENGINE_ENV + from google.cloud.logging.handlers import ContainerEngineHandler + + client = self._make_one(project=self.PROJECT, + credentials=_Credentials(), + use_gax=False) + + with _Monkey(os, environ={_CONTAINER_ENGINE_ENV: 'True'}): + handler = client.get_default_handler() + + self.assertIsInstance(handler, ContainerEngineHandler) + + def test_get_default_handler_general(self): + import httplib2 + import mock + from google.cloud.logging.handlers import CloudLoggingHandler + + http_mock = mock.Mock(spec=httplib2.Http) + credentials = _Credentials() + deepcopy = mock.Mock(return_value=http_mock) + + with mock.patch('copy.deepcopy', new=deepcopy): + client = self._make_one(project=self.PROJECT, + credentials=credentials, + use_gax=False) + handler = client.get_default_handler() + deepcopy.assert_called_once_with(client._connection.http) + + self.assertIsInstance(handler, CloudLoggingHandler) + self.assertIs(credentials.authorized, http_mock) + + def test_setup_logging(self): + import httplib2 + import mock + + http_mock = mock.Mock(spec=httplib2.Http) + deepcopy = mock.Mock(return_value=http_mock) + setup_logging = mock.Mock() + + credentials = _Credentials() + + with mock.patch('copy.deepcopy', new=deepcopy): + with mock.patch('google.cloud.logging.client.setup_logging', + new=setup_logging): + client = self._make_one(project=self.PROJECT, + credentials=credentials, + use_gax=False) + client.setup_logging() + deepcopy.assert_called_once_with(client._connection.http) + + setup_logging.assert_called() + self.assertIs(credentials.authorized, http_mock) class _Credentials(object): @@ -562,6 +635,9 @@ def create_scoped(self, scope): self._scopes = scope return self + def authorize(self, http): + self.authorized = http + class _Connection(object): From 5158cb3dc4a31c64362529f2a5218079ec288d63 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 2 Dec 2016 12:38:43 -0800 Subject: [PATCH 053/855] Upgrading GAPIC
deps to 0.14 series. In the process, ditching **hard** deps on protobufs, grpcio, and google-gax in those packages. Also had to upgrade calls to list_log_entries() based on https://github.com/googleapis/api-client-staging/pull/118/files/2bcd2875a578ae525d2aabb862cf9c131b4665f5#r90422054 --- .../google/cloud/logging/_gax.py | 45 +++++++-------- packages/google-cloud-logging/setup.py | 5 +- .../unit_tests/test__gax.py | 56 ++++++++++--------- 3 files changed, 55 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 259b8e20e55a..0ffd46dd3fcb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -16,19 +16,19 @@ import functools -from google.cloud.gapic.logging.v2.config_service_v2_api import ( - ConfigServiceV2Api) -from google.cloud.gapic.logging.v2.logging_service_v2_api import ( - LoggingServiceV2Api) -from google.cloud.gapic.logging.v2.metrics_service_v2_api import ( - MetricsServiceV2Api) +from google.cloud.gapic.logging.v2.config_service_v2_client import ( + ConfigServiceV2Client) +from google.cloud.gapic.logging.v2.logging_service_v2_client import ( + LoggingServiceV2Client) +from google.cloud.gapic.logging.v2.metrics_service_v2_client import ( + MetricsServiceV2Client) from google.gax import CallOptions from google.gax import INITIAL_PAGE from google.gax.errors import GaxError from google.gax.grpc import exc_to_code -from google.logging.v2.logging_config_pb2 import LogSink -from google.logging.v2.logging_metrics_pb2 import LogMetric -from google.logging.v2.log_entry_pb2 import LogEntry +from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink +from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric +from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict from grpc import StatusCode @@ -48,7 +48,7 @@ class _LoggingAPI(object): """Helper mapping logging-related APIs. :type gax_api: - :class:`google.logging.v2.logging_service_v2_api.LoggingServiceV2Api` + :class:`.logging_service_v2_client.LoggingServiceV2Client` :param gax_api: API object used to make GAX requests. :type client: :class:`~google.cloud.logging.client.Client` @@ -92,7 +92,7 @@ def list_entries(self, projects, filter_='', order_by='', page_token = INITIAL_PAGE options = CallOptions(page_token=page_token) page_iter = self._gax_api.list_log_entries( - projects, filter_=filter_, order_by=order_by, + [], project_ids=projects, filter_=filter_, order_by=order_by, page_size=page_size, options=options) # We attach a mutable loggers dictionary so that as Logger @@ -152,7 +152,7 @@ class _SinksAPI(object): """Helper mapping sink-related APIs. :type gax_api: - :class:`google.logging.v2.config_service_v2_api.ConfigServiceV2Api` + :class:`.config_service_v2_client.ConfigServiceV2Client` :param gax_api: API object used to make GAX requests. :type client: :class:`~google.cloud.logging.client.Client` @@ -300,7 +300,8 @@ class _MetricsAPI(object): """Helper mapping sink-related APIs. :type gax_api: - :class:`google.logging.v2.metrics_service_v2_api.MetricsServiceV2Api` + :class:`.metrics_service_v2_client.MetricsServiceV2Client` + :param gax_api: API object used to make GAX requests. 
:type client: :class:`~google.cloud.logging.client.Client` @@ -471,7 +472,7 @@ def _item_to_entry(iterator, entry_pb, loggers): :type iterator: :class:`~google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. - :type entry_pb: :class:`~google.logging.v2.log_entry_pb2.LogEntry` + :type entry_pb: :class:`.log_entry_pb2.LogEntry` :param entry_pb: Log entry protobuf returned from the API. :type loggers: dict @@ -494,7 +495,7 @@ def _item_to_sink(iterator, log_sink_pb): :param iterator: The iterator that is currently in use. :type log_sink_pb: - :class:`~google.logging.v2.logging_config_pb2.LogSink` + :class:`.logging_config_pb2.LogSink` :param log_sink_pb: Sink protobuf returned from the API. :rtype: :class:`~google.cloud.logging.sink.Sink` @@ -511,7 +512,7 @@ def _item_to_metric(iterator, log_metric_pb): :param iterator: The iterator that is currently in use. :type log_metric_pb: - :class:`~google.logging.v2.logging_metrics_pb2.LogMetric` + :class:`.logging_metrics_pb2.LogMetric` :param log_metric_pb: Metric protobuf returned from the API. :rtype: :class:`~google.cloud.logging.metric.Metric` @@ -532,8 +533,8 @@ def make_gax_logging_api(client): """ channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, - LoggingServiceV2Api.SERVICE_ADDRESS) - generated = LoggingServiceV2Api(channel=channel) + LoggingServiceV2Client.SERVICE_ADDRESS) + generated = LoggingServiceV2Client(channel=channel) return _LoggingAPI(generated, client) @@ -548,8 +549,8 @@ def make_gax_metrics_api(client): """ channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, - MetricsServiceV2Api.SERVICE_ADDRESS) - generated = MetricsServiceV2Api(channel=channel) + MetricsServiceV2Client.SERVICE_ADDRESS) + generated = MetricsServiceV2Client(channel=channel) return _MetricsAPI(generated, client) @@ -564,6 +565,6 @@ def make_gax_sinks_api(client): """ channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, - ConfigServiceV2Api.SERVICE_ADDRESS) - generated = ConfigServiceV2Api(channel=channel) + ConfigServiceV2Client.SERVICE_ADDRESS) + generated = ConfigServiceV2Client(channel=channel) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 1a335fcf6760..03b7563ec730 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,10 +51,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.21.0, < 0.22dev', - 'grpcio >= 1.0.0, < 2.0dev', - 'google-gax >= 0.14.1, < 0.15dev', - 'gapic-google-logging-v2 >= 0.10.1, < 0.11dev', - 'grpc-google-logging-v2 >= 0.10.1, < 0.11dev', + 'gapic-google-cloud-logging-v2 >= 0.14.0, < 0.15dev', ] setup( diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 1d516dbfd7bb..2d7280996a0b 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -58,7 +58,7 @@ def test_list_entries_no_paging(self): from google.api.monitored_resource_pb2 import MonitoredResource from google.gax import INITIAL_PAGE - from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import UTC @@ -103,8 +103,9 @@ def test_list_entries_no_paging(self): self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) - 
projects, filter_, order_by, page_size, options = ( + resource_names, projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) + self.assertEqual(resource_names, []) self.assertEqual(projects, [self.PROJECT]) self.assertEqual(filter_, self.FILTER) self.assertEqual(order_by, DESCENDING) @@ -115,7 +116,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): import datetime from google.api.monitored_resource_pb2 import MonitoredResource - from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import UTC from google.cloud._testing import _GAXPageIterator @@ -158,8 +159,9 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) - projects, filter_, order_by, page_size, options = ( + resource_names, projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) + self.assertEqual(resource_names, []) self.assertEqual(projects, [self.PROJECT]) self.assertEqual(filter_, '') self.assertEqual(order_by, '') @@ -205,8 +207,9 @@ def test_list_entries_with_paging_nested_payload(self): def _make_log_entry_with_extras(self, labels, iid, type_url, now): from google.api.monitored_resource_pb2 import MonitoredResource - from google.logging.v2.log_entry_pb2 import LogEntry - from google.logging.v2.log_entry_pb2 import LogEntryOperation + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import ( + LogEntryOperation) from google.logging.type.http_request_pb2 import HttpRequest from google.logging.type.log_severity_pb2 import WARNING from google.protobuf.any_pb2 import Any @@ -311,8 +314,9 @@ def test_list_entries_with_extra_properties(self): 'cacheHit': entry_pb.http_request.cache_hit, }) - projects, filter_, order_by, page_size, options = ( + resource_names, projects, filter_, order_by, page_size, options = ( gax_api._list_log_entries_called_with) + self.assertEqual(resource_names, []) self.assertEqual(projects, [self.PROJECT]) self.assertEqual(filter_, '') self.assertEqual(order_by, '') @@ -320,7 +324,7 @@ def test_list_entries_with_extra_properties(self): self.assertEqual(options.page_token, TOKEN) def test_write_entries_single(self): - from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry TEXT = 'TEXT' ENTRY = { 'logName': self.LOG_PATH, @@ -353,7 +357,7 @@ def test_write_entries_w_extra_properties(self): # pylint: disable=too-many-statements from datetime import datetime from google.logging.type.log_severity_pb2 import WARNING - from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import UTC, _pb_timestamp_to_datetime NOW = datetime.utcnow().replace(tzinfo=UTC) TEXT = 'TEXT' @@ -448,7 +452,7 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): # pylint: disable=too-many-statements import datetime from google.logging.type.log_severity_pb2 import WARNING - from google.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.any_pb2 import Any from google.cloud._helpers import _datetime_to_rfc3339, UTC TEXT = 'TEXT' @@ -616,7 +620,7 @@ def test_ctor(self): def test_list_sinks_no_paging(self): 
import six from google.gax import INITIAL_PAGE - from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink from google.cloud._testing import _GAXPageIterator from google.cloud.logging.sink import Sink @@ -651,7 +655,7 @@ def test_list_sinks_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_sinks_w_paging(self): - from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink from google.cloud._testing import _GAXPageIterator from google.cloud.logging.sink import Sink @@ -707,7 +711,7 @@ def test_sink_create_conflict(self): self.DESTINATION_URI) def test_sink_create_ok(self): - from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -740,7 +744,7 @@ def test_sink_get_miss(self): api.sink_get(self.PROJECT, self.SINK_NAME) def test_sink_get_hit(self): - from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink RESPONSE = { 'name': self.SINK_PATH, @@ -782,7 +786,7 @@ def test_sink_update_miss(self): self.DESTINATION_URI) def test_sink_update_hit(self): - from google.logging.v2.logging_config_pb2 import LogSink + from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink response = LogSink(name=self.SINK_NAME, destination=self.DESTINATION_URI, @@ -848,7 +852,7 @@ def test_ctor(self): def test_list_metrics_no_paging(self): import six from google.gax import INITIAL_PAGE - from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric from google.cloud._testing import _GAXPageIterator from google.cloud.logging.metric import Metric @@ -883,7 +887,7 @@ def test_list_metrics_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_metrics_w_paging(self): - from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric from google.cloud._testing import _GAXPageIterator from google.cloud.logging.metric import Metric @@ -939,7 +943,7 @@ def test_metric_create_conflict(self): self.DESCRIPTION) def test_metric_create_ok(self): - from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric gax_api = _GAXMetricsAPI() api = self._make_one(gax_api, None) @@ -972,7 +976,7 @@ def test_metric_get_miss(self): api.metric_get(self.PROJECT, self.METRIC_NAME) def test_metric_get_hit(self): - from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric RESPONSE = { 'name': self.METRIC_PATH, @@ -1014,7 +1018,7 @@ def test_metric_update_miss(self): self.DESCRIPTION) def test_metric_update_hit(self): - from google.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric response = LogMetric(name=self.METRIC_NAME, description=self.DESCRIPTION, @@ -1093,7 +1097,7 @@ def generated_api(channel=None): patch = mock.patch.multiple( 'google.cloud.logging._gax', - LoggingServiceV2Api=generated_api, + LoggingServiceV2Client=generated_api, make_secure_channel=make_channel) with patch: logging_api = self._call_fut(client) @@ -1139,7 +1143,7 @@ def generated_api(channel=None): patch = 
mock.patch.multiple( 'google.cloud.logging._gax', - MetricsServiceV2Api=generated_api, + MetricsServiceV2Client=generated_api, make_secure_channel=make_channel) with patch: metrics_api = self._call_fut(client) @@ -1185,7 +1189,7 @@ def generated_api(channel=None): patch = mock.patch.multiple( 'google.cloud.logging._gax', - ConfigServiceV2Api=generated_api, + ConfigServiceV2Client=generated_api, make_secure_channel=make_channel) with patch: sinks_api = self._call_fut(client) @@ -1204,9 +1208,11 @@ class _GAXLoggingAPI(_GAXBaseAPI): _delete_not_found = False def list_log_entries( - self, projects, filter_, order_by, page_size, options): + self, resource_names, project_ids, filter_, + order_by, page_size, options): self._list_log_entries_called_with = ( - projects, filter_, order_by, page_size, options) + resource_names, project_ids, filter_, + order_by, page_size, options) return self._list_log_entries_response def write_log_entries(self, entries, log_name, resource, labels, From 77f891c803593026084eb33be9730c92e0fbd61d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 2 Dec 2016 15:02:25 -0800 Subject: [PATCH 054/855] Switch from oauth2client to google-auth (#2726) * Removes all use of oauth2client from every package and tests. * Updates core to use google-auth's default credentials, project ID, and scoping logic. * Updates bigtable to use google-auth's scoping logic. --- .../handlers/transports/background_thread.py | 1 - .../transports/test_background_thread.py | 8 +-- .../unit_tests/test__http.py | 18 +----- .../unit_tests/test_client.py | 64 +++++++------------ 4 files changed, 26 insertions(+), 65 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index aa50e0d3ffc1..c090474a540b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -151,7 +151,6 @@ class BackgroundThreadTransport(Transport): def __init__(self, client, name): http = copy.deepcopy(client._connection.http) - http = client._connection.credentials.authorize(http) self.client = client.__class__(client.project, client._connection.credentials, http) logger = self.client.logger(name) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index eb9204b4e2ae..5ca76a2f68c3 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -157,17 +157,11 @@ def commit(self): del self.entries[:] -class _Credentials(object): - - def authorize(self, _): - pass - - class _Connection(object): def __init__(self): self.http = None - self.credentials = _Credentials() + self.credentials = object() class _Logger(object): diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 1a6e2d548a06..8dccefa1dcb0 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -29,10 +29,9 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_default_url(self): - creds = _Credentials() + creds = 
object() conn = self._make_one(creds) - klass = self._get_target_class() - self.assertEqual(conn.credentials._scopes, klass.SCOPE) + self.assertEqual(conn.credentials, creds) class Test_LoggingAPI(unittest.TestCase): @@ -759,19 +758,6 @@ def test_metric_delete_hit(self): self.assertEqual(conn._called_with['path'], path) -class _Credentials(object): - - _scopes = None - - @staticmethod - def create_scoped_required(): - return True - - def create_scoped(self, scope): - self._scopes = scope - return self - - class _Connection(object): _called_with = None diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 6e7fc8f80f56..4e0cf9129c67 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -35,14 +35,14 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): from google.cloud.logging._http import _LoggingAPI - client = self._make_one(self.PROJECT, credentials=_Credentials(), + client = self._make_one(self.PROJECT, credentials=object(), use_gax=False) conn = client._connection = object() api = client.logging_api @@ -63,7 +63,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) @@ -83,7 +83,7 @@ def test_no_gax_ctor(self): import mock from google.cloud.logging._http import _LoggingAPI - creds = _Credentials() + creds = object() patch = mock.patch( 'google.cloud.logging.client._USE_GAX', new=True) @@ -98,7 +98,7 @@ def test_sinks_api_wo_gax(self): from google.cloud.logging._http import _SinksAPI client = self._make_one( - self.PROJECT, credentials=_Credentials(), + self.PROJECT, credentials=object(), use_gax=False) conn = client._connection = object() @@ -120,7 +120,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) @@ -140,7 +140,7 @@ def test_metrics_api_wo_gax(self): from google.cloud.logging._http import _MetricsAPI client = self._make_one( - self.PROJECT, credentials=_Credentials(), + self.PROJECT, credentials=object(), use_gax=False) conn = client._connection = object() @@ -162,7 +162,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) @@ -180,7 +180,7 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging.logger import Logger - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) logger = client.logger(self.LOGGER_NAME) self.assertIsInstance(logger, Logger) @@ -204,7 +204,7 @@ def test_list_entries_defaults(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = { @@ -269,7 +269,7 @@ def test_list_entries_explicit(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - client = self._make_one(self.PROJECT, 
credentials=_Credentials(), + client = self._make_one(self.PROJECT, credentials=object(), use_gax=False) returned = {'entries': ENTRIES} client._connection = _Connection(returned) @@ -320,7 +320,7 @@ def test_list_entries_explicit(self): def test_sink_defaults(self): from google.cloud.logging.sink import Sink - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME) self.assertIsInstance(sink, Sink) @@ -332,7 +332,7 @@ def test_sink_defaults(self): def test_sink_explicit(self): from google.cloud.logging.sink import Sink - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) self.assertIsInstance(sink, Sink) @@ -355,7 +355,7 @@ def test_list_sinks_no_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._make_one(project=PROJECT, credentials=_Credentials(), + client = self._make_one(project=PROJECT, credentials=object(), use_gax=False) returned = { 'sinks': SINKS, @@ -401,7 +401,7 @@ def test_list_sinks_with_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._make_one(project=PROJECT, credentials=_Credentials(), + client = self._make_one(project=PROJECT, credentials=object(), use_gax=False) returned = { 'sinks': SINKS, @@ -437,7 +437,7 @@ def test_list_sinks_with_paging(self): def test_metric_defaults(self): from google.cloud.logging.metric import Metric - creds = _Credentials() + creds = object() client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME) @@ -450,7 +450,7 @@ def test_metric_defaults(self): def test_metric_explicit(self): from google.cloud.logging.metric import Metric - creds = _Credentials() + creds = object() client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME, self.FILTER, @@ -471,7 +471,7 @@ def test_list_metrics_no_paging(self): 'description': self.DESCRIPTION, }] client = self._make_one( - project=self.PROJECT, credentials=_Credentials(), + project=self.PROJECT, credentials=object(), use_gax=False) returned = { 'metrics': metrics, @@ -513,7 +513,7 @@ def test_list_metrics_with_paging(self): 'description': self.DESCRIPTION, }] client = self._make_one( - project=self.PROJECT, credentials=_Credentials(), + project=self.PROJECT, credentials=object(), use_gax=False) returned = { 'metrics': metrics, @@ -557,7 +557,7 @@ def test_get_default_handler_app_engine(self): from google.cloud.logging.handlers import AppEngineHandler client = self._make_one(project=self.PROJECT, - credentials=_Credentials(), + credentials=object(), use_gax=False) with _Monkey(_MUT, _LOG_PATH_TEMPLATE='{pid}'): @@ -573,7 +573,7 @@ def test_get_default_handler_container_engine(self): from google.cloud.logging.handlers import ContainerEngineHandler client = self._make_one(project=self.PROJECT, - credentials=_Credentials(), + credentials=object(), use_gax=False) with _Monkey(os, environ={_CONTAINER_ENGINE_ENV: 'True'}): @@ -587,7 +587,7 @@ def test_get_default_handler_general(self): from google.cloud.logging.handlers import CloudLoggingHandler http_mock = mock.Mock(spec=httplib2.Http) - credentials = _Credentials() + credentials = object() deepcopy = mock.Mock(return_value=http_mock) with mock.patch('copy.deepcopy', new=deepcopy): @@ -598,7 +598,6 @@ def test_get_default_handler_general(self): 
deepcopy.assert_called_once_with(client._connection.http) self.assertIsInstance(handler, CloudLoggingHandler) - self.assertTrue(credentials.authorized, http_mock) def test_setup_logging(self): import httplib2 @@ -608,7 +607,7 @@ def test_setup_logging(self): deepcopy = mock.Mock(return_value=http_mock) setup_logging = mock.Mock() - credentials = _Credentials() + credentials = object() with mock.patch('copy.deepcopy', new=deepcopy): with mock.patch('google.cloud.logging.client.setup_logging', @@ -620,23 +619,6 @@ def test_setup_logging(self): deepcopy.assert_called_once_with(client._connection.http) setup_logging.assert_called() - self.assertTrue(credentials.authorized, http_mock) - - -class _Credentials(object): - - _scopes = None - - @staticmethod - def create_scoped_required(): - return True - - def create_scoped(self, scope): - self._scopes = scope - return self - - def authorize(self, http): - self.authorized = http class _Connection(object): From afb885a482117e9637a59e4ddaf307032da5c458 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 7 Dec 2016 16:00:24 -0800 Subject: [PATCH 055/855] Raise ValueError if credentials are not from google-auth (#2828) --- .../unit_tests/test__gax.py | 17 +++-- .../unit_tests/test__http.py | 15 +++-- .../unit_tests/test_client.py | 66 +++++++++---------- .../unit_tests/test_logger.py | 27 +++++--- 4 files changed, 73 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 2d7280996a0b..4d269236e3e6 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -14,6 +14,7 @@ import unittest +import mock try: # pylint: disable=unused-import @@ -27,6 +28,13 @@ from google.cloud._testing import _GAXBaseAPI +def _make_credentials(): + # pylint: disable=redefined-outer-name + import google.auth.credentials + # pylint: enable=redefined-outer-name + return mock.Mock(spec=google.auth.credentials.Credentials) + + class _Base(object): PROJECT = 'PROJECT' PROJECT_PATH = 'projects/%s' % (PROJECT,) @@ -79,7 +87,7 @@ def test_list_entries_no_paging(self): text_payload=TEXT) response = _GAXPageIterator([entry_pb], page_token=TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, credentials=_make_credentials(), use_gax=True) api = self._make_one(gax_api, client) @@ -136,7 +144,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): json_payload=struct_pb) response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, credentials=_make_credentials(), use_gax=True) api = self._make_one(gax_api, client) @@ -277,7 +285,7 @@ def test_list_entries_with_extra_properties(self): response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, credentials=_make_credentials(), use_gax=True) api = self._make_one(gax_api, client) @@ -1073,7 +1081,6 @@ def _call_fut(self, client): return make_gax_logging_api(client) def test_it(self): - import mock from google.cloud.logging._gax import _LoggingAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT @@ 
-1119,7 +1126,6 @@ def _call_fut(self, client): return make_gax_metrics_api(client) def test_it(self): - import mock from google.cloud.logging._gax import _MetricsAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT @@ -1165,7 +1171,6 @@ def _call_fut(self, client): return make_gax_sinks_api(client) def test_it(self): - import mock from google.cloud.logging._gax import _SinksAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 8dccefa1dcb0..6fe8c825feef 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class TestConnection(unittest.TestCase): @@ -29,7 +36,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_default_url(self): - creds = object() + creds = _make_credentials() conn = self._make_one(creds) self.assertEqual(conn.credentials, creds) @@ -91,7 +98,7 @@ def test_list_entries_no_paging(self): }], 'nextPageToken': TOKEN, } - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, credentials=_make_credentials(), use_gax=False) client._connection = _Connection(RETURNED) api = self._make_one(client) @@ -169,7 +176,7 @@ def test_list_entries_w_paging(self): self.PROJECT, self.LOGGER_NAME), }], } - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, credentials=_make_credentials(), use_gax=False) client._connection = _Connection(RETURNED) api = self._make_one(client) @@ -301,7 +308,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - connection = object() + connection = _make_credentials() client = _Client(connection) api = self._make_one(client) self.assertIs(api._connection, connection) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 4e0cf9129c67..d4a3c1a21855 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class TestClient(unittest.TestCase): @@ -35,14 +42,15 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) def test_logging_api_wo_gax(self): from google.cloud.logging._http import _LoggingAPI - client = self._make_one(self.PROJECT, credentials=object(), + client = self._make_one(self.PROJECT, + credentials=_make_credentials(), use_gax=False) conn = client._connection = object() api = client.logging_api @@ -54,8 +62,6 @@ def test_logging_api_wo_gax(self): self.assertIs(again, api) def test_logging_api_w_gax(self): - import mock - clients = [] api_obj = object() @@ -63,7 +69,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, 
use_gax=True) @@ -80,10 +86,9 @@ def make_api(client_obj): self.assertIs(again, api) def test_no_gax_ctor(self): - import mock from google.cloud.logging._http import _LoggingAPI - creds = object() + creds = _make_credentials() patch = mock.patch( 'google.cloud.logging.client._USE_GAX', new=True) @@ -98,7 +103,7 @@ def test_sinks_api_wo_gax(self): from google.cloud.logging._http import _SinksAPI client = self._make_one( - self.PROJECT, credentials=object(), + self.PROJECT, credentials=_make_credentials(), use_gax=False) conn = client._connection = object() @@ -111,8 +116,6 @@ def test_sinks_api_wo_gax(self): self.assertIs(again, api) def test_sinks_api_w_gax(self): - import mock - clients = [] api_obj = object() @@ -120,7 +123,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) @@ -140,7 +143,7 @@ def test_metrics_api_wo_gax(self): from google.cloud.logging._http import _MetricsAPI client = self._make_one( - self.PROJECT, credentials=object(), + self.PROJECT, credentials=_make_credentials(), use_gax=False) conn = client._connection = object() @@ -153,8 +156,6 @@ def test_metrics_api_wo_gax(self): self.assertIs(again, api) def test_metrics_api_w_gax(self): - import mock - clients = [] api_obj = object() @@ -162,7 +163,7 @@ def make_api(client_obj): clients.append(client_obj) return api_obj - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=True) @@ -180,7 +181,7 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging.logger import Logger - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) logger = client.logger(self.LOGGER_NAME) self.assertIsInstance(logger, Logger) @@ -204,7 +205,7 @@ def test_list_entries_defaults(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = { @@ -269,7 +270,7 @@ def test_list_entries_explicit(self): 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), }] - client = self._make_one(self.PROJECT, credentials=object(), + client = self._make_one(self.PROJECT, credentials=_make_credentials(), use_gax=False) returned = {'entries': ENTRIES} client._connection = _Connection(returned) @@ -320,7 +321,7 @@ def test_list_entries_explicit(self): def test_sink_defaults(self): from google.cloud.logging.sink import Sink - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME) self.assertIsInstance(sink, Sink) @@ -332,7 +333,7 @@ def test_sink_defaults(self): def test_sink_explicit(self): from google.cloud.logging.sink import Sink - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) self.assertIsInstance(sink, Sink) @@ -355,7 +356,8 @@ def test_list_sinks_no_paging(self): 'filter': FILTER, 'destination': self.DESTINATION_URI, }] - client = self._make_one(project=PROJECT, credentials=object(), + client = self._make_one(project=PROJECT, + credentials=_make_credentials(), use_gax=False) returned = { 'sinks': SINKS, @@ -401,8 +403,8 @@ def test_list_sinks_with_paging(self): 'filter': 
FILTER, 'destination': self.DESTINATION_URI, }] - client = self._make_one(project=PROJECT, credentials=object(), - use_gax=False) + client = self._make_one( + project=PROJECT, credentials=_make_credentials(), use_gax=False) returned = { 'sinks': SINKS, } @@ -437,7 +439,7 @@ def test_list_sinks_with_paging(self): def test_metric_defaults(self): from google.cloud.logging.metric import Metric - creds = object() + creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME) @@ -450,7 +452,7 @@ def test_metric_defaults(self): def test_metric_explicit(self): from google.cloud.logging.metric import Metric - creds = object() + creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric(self.METRIC_NAME, self.FILTER, @@ -471,7 +473,7 @@ def test_list_metrics_no_paging(self): 'description': self.DESCRIPTION, }] client = self._make_one( - project=self.PROJECT, credentials=object(), + project=self.PROJECT, credentials=_make_credentials(), use_gax=False) returned = { 'metrics': metrics, @@ -513,7 +515,7 @@ def test_list_metrics_with_paging(self): 'description': self.DESCRIPTION, }] client = self._make_one( - project=self.PROJECT, credentials=object(), + project=self.PROJECT, credentials=_make_credentials(), use_gax=False) returned = { 'metrics': metrics, @@ -557,7 +559,7 @@ def test_get_default_handler_app_engine(self): from google.cloud.logging.handlers import AppEngineHandler client = self._make_one(project=self.PROJECT, - credentials=object(), + credentials=_make_credentials(), use_gax=False) with _Monkey(_MUT, _LOG_PATH_TEMPLATE='{pid}'): @@ -573,7 +575,7 @@ def test_get_default_handler_container_engine(self): from google.cloud.logging.handlers import ContainerEngineHandler client = self._make_one(project=self.PROJECT, - credentials=object(), + credentials=_make_credentials(), use_gax=False) with _Monkey(os, environ={_CONTAINER_ENGINE_ENV: 'True'}): @@ -583,11 +585,10 @@ def test_get_default_handler_container_engine(self): def test_get_default_handler_general(self): import httplib2 - import mock from google.cloud.logging.handlers import CloudLoggingHandler http_mock = mock.Mock(spec=httplib2.Http) - credentials = object() + credentials = _make_credentials() deepcopy = mock.Mock(return_value=http_mock) with mock.patch('copy.deepcopy', new=deepcopy): @@ -601,13 +602,12 @@ def test_get_default_handler_general(self): def test_setup_logging(self): import httplib2 - import mock http_mock = mock.Mock(spec=httplib2.Http) deepcopy = mock.Mock(return_value=http_mock) setup_logging = mock.Mock() - credentials = object() + credentials = _make_credentials() with mock.patch('copy.deepcopy', new=deepcopy): with mock.patch('google.cloud.logging.client.setup_logging', diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index cbe149102445..15e7e7146b80 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class TestLogger(unittest.TestCase): @@ -352,7 +359,8 @@ def test_list_entries_defaults(self): TOKEN = 'TOKEN' - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, + credentials=_make_credentials(), 
use_gax=False) returned = { 'nextPageToken': TOKEN, @@ -389,7 +397,8 @@ def test_list_entries_explicit(self): FILTER = 'resource.type:global' TOKEN = 'TOKEN' PAGE_SIZE = 42 - client = Client(project=self.PROJECT, credentials=object(), + client = Client(project=self.PROJECT, + credentials=_make_credentials(), use_gax=False) client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) @@ -440,7 +449,7 @@ def test_ctor_defaults(self): def test_log_text_defaults(self): TEXT = 'This is the entry text' - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT) @@ -460,7 +469,7 @@ def test_log_text_explicit(self): 'requestUrl': URI, 'status': STATUS, } - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, @@ -470,7 +479,7 @@ def test_log_text_explicit(self): def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) @@ -490,7 +499,7 @@ def test_log_struct_explicit(self): 'requestUrl': URI, 'status': STATUS, } - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, @@ -501,7 +510,7 @@ def test_log_struct_explicit(self): def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message) @@ -522,7 +531,7 @@ def test_log_proto_explicit(self): 'requestUrl': URI, 'status': STATUS, } - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, @@ -532,7 +541,7 @@ def test_log_proto_explicit(self): def test_commit_w_invalid_entry_type(self): logger = _Logger() - client = _Client(project=self.PROJECT, connection=object()) + client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) with self.assertRaises(ValueError): From 1ca0e8b583f51824b58640565009b4bfca356f2c Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Thu, 8 Dec 2016 15:30:42 -0800 Subject: [PATCH 056/855] Make logging unit test file a temp file --- packages/google-cloud-logging/unit_tests/test_client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index d4a3c1a21855..b95f1dd6071f 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ 
b/packages/google-cloud-logging/unit_tests/test_client.py @@ -553,6 +553,7 @@ def test_list_metrics_with_paging(self): def test_get_default_handler_app_engine(self): import os + import tempfile from google.cloud._testing import _Monkey from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM from google.cloud.logging.handlers import app_engine as _MUT @@ -562,7 +563,8 @@ def test_get_default_handler_app_engine(self): credentials=_make_credentials(), use_gax=False) - with _Monkey(_MUT, _LOG_PATH_TEMPLATE='{pid}'): + temp_log_path = os.path.join(tempfile.mkdtemp(), '{pid}') + with _Monkey(_MUT, _LOG_PATH_TEMPLATE=temp_log_path): with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): handler = client.get_default_handler() From 48daf551e5b9722f183394b6b302ccbcfe14e73d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 9 Dec 2016 08:38:40 -0500 Subject: [PATCH 057/855] Use '_tempdir' context manager to avoid leaking tempdirs. --- .../google-cloud-logging/unit_tests/test_client.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index b95f1dd6071f..93727d614bff 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -553,8 +553,8 @@ def test_list_metrics_with_paging(self): def test_get_default_handler_app_engine(self): import os - import tempfile from google.cloud._testing import _Monkey + from google.cloud._testing import _tempdir from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM from google.cloud.logging.handlers import app_engine as _MUT from google.cloud.logging.handlers import AppEngineHandler @@ -563,10 +563,11 @@ def test_get_default_handler_app_engine(self): credentials=_make_credentials(), use_gax=False) - temp_log_path = os.path.join(tempfile.mkdtemp(), '{pid}') - with _Monkey(_MUT, _LOG_PATH_TEMPLATE=temp_log_path): - with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): - handler = client.get_default_handler() + with _tempdir() as tempdir: + temp_log_path = os.path.join(tempdir, '{pid}') + with _Monkey(_MUT, _LOG_PATH_TEMPLATE=temp_log_path): + with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): + handler = client.get_default_handler() self.assertIsInstance(handler, AppEngineHandler) From 096a2c4503de08e9719cbdd0df6a025f3c7be81e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 8 Dec 2016 15:17:03 -0800 Subject: [PATCH 058/855] Update versions for mega-release. We want to update - `google-cloud-bigquery` - `google-cloud-datastore` - `google-cloud-logging` - `google-cloud-storage` - `google-cloud-core` And then update `google-cloud` to re-wrap the latest versions of each. However, to avoid having packages in `google-cloud` with conflicting versions of `google-cloud-core`, we must release all packages. 
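(Annotation, not part of the patch: a minimal sketch of the pin conflict this coordinated release avoids. Both version ranges below appear in this series' setup.py hunks; the failed-resolution scenario is an assumption for illustration.)

    # google-cloud-logging after this release:
    REQUIREMENTS = [
        'google-cloud-core >= 0.22.1, < 0.23dev',
    ]
    # A sibling package that was not re-released would still pin:
    #     'google-cloud-core >= 0.21.0, < 0.22dev'
    # The two half-open ranges are disjoint, so no single release of
    # google-cloud-core satisfies both, and installing the umbrella
    # 'google-cloud' package could not resolve its dependencies.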
--- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 03b7563ec730..00cd3d5f94f0 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,13 +50,13 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0, < 0.22dev', + 'google-cloud-core >= 0.22.1, < 0.23dev', 'gapic-google-cloud-logging-v2 >= 0.14.0, < 0.15dev', ] setup( name='google-cloud-logging', - version='0.21.0', + version='0.22.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 0ff32975fbed852ce4f050da815607ae3f93f7d9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 9 Dec 2016 16:57:17 -0800 Subject: [PATCH 059/855] Explicitly putting a 1.0.2 lower bound on grpcio. Also upgrading logging from 0.14.x to 0.90.x. --- packages/google-cloud-logging/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 00cd3d5f94f0..98645b8ea1e8 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,7 +51,8 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.22.1, < 0.23dev', - 'gapic-google-cloud-logging-v2 >= 0.14.0, < 0.15dev', + 'grpcio >= 1.0.2, < 2.0dev', + 'gapic-google-cloud-logging-v2 >= 0.90.0, < 0.91dev', ] setup( From 7688a3231f8d0af6388a4504274340a87667631b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 9 Dec 2016 20:36:52 -0500 Subject: [PATCH 060/855] Force close logging handler. Allow tempdir cleanup to work as expected on Windows. --- packages/google-cloud-logging/unit_tests/test_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 93727d614bff..5e48f7b95367 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -568,6 +568,7 @@ def test_get_default_handler_app_engine(self): with _Monkey(_MUT, _LOG_PATH_TEMPLATE=temp_log_path): with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): handler = client.get_default_handler() + handler.close() # allow tempdir cleanup on Windows self.assertIsInstance(handler, AppEngineHandler) From e6f0d87a345385452b5e4f48c43c64c34537c460 Mon Sep 17 00:00:00 2001 From: francois kawala Date: Tue, 13 Dec 2016 18:48:32 +0100 Subject: [PATCH 061/855] Added a timestamp parameter to logger. This parameter is useful when one sends batches of log records; without it, each log record in a batch receives the same timestamp, namely the time of its insertion into the Google Cloud Logging API.
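(Annotation, not part of the patch: a minimal usage sketch, assuming a configured client and a hypothetical log name. At this patch the timestamp is passed as an RFC3339 string; PATCH 062 below changes the expected type to datetime.datetime.)

    from google.cloud import logging

    client = logging.Client()  # assumes default project and credentials
    logger = client.logger('my-log')  # hypothetical log name

    with logger.batch() as batch:
        # Give each record its own event time; otherwise every entry in
        # the batch would carry the same API insertion time.
        batch.log_text('job started', timestamp='2016-12-13T17:00:00.000000Z')
        batch.log_text('job finished', timestamp='2016-12-13T17:05:00.000000Z')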
--- .../google/cloud/logging/logger.py | 53 ++++++++++++++----- .../unit_tests/test_logger.py | 30 ++++++----- 2 files changed, 56 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 842481af42da..e8a97a4f4237 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -92,7 +92,7 @@ def batch(self, client=None): def _make_entry_resource(self, text=None, info=None, message=None, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -120,6 +120,8 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. :rtype: dict :returns: The JSON resource created. @@ -155,10 +157,13 @@ def _make_entry_resource(self, text=None, info=None, message=None, if http_request is not None: resource['httpRequest'] = http_request + if timestamp is not None: + resource['timestamp'] = timestamp + return resource def log_text(self, text, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a text message via a POST request See: @@ -184,15 +189,18 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request) + http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a structured message via a POST request See: @@ -218,15 +226,18 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request) + http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, - severity=None, http_request=None): + severity=None, http_request=None, timestamp=None): """API call: log a protobuf message via a POST request See: @@ -252,11 +263,14 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. 
""" client = self._require_client(client) entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, - severity=severity, http_request=http_request) + severity=severity, http_request=http_request, timestamp=timestamp) client.logging_api.write_entries([entry_resource]) def delete(self, client=None): @@ -340,7 +354,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() def log_text(self, text, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a text entry to be logged during :meth:`commit`. :type text: str @@ -358,12 +372,15 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('text', text, labels, insert_id, severity, http_request)) + ('text', text, labels, insert_id, severity, http_request, timestamp)) def log_struct(self, info, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict @@ -381,12 +398,15 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('struct', info, labels, insert_id, severity, http_request)) + ('struct', info, labels, insert_id, severity, http_request, timestamp)) def log_proto(self, message, labels=None, insert_id=None, severity=None, - http_request=None): + http_request=None, timestamp=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -404,9 +424,12 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry. + + :type timestamp: str + :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('proto', message, labels, insert_id, severity, http_request)) + ('proto', message, labels, insert_id, severity, http_request, timestamp)) def commit(self, client=None): """Send saved log entries as a single API call. 
@@ -427,7 +450,7 @@ def commit(self, client=None): kwargs['labels'] = self.logger.labels entries = [] - for entry_type, entry, labels, iid, severity, http_req in self.entries: + for entry_type, entry, labels, iid, severity, http_req, timestamp in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -446,6 +469,8 @@ def commit(self, client=None): info['severity'] = severity if http_req is not None: info['httpRequest'] = http_req + if timestamp is not None: + info['timestamp'] = timestamp entries.append(info) client.logging_api.write_entries(entries, **kwargs) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 15e7e7146b80..91f7fdafaf96 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -454,7 +454,7 @@ def test_log_text_defaults(self): batch = self._make_one(logger, client=client) batch.log_text(TEXT) self.assertEqual(batch.entries, - [('text', TEXT, None, None, None, None)]) + [('text', TEXT, None, None, None, None, None)]) def test_log_text_explicit(self): TEXT = 'This is the entry text' @@ -469,13 +469,14 @@ def test_log_text_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = '2016-10-12T15:01:23.045123456Z' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST) + http_request=REQUEST, timestamp=TIMESTAMP) self.assertEqual(batch.entries, - [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} @@ -484,7 +485,7 @@ def test_log_struct_defaults(self): batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) self.assertEqual(batch.entries, - [('struct', STRUCT, None, None, None, None)]) + [('struct', STRUCT, None, None, None, None, None)]) def test_log_struct_explicit(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} @@ -499,13 +500,14 @@ def test_log_struct_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = '2016-10-12T15:01:23.045123456Z' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST) + severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP) self.assertEqual(batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value @@ -515,7 +517,7 @@ def test_log_proto_defaults(self): batch = self._make_one(logger, client=client) batch.log_proto(message) self.assertEqual(batch.entries, - [('proto', message, None, None, None, None)]) + [('proto', message, None, None, None, None, None)]) def test_log_proto_explicit(self): from google.protobuf.struct_pb2 import Struct, Value @@ -531,13 +533,14 @@ def test_log_proto_explicit(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = '2016-10-12T15:01:23.045123456Z' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, 
client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST) + severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP) self.assertEqual(batch.entries, - [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) + [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() @@ -681,20 +684,21 @@ def test_context_mgr_failure(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = '2016-10-12T15:01:23.045123456Z' message = Struct(fields={'foo': Value(bool_value=True)}) client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() UNSENT = [ - ('text', TEXT, None, IID, None, None), - ('struct', STRUCT, None, None, SEVERITY, None), - ('proto', message, LABELS, None, None, REQUEST), + ('text', TEXT, None, IID, None, None, TIMESTAMP), + ('struct', STRUCT, None, None, SEVERITY, None, None), + ('proto', message, LABELS, None, None, REQUEST, None), ] batch = self._make_one(logger, client=client) try: with batch as other: - other.log_text(TEXT, insert_id=IID) + other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP) other.log_struct(STRUCT, severity=SEVERITY) other.log_proto(message, labels=LABELS, http_request=REQUEST) raise _Bugout() From 59528085abe5e9f6ae8cb1d937d9f1a610c90219 Mon Sep 17 00:00:00 2001 From: francois kawala Date: Wed, 14 Dec 2016 14:34:32 +0100 Subject: [PATCH 062/855] Changed timestamp type from 'str' to 'datetime.datetime'. Convert the timestamp to a RFC3339 string representation in the helper function _make_entry_resource. --- .../google/cloud/logging/logger.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index e8a97a4f4237..37ae76e26843 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -17,6 +17,7 @@ import json from google.protobuf.json_format import MessageToJson +from google.cloud._helpers import _datetime_to_rfc3339 class Logger(object): @@ -120,7 +121,8 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry - :type timestamp: str + + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. :rtype: dict @@ -158,7 +160,7 @@ def _make_entry_resource(self, text=None, info=None, message=None, resource['httpRequest'] = http_request if timestamp is not None: - resource['timestamp'] = timestamp + resource['timestamp'] = _datetime_to_rfc3339(timestamp) return resource @@ -190,7 +192,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) @@ -227,7 +229,7 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. 
""" client = self._require_client(client) @@ -264,7 +266,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) @@ -373,7 +375,7 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :param http_request: (optional) info about HTTP request associated with the entry. - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( @@ -399,7 +401,7 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :param http_request: (optional) info about HTTP request associated with the entry. - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( @@ -425,7 +427,7 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :param http_request: (optional) info about HTTP request associated with the entry. - :type timestamp: str + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( From 0c7a60523d61e987b03361ec9468fafef3bc88e4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 14 Dec 2016 23:43:05 -0800 Subject: [PATCH 063/855] Manually creating Client._connection in subclasses. --- .../google/cloud/logging/client.py | 29 +++++++++++-------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index c92f177eaac6..77e762e6c808 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -67,15 +67,16 @@ class Client(JSONClient): If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
@@ -86,12 +87,16 @@ class Client(JSONClient): variable """ - _connection_class = Connection - _logging_api = _sinks_api = _metrics_api = None + _logging_api = None + _sinks_api = None + _metrics_api = None def __init__(self, project=None, credentials=None, http=None, use_gax=None): - super(Client, self).__init__(project, credentials, http) + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) if use_gax is None: self._use_gax = _USE_GAX else: From ad1550d3a4d8a342537a475e9635228d55fb91b9 Mon Sep 17 00:00:00 2001 From: francois kawala Date: Thu, 15 Dec 2016 11:27:55 +0100 Subject: [PATCH 064/855] PEP8 compliance fix. Fix tests: timestamp conversion was not covered; timestamps were strings instead of :class:`datetime.datetime`. --- .../google/cloud/logging/logger.py | 12 +- .../unit_tests/test_logger.py | 126 +++++++++++++++--- 2 files changed, 113 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 37ae76e26843..07eb2de59de8 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -379,7 +379,8 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('text', text, labels, insert_id, severity, http_request, timestamp)) + ('text', text, labels, insert_id, severity, http_request, + timestamp)) def log_struct(self, info, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None): @@ -405,7 +406,8 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('struct', info, labels, insert_id, severity, http_request, timestamp)) + ('struct', info, labels, insert_id, severity, http_request, + timestamp)) def log_proto(self, message, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None): @@ -431,7 +433,8 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :param timestamp: (optional) timestamp of event being logged. """ self.entries.append( - ('proto', message, labels, insert_id, severity, http_request, timestamp)) + ('proto', message, labels, insert_id, severity, http_request, + timestamp)) def commit(self, client=None): """Send saved log entries as a single API call.
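(Annotation, not part of the patch: a self-contained comparison of the two continuation styles at issue here and in PATCH 065 further below; the first form appears in the commit() hunk that follows, the second replaces it. The sample list stands in for self.entries.)

    entries = [('text', 'payload', None, None, None, None, None)]

    # Backslash continuation (introduced by this patch):
    for entry_type, entry, labels, iid, severity, http_req, timestamp in \
            entries:
        pass

    # Parenthesized unpacking target (adopted in PATCH 065):
    for (entry_type, entry, labels, iid, severity, http_req,
            timestamp) in entries:
        pass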
@@ -452,7 +455,8 @@ def commit(self, client=None): kwargs['labels'] = self.logger.labels entries = [] - for entry_type, entry, labels, iid, severity, http_req, timestamp in self.entries: + for entry_type, entry, labels, iid, severity, http_req, timestamp in \ + self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 91f7fdafaf96..48edaf0ed5a4 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -125,6 +125,29 @@ def test_log_text_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_text_w_timestamp(self): + import datetime + + TEXT = 'TEXT' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_text(TEXT, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} @@ -243,6 +266,28 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_w_timestamp(self): + import datetime + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson @@ -332,6 +377,31 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_proto_w_timestamp(self): + import json + import datetime + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': { + 'type': 'global', + }, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_proto(message, timestamp=TIMESTAMP) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api 
= _DummyLoggingAPI() @@ -457,6 +527,7 @@ def test_log_text_defaults(self): [('text', TEXT, None, None, None, None, None)]) def test_log_text_explicit(self): + import datetime TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -469,14 +540,15 @@ def test_log_text_explicit(self): 'requestUrl': URI, 'status': STATUS, } - TIMESTAMP = '2016-10-12T15:01:23.045123456Z' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP) - self.assertEqual(batch.entries, - [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) + self.assertEqual( + batch.entries, + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} @@ -484,10 +556,12 @@ def test_log_struct_defaults(self): logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) - self.assertEqual(batch.entries, - [('struct', STRUCT, None, None, None, None, None)]) + self.assertEqual( + batch.entries, + [('struct', STRUCT, None, None, None, None, None)]) def test_log_struct_explicit(self): + import datetime STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -500,14 +574,16 @@ def test_log_struct_explicit(self): 'requestUrl': URI, 'status': STATUS, } - TIMESTAMP = '2016-10-12T15:01:23.045123456Z' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP) - self.assertEqual(batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) + severity=SEVERITY, http_request=REQUEST, + timestamp=TIMESTAMP) + self.assertEqual( + batch.entries, + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value @@ -520,6 +596,7 @@ def test_log_proto_defaults(self): [('proto', message, None, None, None, None, None)]) def test_log_proto_explicit(self): + import datetime from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -533,25 +610,28 @@ def test_log_proto_explicit(self): 'requestUrl': URI, 'status': STATUS, } - TIMESTAMP = '2016-10-12T15:01:23.045123456Z' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP) - self.assertEqual(batch.entries, - [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) + severity=SEVERITY, http_request=REQUEST, + timestamp=TIMESTAMP) + self.assertEqual( + batch.entries, + [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) - 
batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None)) with self.assertRaises(ValueError): batch.commit() def test_commit_w_bound_client(self): import json + import datetime from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value TEXT = 'This is the entry text' @@ -560,23 +640,26 @@ def test_commit_w_bound_client(self): IID1 = 'IID1' IID2 = 'IID2' IID3 = 'IID3' + TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) + TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) + TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) RESOURCE = { 'type': 'global', } ENTRIES = [ - {'textPayload': TEXT, 'insertId': IID1}, - {'jsonPayload': STRUCT, 'insertId': IID2}, + {'textPayload': TEXT, 'insertId': IID1, 'timestamp': TIMESTAMP1}, + {'jsonPayload': STRUCT, 'insertId': IID2, 'timestamp': TIMESTAMP2}, {'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3}, + 'insertId': IID3, 'timestamp': TIMESTAMP3}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_text(TEXT, insert_id=IID1) - batch.log_struct(STRUCT, insert_id=IID2) - batch.log_proto(message, insert_id=IID3) + batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1) + batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2) + batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3) batch.commit() self.assertEqual(list(batch.entries), []) @@ -670,6 +753,7 @@ def test_context_mgr_success(self): (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) def test_context_mgr_failure(self): + import datetime from google.protobuf.struct_pb2 import Struct, Value TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -684,7 +768,7 @@ def test_context_mgr_failure(self): 'requestUrl': URI, 'status': STATUS, } - TIMESTAMP = '2016-10-12T15:01:23.045123456Z' + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) message = Struct(fields={'foo': Value(bool_value=True)}) client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() From 1f0d30c2875ef453cd4a318995fec637dbd528f4 Mon Sep 17 00:00:00 2001 From: francois kawala Date: Thu, 15 Dec 2016 17:09:13 +0100 Subject: [PATCH 065/855] Unpack-variables in parentheses to avoid backslash continuation. --- packages/google-cloud-logging/google/cloud/logging/logger.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 07eb2de59de8..d5a5b201dca0 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -455,8 +455,8 @@ def commit(self, client=None): kwargs['labels'] = self.logger.labels entries = [] - for entry_type, entry, labels, iid, severity, http_req, timestamp in \ - self.entries: + for (entry_type, entry, labels, iid, severity, http_req, + timestamp) in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': From 4c0cf91bdd2fe6cda13238d1416770bdd54cd2f3 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 15 Dec 2016 12:11:54 -0800 Subject: [PATCH 066/855] Removing most (direct) connection usage in Logging. 
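The timestamp tests in the hunks above all encode one serialization rule: a naive UTC datetime becomes an RFC 3339 string with microsecond precision and a 'Z' suffix. A minimal standalone sketch of that mapping (the strftime helper here is an illustrative stand-in; the library itself uses a helper from google.cloud._helpers):

    import datetime

    def _to_rfc3339(value):
        # Stand-in serializer: microsecond precision plus a 'Z' suffix,
        # assuming a naive datetime already in UTC.
        return value.strftime('%Y-%m-%dT%H:%M:%S.%f') + 'Z'

    stamp = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    assert _to_rfc3339(stamp) == '2016-12-31T00:01:02.999999Z'

The resulting string, '2016-12-31T00:01:02.999999Z', is exactly what each new test above expects in its ENTRIES payload.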
--- .../google/cloud/logging/_gax.py | 6 ++-- .../google/cloud/logging/_http.py | 29 +++++++++---------- .../handlers/transports/background_thread.py | 6 ++-- .../transports/test_background_thread.py | 12 ++------ .../unit_tests/test__gax.py | 18 ++---------- .../unit_tests/test__http.py | 8 ++--- .../unit_tests/test_client.py | 17 ++++++----- 7 files changed, 38 insertions(+), 58 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 0ffd46dd3fcb..7ddadba01d06 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -532,7 +532,7 @@ def make_gax_logging_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client(channel=channel) return _LoggingAPI(generated, client) @@ -548,7 +548,7 @@ def make_gax_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, MetricsServiceV2Client.SERVICE_ADDRESS) generated = MetricsServiceV2Client(channel=channel) return _MetricsAPI(generated, client) @@ -564,7 +564,7 @@ def make_gax_sinks_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, ConfigServiceV2Client.SERVICE_ADDRESS) generated = ConfigServiceV2Client(channel=channel) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 8d9eccc819d5..8056689235db 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Create / interact with Stackdriver Logging connections.""" +"""Interact with Stackdriver Logging via JSON-over-HTTP.""" import functools @@ -67,7 +67,7 @@ class _LoggingAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_entries(self, projects, filter_=None, order_by=None, page_size=None, page_token=None): @@ -161,8 +161,7 @@ def write_entries(self, entries, logger_name=None, resource=None, if labels is not None: data['labels'] = labels - self._connection.api_request(method='POST', path='/entries:write', - data=data) + self.api_request(method='POST', path='/entries:write', data=data) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -177,7 +176,7 @@ def logger_delete(self, project, logger_name): :param logger_name: name of logger containing the log entries to delete """ path = '/projects/%s/logs/%s' % (project, logger_name) - self._connection.api_request(method='DELETE', path=path) + self.api_request(method='DELETE', path=path) class _SinksAPI(object): @@ -191,7 +190,7 @@ class _SinksAPI(object): """ def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. @@ -253,7 +252,7 @@ def sink_create(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self._connection.api_request(method='POST', path=target, data=data) + self.api_request(method='POST', path=target, data=data) def sink_get(self, project, sink_name): """API call: retrieve a sink resource. @@ -271,7 +270,7 @@ def sink_get(self, project, sink_name): :returns: The JSON sink object returned from the API. """ target = '/projects/%s/sinks/%s' % (project, sink_name) - return self._connection.api_request(method='GET', path=target) + return self.api_request(method='GET', path=target) def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. @@ -299,7 +298,7 @@ def sink_update(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self._connection.api_request(method='PUT', path=target, data=data) + self.api_request(method='PUT', path=target, data=data) def sink_delete(self, project, sink_name): """API call: delete a sink resource. @@ -314,7 +313,7 @@ def sink_delete(self, project, sink_name): :param sink_name: the name of the sink """ target = '/projects/%s/sinks/%s' % (project, sink_name) - self._connection.api_request(method='DELETE', path=target) + self.api_request(method='DELETE', path=target) class _MetricsAPI(object): @@ -328,7 +327,7 @@ class _MetricsAPI(object): """ def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. @@ -389,7 +388,7 @@ def metric_create(self, project, metric_name, filter_, description=None): 'filter': filter_, 'description': description, } - self._connection.api_request(method='POST', path=target, data=data) + self.api_request(method='POST', path=target, data=data) def metric_get(self, project, metric_name): """API call: retrieve a metric resource. 
@@ -407,7 +406,7 @@ def metric_get(self, project, metric_name): :returns: The JSON metric object returned from the API. """ target = '/projects/%s/metrics/%s' % (project, metric_name) - return self._connection.api_request(method='GET', path=target) + return self.api_request(method='GET', path=target) def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. @@ -434,7 +433,7 @@ def metric_update(self, project, metric_name, filter_, description): 'filter': filter_, 'description': description, } - self._connection.api_request(method='PUT', path=target, data=data) + self.api_request(method='PUT', path=target, data=data) def metric_delete(self, project, metric_name): """API call: delete a metric resource. @@ -449,7 +448,7 @@ def metric_delete(self, project, metric_name): :param metric_name: the name of the metric. """ target = '/projects/%s/metrics/%s' % (project, metric_name) - self._connection.api_request(method='DELETE', path=target) + self.api_request(method='DELETE', path=target) def _item_to_entry(iterator, resource, loggers): diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index c090474a540b..9c8ea85c937a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -150,9 +150,9 @@ class BackgroundThreadTransport(Transport): """ def __init__(self, client, name): - http = copy.deepcopy(client._connection.http) - self.client = client.__class__(client.project, - client._connection.credentials, http) + http = copy.deepcopy(client._http) + self.client = client.__class__( + client.project, client._credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index 5ca76a2f68c3..a21302f251d7 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -157,13 +157,6 @@ def commit(self): del self.entries[:] -class _Connection(object): - - def __init__(self): - self.http = None - self.credentials = object() - - class _Logger(object): def __init__(self, name): @@ -178,9 +171,8 @@ class _Client(object): def __init__(self, project, http=None, credentials=None): self.project = project - self.http = http - self.credentials = credentials - self._connection = _Connection() + self._http = http + self._credentials = credentials def logger(self, name): # pylint: disable=unused-argument self._logger = _Logger(name) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 4d269236e3e6..e2f158ffd0cc 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1085,7 +1085,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1130,7 +1130,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = 
object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1175,7 +1175,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = _Client(creds) + client = mock.Mock(_credentials=creds) channels = [] channel_args = [] channel_obj = object() @@ -1324,15 +1324,3 @@ def delete_log_metric(self, metric_name, options=None): raise GaxError('error') if self._log_metric_not_found: raise GaxError('notfound', self._make_grpc_not_found()) - - -class _Connection(object): - - def __init__(self, credentials): - self.credentials = credentials - - -class _Client(object): - - def __init__(self, credentials): - self._connection = _Connection(credentials) diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 6fe8c825feef..bfc8d7981e46 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -58,11 +58,11 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - connection = object() + connection = _Connection() client = _Client(connection) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) @staticmethod def _make_timestamp(): @@ -308,11 +308,11 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - connection = _make_credentials() + connection = _Connection() client = _Client(connection) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) def test_list_sinks_no_paging(self): import six diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 5e48f7b95367..0e215ad1f510 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -52,11 +52,12 @@ def test_logging_api_wo_gax(self): client = self._make_one(self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + + conn = client._connection = _Connection() api = client.logging_api self.assertIsInstance(api, _LoggingAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.logging_api self.assertIs(again, api) @@ -106,11 +107,11 @@ def test_sinks_api_wo_gax(self): self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.sinks_api self.assertIsInstance(api, _SinksAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.sinks_api self.assertIs(again, api) @@ -146,11 +147,11 @@ def test_metrics_api_wo_gax(self): self.PROJECT, credentials=_make_credentials(), use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.metrics_api self.assertIsInstance(api, _MetricsAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.metrics_api self.assertIs(again, api) @@ -600,7 +601,7 @@ def 
test_get_default_handler_general(self): credentials=credentials, use_gax=False) handler = client.get_default_handler() - deepcopy.assert_called_once_with(client._connection.http) + deepcopy.assert_called_once_with(client._http) self.assertIsInstance(handler, CloudLoggingHandler) @@ -620,7 +621,7 @@ def test_setup_logging(self): credentials=credentials, use_gax=False) client.setup_logging() - deepcopy.assert_called_once_with(client._connection.http) + deepcopy.assert_called_once_with(client._http) setup_logging.assert_called() From 5b0945fe44d920a74ca98782eadb4b1780849f52 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 12 Jan 2017 14:17:11 -0800 Subject: [PATCH 067/855] Fixing credentials without scopes issue in logging. See: https://travis-ci.org/GoogleCloudPlatform/google-cloud-python/builds/191433056#L2195 This is a band-aid for now. I caused it in #2875 and will roll-back this roll-back at a later time. @daspecter fixed a similar breakage in #2909. --- .../google-cloud-logging/google/cloud/logging/_gax.py | 6 +++--- .../logging/handlers/transports/background_thread.py | 2 +- .../handlers/transports/test_background_thread.py | 4 ++++ packages/google-cloud-logging/unit_tests/test__gax.py | 9 ++++++--- 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 7ddadba01d06..0ffd46dd3fcb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -532,7 +532,7 @@ def make_gax_logging_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client(channel=channel) return _LoggingAPI(generated, client) @@ -548,7 +548,7 @@ def make_gax_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, MetricsServiceV2Client.SERVICE_ADDRESS) generated = MetricsServiceV2Client(channel=channel) return _MetricsAPI(generated, client) @@ -564,7 +564,7 @@ def make_gax_sinks_api(client): :returns: A metrics API instance with the proper credentials. 
""" channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, + client._connection.credentials, DEFAULT_USER_AGENT, ConfigServiceV2Client.SERVICE_ADDRESS) generated = ConfigServiceV2Client(channel=channel) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 9c8ea85c937a..811bc23bdb9f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -152,7 +152,7 @@ class BackgroundThreadTransport(Transport): def __init__(self, client, name): http = copy.deepcopy(client._http) self.client = client.__class__( - client.project, client._credentials, http) + client.project, client._connection.credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index a21302f251d7..aa0cae2f2669 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -170,9 +170,13 @@ def batch(self): class _Client(object): def __init__(self, project, http=None, credentials=None): + import mock + self.project = project self._http = http self._credentials = credentials + self._connection = mock.Mock( + credentials=credentials, spec=['credentials']) def logger(self, name): # pylint: disable=unused-argument self._logger = _Logger(name) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index e2f158ffd0cc..9020c44236e0 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1085,7 +1085,8 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = mock.Mock(_credentials=creds) + conn = mock.Mock(credentials=creds, spec=['credentials']) + client = mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] channel_obj = object() @@ -1130,7 +1131,8 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = mock.Mock(_credentials=creds) + conn = mock.Mock(credentials=creds, spec=['credentials']) + client = mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] channel_obj = object() @@ -1175,7 +1177,8 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - client = mock.Mock(_credentials=creds) + conn = mock.Mock(credentials=creds, spec=['credentials']) + client = mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] channel_obj = object() From b0819494ce62017ecf215324d87ec17da4bce894 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 18 Jan 2017 13:14:03 -0500 Subject: [PATCH 068/855] Update import spacing part 2. 
--- .../unit_tests/test__gax.py | 39 ++++++++++++++++- .../unit_tests/test__helpers.py | 1 + .../unit_tests/test__http.py | 15 +++++++ .../unit_tests/test_client.py | 7 ++++ .../unit_tests/test_entries.py | 7 ++++ .../unit_tests/test_logger.py | 42 +++++++++++++++---- .../unit_tests/test_metric.py | 2 + .../unit_tests/test_sink.py | 2 + 8 files changed, 106 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 9020c44236e0..4d150535db22 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -32,6 +32,7 @@ def _make_credentials(): # pylint: disable=redefined-outer-name import google.auth.credentials # pylint: enable=redefined-outer-name + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -333,6 +334,7 @@ def test_list_entries_with_extra_properties(self): def test_write_entries_single(self): from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + TEXT = 'TEXT' ENTRY = { 'logName': self.LOG_PATH, @@ -367,6 +369,7 @@ def test_write_entries_w_extra_properties(self): from google.logging.type.log_severity_pb2 import WARNING from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import UTC, _pb_timestamp_to_datetime + NOW = datetime.utcnow().replace(tzinfo=UTC) TEXT = 'TEXT' SEVERITY = 'WARNING' @@ -462,7 +465,9 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): from google.logging.type.log_severity_pb2 import WARNING from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_rfc3339, UTC + from google.cloud._helpers import _datetime_to_rfc3339 + from google.cloud._helpers import UTC + TEXT = 'TEXT' NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' @@ -616,6 +621,7 @@ class Test_SinksAPI(_Base, unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._gax import _SinksAPI + return _SinksAPI def test_ctor(self): @@ -700,6 +706,7 @@ def test_list_sinks_w_paging(self): def test_sink_create_error(self): from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -710,6 +717,7 @@ def test_sink_create_error(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict + gax_api = _GAXSinksAPI(_create_sink_conflict=True) api = self._make_one(gax_api, None) @@ -720,6 +728,7 @@ def test_sink_create_conflict(self): def test_sink_create_ok(self): from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -737,6 +746,7 @@ def test_sink_create_ok(self): def test_sink_get_error(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -745,6 +755,7 @@ def test_sink_get_error(self): def test_sink_get_miss(self): from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -775,6 +786,7 @@ def test_sink_get_hit(self): def test_sink_update_error(self): from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -785,6 +797,7 @@ def test_sink_update_error(self): def test_sink_update_miss(self): from 
google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -816,6 +829,7 @@ def test_sink_update_hit(self): def test_sink_delete_error(self): from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -824,6 +838,7 @@ def test_sink_delete_error(self): def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSinksAPI(_sink_not_found=True) api = self._make_one(gax_api, None) @@ -850,6 +865,7 @@ class Test_MetricsAPI(_Base, unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._gax import _MetricsAPI + return _MetricsAPI def test_ctor(self): @@ -932,6 +948,7 @@ def test_list_metrics_w_paging(self): def test_metric_create_error(self): from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -942,6 +959,7 @@ def test_metric_create_error(self): def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict + gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) api = self._make_one(gax_api, None) @@ -952,6 +970,7 @@ def test_metric_create_conflict(self): def test_metric_create_ok(self): from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + gax_api = _GAXMetricsAPI() api = self._make_one(gax_api, None) @@ -969,6 +988,7 @@ def test_metric_create_ok(self): def test_metric_get_error(self): from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() api = self._make_one(gax_api, None) @@ -977,6 +997,7 @@ def test_metric_get_error(self): def test_metric_get_miss(self): from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -1007,6 +1028,7 @@ def test_metric_get_hit(self): def test_metric_update_error(self): from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -1017,6 +1039,7 @@ def test_metric_update_error(self): def test_metric_update_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() api = self._make_one(gax_api, None) @@ -1048,6 +1071,7 @@ def test_metric_update_hit(self): def test_metric_delete_error(self): from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) api = self._make_one(gax_api, None) @@ -1056,6 +1080,7 @@ def test_metric_delete_error(self): def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXMetricsAPI(_log_metric_not_found=True) api = self._make_one(gax_api, None) @@ -1078,6 +1103,7 @@ class Test_make_gax_logging_api(unittest.TestCase): def _call_fut(self, client): from google.cloud.logging._gax import make_gax_logging_api + return make_gax_logging_api(client) def test_it(self): @@ -1124,6 +1150,7 @@ class Test_make_gax_metrics_api(unittest.TestCase): def _call_fut(self, client): from google.cloud.logging._gax import make_gax_metrics_api + return make_gax_metrics_api(client) def test_it(self): @@ -1170,6 +1197,7 @@ class Test_make_gax_sinks_api(unittest.TestCase): def _call_fut(self, client): from google.cloud.logging._gax import make_gax_sinks_api + return make_gax_sinks_api(client) def test_it(self): @@ -1230,6 +1258,7 @@ def write_log_entries(self, entries, log_name, resource, labels, def delete_log(self, log_name, options): from google.gax.errors import GaxError + self._delete_log_called_with = log_name, options 
if self._random_gax_error: raise GaxError('error') @@ -1248,6 +1277,7 @@ def list_sinks(self, parent, page_size, options): def create_sink(self, parent, sink, options): from google.gax.errors import GaxError + self._create_sink_called_with = parent, sink, options if self._random_gax_error: raise GaxError('error') @@ -1256,6 +1286,7 @@ def create_sink(self, parent, sink, options): def get_sink(self, sink_name, options): from google.gax.errors import GaxError + self._get_sink_called_with = sink_name, options if self._random_gax_error: raise GaxError('error') @@ -1266,6 +1297,7 @@ def get_sink(self, sink_name, options): def update_sink(self, sink_name, sink, options=None): from google.gax.errors import GaxError + self._update_sink_called_with = sink_name, sink, options if self._random_gax_error: raise GaxError('error') @@ -1276,6 +1308,7 @@ def update_sink(self, sink_name, sink, options=None): def delete_sink(self, sink_name, options=None): from google.gax.errors import GaxError + self._delete_sink_called_with = sink_name, options if self._random_gax_error: raise GaxError('error') @@ -1294,6 +1327,7 @@ def list_log_metrics(self, parent, page_size, options): def create_log_metric(self, parent, metric, options): from google.gax.errors import GaxError + self._create_log_metric_called_with = parent, metric, options if self._random_gax_error: raise GaxError('error') @@ -1302,6 +1336,7 @@ def create_log_metric(self, parent, metric, options): def get_log_metric(self, metric_name, options): from google.gax.errors import GaxError + self._get_log_metric_called_with = metric_name, options if self._random_gax_error: raise GaxError('error') @@ -1312,6 +1347,7 @@ def get_log_metric(self, metric_name, options): def update_log_metric(self, metric_name, metric, options=None): from google.gax.errors import GaxError + self._update_log_metric_called_with = metric_name, metric, options if self._random_gax_error: raise GaxError('error') @@ -1322,6 +1358,7 @@ def update_log_metric(self, metric_name, metric, options=None): def delete_log_metric(self, metric_name, options=None): from google.gax.errors import GaxError + self._delete_log_metric_called_with = metric_name, options if self._random_gax_error: raise GaxError('error') diff --git a/packages/google-cloud-logging/unit_tests/test__helpers.py b/packages/google-cloud-logging/unit_tests/test__helpers.py index 8f8e43f36734..7cc2d392514c 100644 --- a/packages/google-cloud-logging/unit_tests/test__helpers.py +++ b/packages/google-cloud-logging/unit_tests/test__helpers.py @@ -21,6 +21,7 @@ class Test_entry_from_resource(unittest.TestCase): @staticmethod def _call_fut(resource, client, loggers): from google.cloud.logging._helpers import entry_from_resource + return entry_from_resource(resource, client, loggers) def test_unknown_type(self): diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index bfc8d7981e46..953291dad1e9 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -30,6 +31,7 @@ class TestConnection(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._http import Connection + return Connection def _make_one(self, *args, **kw): @@ -52,6 +54,7 @@ class Test_LoggingAPI(unittest.TestCase): @staticmethod def _get_target_class(): from 
google.cloud.logging._http import _LoggingAPI + return _LoggingAPI def _make_one(self, *args, **kw): @@ -302,6 +305,7 @@ class Test_SinksAPI(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._http import _SinksAPI + return _SinksAPI def _make_one(self, *args, **kw): @@ -400,6 +404,7 @@ def test_list_sinks_w_paging(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict + SENT = { 'name': self.SINK_NAME, 'filter': self.FILTER, @@ -440,6 +445,7 @@ def test_sink_create_ok(self): def test_sink_get_miss(self): from google.cloud.exceptions import NotFound + conn = _Connection() client = _Client(conn) api = self._make_one(client) @@ -470,6 +476,7 @@ def test_sink_get_hit(self): def test_sink_update_miss(self): from google.cloud.exceptions import NotFound + SENT = { 'name': self.SINK_NAME, 'filter': self.FILTER, @@ -509,6 +516,7 @@ def test_sink_update_hit(self): def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound + conn = _Connection() client = _Client(conn) api = self._make_one(client) @@ -544,6 +552,7 @@ class Test_MetricsAPI(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._http import _MetricsAPI + return _MetricsAPI def _make_one(self, *args, **kw): @@ -633,6 +642,7 @@ def test_list_metrics_w_paging(self): def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict + SENT = { 'name': self.METRIC_NAME, 'filter': self.FILTER, @@ -673,6 +683,7 @@ def test_metric_create_ok(self): def test_metric_get_miss(self): from google.cloud.exceptions import NotFound + conn = _Connection() client = _Client(conn) api = self._make_one(client) @@ -703,6 +714,7 @@ def test_metric_get_hit(self): def test_metric_update_miss(self): from google.cloud.exceptions import NotFound + SENT = { 'name': self.METRIC_NAME, 'filter': self.FILTER, @@ -742,6 +754,7 @@ def test_metric_update_hit(self): def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound + conn = _Connection() client = _Client(conn) api = self._make_one(client) @@ -776,6 +789,7 @@ def __init__(self, *responses): def api_request(self, **kw): from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound + self._called_with = kw if self._raise_conflict: raise Conflict('oops') @@ -788,6 +802,7 @@ def api_request(self, **kw): def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 0e215ad1f510..85d464a83af0 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -36,6 +37,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.client import Client + return Client def _make_one(self, *args, **kw): @@ -182,6 +184,7 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging.logger import Logger + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) logger = client.logger(self.LOGGER_NAME) @@ -322,6 +325,7 @@ def test_list_entries_explicit(self): def 
test_sink_defaults(self): from google.cloud.logging.sink import Sink + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME) @@ -334,6 +338,7 @@ def test_sink_defaults(self): def test_sink_explicit(self): from google.cloud.logging.sink import Sink + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) @@ -440,6 +445,7 @@ def test_list_sinks_with_paging(self): def test_metric_defaults(self): from google.cloud.logging.metric import Metric + creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) @@ -453,6 +459,7 @@ def test_metric_defaults(self): def test_metric_explicit(self): from google.cloud.logging.metric import Metric + creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/unit_tests/test_entries.py index 0ae4a5e112f9..d39d72a27af8 100644 --- a/packages/google-cloud-logging/unit_tests/test_entries.py +++ b/packages/google-cloud-logging/unit_tests/test_entries.py @@ -19,6 +19,7 @@ class Test_logger_name_from_path(unittest.TestCase): def _call_fut(self, path): from google.cloud.logging.entries import logger_name_from_path + return logger_name_from_path(path) def test_w_simple_name(self): @@ -67,6 +68,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import datetime + PAYLOAD = 'PAYLOAD' IID = 'IID' TIMESTAMP = datetime.datetime.now() @@ -120,6 +122,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC + klass = self._get_target_class() client = _Client(self.PROJECT) PAYLOAD = 'PAYLOAD' @@ -164,6 +167,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC + client = _Client(self.PROJECT) PAYLOAD = 'PAYLOAD' IID = 'IID' @@ -197,6 +201,7 @@ class TestProtobufEntry(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.entries import ProtobufEntry + return ProtobufEntry def _make_one(self, *args, **kw): @@ -206,6 +211,7 @@ def test_parse_message(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + LOGGER = object() message = Struct(fields={'foo': Value(bool_value=False)}) with_true = Struct(fields={'foo': Value(bool_value=True)}) @@ -217,6 +223,7 @@ def test_parse_message(self): def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 48edaf0ed5a4..31bad0402d86 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -30,6 +31,7 @@ class TestLogger(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.logger import Logger + return Logger 
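The test_logger.py hunks that follow split the combined 'from google.protobuf.struct_pb2 import Struct, Value' into one import per line; for context, a tiny runnable sketch of how those protobuf types round-trip in these tests:

    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    # MessageToJson returns a JSON string; the tests parse it back to a dict.
    assert json.loads(MessageToJson(message)) == {'foo': True}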
def _make_one(self, *args, **kw): @@ -64,6 +66,7 @@ def test_ctor_explicit(self): def test_batch_w_bound_client(self): from google.cloud.logging.logger import Batch + conn = object() client = _Client(self.PROJECT, conn) logger = self._make_one(self.LOGGER_NAME, client=client) @@ -74,6 +77,7 @@ def test_batch_w_bound_client(self): def test_batch_w_alternate_client(self): from google.cloud.logging.logger import Batch + conn1 = object() conn2 = object() client1 = _Client(self.PROJECT, conn1) @@ -268,6 +272,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): def test_log_struct_w_timestamp(self): import datetime + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) ENTRIES = [{ @@ -292,6 +297,7 @@ def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) ENTRIES = [{ 'logName': 'projects/%s/logs/%s' % ( @@ -314,6 +320,7 @@ def test_log_proto_w_default_labels(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} ENTRIES = [{ @@ -338,7 +345,9 @@ def test_log_proto_w_default_labels(self): def test_log_proto_w_explicit_client_labels_severity_httpreq(self): import json from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -381,7 +390,9 @@ def test_log_proto_w_timestamp(self): import json import datetime from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + message = Struct(fields={'foo': Value(bool_value=True)}) TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) ENTRIES = [{ @@ -504,6 +515,7 @@ class TestBatch(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.logger import Batch + return Batch def _make_one(self, *args, **kwargs): @@ -528,6 +540,7 @@ def test_log_text_defaults(self): def test_log_text_explicit(self): import datetime + TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -562,6 +575,7 @@ def test_log_struct_defaults(self): def test_log_struct_explicit(self): import datetime + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -586,7 +600,9 @@ def test_log_struct_explicit(self): [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) def test_log_proto_defaults(self): - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + message = Struct(fields={'foo': Value(bool_value=True)}) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -597,7 +613,9 @@ def test_log_proto_defaults(self): def test_log_proto_explicit(self): import datetime - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 
import Value + message = Struct(fields={'foo': Value(bool_value=True)}) LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -633,7 +651,9 @@ def test_commit_w_bound_client(self): import json import datetime from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) @@ -669,8 +689,10 @@ def test_commit_w_bound_client(self): def test_commit_w_alternate_client(self): import json from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value from google.cloud.logging.logger import Logger + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) @@ -713,8 +735,10 @@ def test_commit_w_alternate_client(self): def test_context_mgr_success(self): import json from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value from google.cloud.logging.logger import Logger + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) @@ -754,7 +778,9 @@ def test_context_mgr_success(self): def test_context_mgr_failure(self): import datetime - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/unit_tests/test_metric.py index 5f4b05c054d2..12bf250ca2aa 100644 --- a/packages/google-cloud-logging/unit_tests/test_metric.py +++ b/packages/google-cloud-logging/unit_tests/test_metric.py @@ -25,6 +25,7 @@ class TestMetric(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.metric import Metric + return Metric def _make_one(self, *args, **kw): @@ -238,6 +239,7 @@ def metric_create(self, project, metric_name, filter_, description): def metric_get(self, project, metric_name): from google.cloud.exceptions import NotFound + self._metric_get_called_with = (project, metric_name) try: return self._metric_get_response diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/unit_tests/test_sink.py index ca2a7a6df9c4..15acc46ce12d 100644 --- a/packages/google-cloud-logging/unit_tests/test_sink.py +++ b/packages/google-cloud-logging/unit_tests/test_sink.py @@ -25,6 +25,7 @@ class TestSink(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.sink import Sink + return Sink def _make_one(self, *args, **kw): @@ -260,6 +261,7 @@ def sink_create(self, project, sink_name, filter_, destination): def sink_get(self, project, sink_name): from google.cloud.exceptions import NotFound + self._sink_get_called_with = (project, sink_name) try: return self._sink_get_response From 00a2b651e0b0ac1f2ab9485b6e7e325b42153ead Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 
13:13:02 -0800 Subject: [PATCH 069/855] Renaming JSONClient -> ClientWithProject. Done via: $ git grep -l JSONClient | xargs sed -i s/JSONClient/ClientWithProject/g Also fixing test b0rken by previous commit. --- packages/google-cloud-logging/google/cloud/logging/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 77e762e6c808..2a29c0d03b49 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -29,7 +29,7 @@ else: _HAVE_GAX = True -from google.cloud.client import JSONClient +from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.logging._http import Connection from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI @@ -59,7 +59,7 @@ """Environment variable set in a Google Container Engine environment.""" -class Client(JSONClient): +class Client(ClientWithProject): """Client to bundle configuration needed for API requests. :type project: str From e371a204b1022abc8f031294d2aface30e6088b1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 16:52:18 -0800 Subject: [PATCH 070/855] Changing logging Connection to only accept client. --- .../google/cloud/logging/_http.py | 18 ++---------------- .../google/cloud/logging/client.py | 9 +++++++-- .../unit_tests/test__http.py | 6 +++--- 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 8056689235db..e666daae2d3c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -26,16 +26,8 @@ class Connection(_http.JSONConnection): """A connection to Google Stackdriver Logging via the JSON REST API. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - connection. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: (Optional) HTTP object to make requests. - - :type api_base_url: str - :param api_base_url: The base of the API call URL. Defaults to the value - :attr:`Connection.API_BASE_URL`. + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client that owns the current connection. """ API_BASE_URL = 'https://logging.googleapis.com' @@ -47,12 +39,6 @@ class Connection(_http.JSONConnection): API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' """A template for the URL of a particular API call.""" - SCOPE = ('https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Logging consumer.""" - class _LoggingAPI(object): """Helper mapping logging-related APIs. 
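The hunk above strips credentials, http, and SCOPE out of Connection; the client.py hunk below supplies them from the client side instead. A minimal sketch of the inverted wiring (ConnectionSketch and ClientSketch are simplified stand-ins for the real classes):

    class ConnectionSketch(object):
        API_BASE_URL = 'https://logging.googleapis.com'

        def __init__(self, client):
            # The connection now only remembers its owning client and
            # reads credentials/transport through it on demand.
            self._client = client

    class ClientSketch(object):
        def __init__(self, credentials=None, http=None):
            self._credentials = credentials
            self._http = http
            self._connection = ConnectionSketch(self)

    client = ClientSketch(credentials=object())
    assert client._connection._client is client

That final assertion mirrors what the reworked test_default_url checks below.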
diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 2a29c0d03b49..130db45c2855 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -91,12 +91,17 @@ class Client(ClientWithProject): _sinks_api = None _metrics_api = None + SCOPE = ('https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Logging consumer.""" + def __init__(self, project=None, credentials=None, http=None, use_gax=None): super(Client, self).__init__( project=project, credentials=credentials, http=http) - self._connection = Connection( - credentials=self._credentials, http=self._http) + self._connection = Connection(self) if use_gax is None: self._use_gax = _USE_GAX else: diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index 953291dad1e9..b3536d2bf7b3 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -38,9 +38,9 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_default_url(self): - creds = _make_credentials() - conn = self._make_one(creds) - self.assertEqual(conn.credentials, creds) + client = object() + conn = self._make_one(client) + self.assertIs(conn._client, client) class Test_LoggingAPI(unittest.TestCase): From 2e8a1bd202a47a123907c515b8ac4c352f7df47d Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 16 Feb 2017 14:19:53 -0500 Subject: [PATCH 071/855] Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. (#3028) * Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 98645b8ea1e8..8e6d4fc9974a 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.22.1, < 0.23dev', + 'google-cloud-core >= 0.23.0, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.90.0, < 0.91dev', ] From 7aa67ad8044aa07d85dd7fe0ed3cd4734c5c9160 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Fri, 17 Feb 2017 08:43:51 -0500 Subject: [PATCH 072/855] Logging formatting. 
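Patch 072 extends the convention from patch 068: deferred imports sit at the top of the helper, one name per line, then a blank line before the first statement. Distilled with a stdlib stand-in for the real deferred import:

    def _get_target_class():
        from collections import OrderedDict  # stand-in for the real import

        return OrderedDict

    assert _get_target_class().__name__ == 'OrderedDict'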
--- .../google-cloud-logging/unit_tests/handlers/test_handlers.py | 2 ++ .../unit_tests/handlers/transports/test_background_thread.py | 2 ++ .../unit_tests/handlers/transports/test_base.py | 1 + .../unit_tests/handlers/transports/test_sync.py | 1 + packages/google-cloud-logging/unit_tests/test__gax.py | 1 + 5 files changed, 7 insertions(+) diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py index 234b2991df45..26d3e6352024 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py +++ b/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py @@ -23,6 +23,7 @@ class TestCloudLoggingHandler(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.handlers.handlers import CloudLoggingHandler + return CloudLoggingHandler def _make_one(self, *args, **kw): @@ -49,6 +50,7 @@ class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers.handlers import setup_logging + if excludes: return setup_logging(handler, excluded_loggers=excludes) else: diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index aa0cae2f2669..454f1d873de5 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -25,6 +25,7 @@ class TestBackgroundThreadHandler(unittest.TestCase): def _get_target_class(): from google.cloud.logging.handlers.transports import ( BackgroundThreadTransport) + return BackgroundThreadTransport def _make_one(self, *args, **kw): @@ -62,6 +63,7 @@ class TestWorker(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.handlers.transports import background_thread + return background_thread._Worker def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py index 0fd673fc2a1b..2844f64fbf5a 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py @@ -22,6 +22,7 @@ class TestBaseHandler(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.handlers.transports import Transport + return Transport def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py index 6650eb8a9d2e..562a7175380b 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py @@ -23,6 +23,7 @@ class TestSyncHandler(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging.handlers.transports import SyncTransport + return SyncTransport def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 4d150535db22..852166861417 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -53,6 +53,7 @@ class 
Test_LoggingAPI(_Base, unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.logging._gax import _LoggingAPI + return _LoggingAPI def test_ctor(self): From 41abf9c9a0b341de7fa4b9b943b63146d265e9ad Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Mon, 20 Feb 2017 13:36:32 -0500 Subject: [PATCH 073/855] Rename deprecated assertEquals to assertEqual. --- .../unit_tests/handlers/transports/test_background_thread.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py index 454f1d873de5..bcd54a8feb06 100644 --- a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py @@ -35,7 +35,7 @@ def test_ctor(self): client = _Client(self.PROJECT) NAME = 'python_logger' transport = self._make_one(client, NAME) - self.assertEquals(transport.worker.logger.name, NAME) + self.assertEqual(transport.worker.logger.name, NAME) def test_send(self): client = _Client(self.PROJECT) @@ -73,7 +73,7 @@ def test_ctor(self): NAME = 'python_logger' logger = _Logger(NAME) worker = self._make_one(logger) - self.assertEquals(worker.batch, logger._batch) + self.assertEqual(worker.batch, logger._batch) def test_run(self): NAME = 'python_logger' From 16346049b024eeb7a91f26cb02a21a1dd57e4367 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 22 Feb 2017 07:40:10 -0800 Subject: [PATCH 074/855] Adding GCCL header for HTTP APIs. (#3046) --- .../google/cloud/logging/__init__.py | 3 ++ .../google/cloud/logging/_http.py | 9 ++++++ .../unit_tests/test__http.py | 30 +++++++++++++++++++ 3 files changed, 42 insertions(+) diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index d83ea4798949..17df46b08bad 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -15,6 +15,9 @@ """Google Stackdriver Logging API wrapper.""" +from pkg_resources import get_distribution +__version__ = get_distribution('google-cloud-logging').version + from google.cloud.logging.client import Client diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index e666daae2d3c..2764736b1e39 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -18,11 +18,16 @@ from google.cloud import _http from google.cloud.iterator import HTTPIterator + +from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric +_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) + + class Connection(_http.JSONConnection): """A connection to Google Stackdriver Logging via the JSON REST API. @@ -39,6 +44,10 @@ class Connection(_http.JSONConnection): API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' """A template for the URL of a particular API call.""" + _EXTRA_HEADERS = { + _http.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + class _LoggingAPI(object): """Helper mapping logging-related APIs. 
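With the _EXTRA_HEADERS hook above, every JSON request issued through the
logging Connection now advertises the library version. A minimal sketch of
how the header is assembled, using only names google.cloud._http already
exposes; the rendered value in the comment is illustrative, not guaranteed:

    from google.cloud import _http
    from google.cloud.logging import __version__

    client_info = _http.CLIENT_INFO_TEMPLATE.format(__version__)
    extra_headers = {_http.CLIENT_INFO_HEADER: client_info}
    # e.g. {'X-Goog-API-Client': 'gl-python/2.7.13 gccl/0.23.0'}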
diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/unit_tests/test__http.py index b3536d2bf7b3..f0121f9dd319 100644 --- a/packages/google-cloud-logging/unit_tests/test__http.py +++ b/packages/google-cloud-logging/unit_tests/test__http.py @@ -42,6 +42,36 @@ def test_default_url(self): conn = self._make_one(client) self.assertIs(conn._client, client) + def test_extra_headers(self): + from google.cloud import _http as base_http + from google.cloud.logging import _http as MUT + + http = mock.Mock(spec=['request']) + response = mock.Mock(status=200, spec=['status']) + data = b'brent-spiner' + http.request.return_value = response, data + client = mock.Mock(_http=http, spec=['_http']) + + conn = self._make_one(client) + req_data = 'req-data-boring' + result = conn.api_request( + 'GET', '/rainbow', data=req_data, expect_json=False) + self.assertEqual(result, data) + + expected_headers = { + 'Content-Length': str(len(req_data)), + 'Accept-Encoding': 'gzip', + base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, + 'User-Agent': conn.USER_AGENT, + } + expected_uri = conn.build_api_url('/rainbow') + http.request.assert_called_once_with( + body=req_data, + headers=expected_headers, + method='GET', + uri=expected_uri, + ) + class Test_LoggingAPI(unittest.TestCase): From cc247fe4f8e289c839015024af7d625914c05ba2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Feb 2017 08:32:40 -0800 Subject: [PATCH 075/855] GAPIC Header Consistency: Logging (#3054) --- .../google/cloud/logging/_gax.py | 10 ++++--- packages/google-cloud-logging/setup.py | 4 +-- .../unit_tests/test__gax.py | 27 ++++++++++++++++--- 3 files changed, 33 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 0ffd46dd3fcb..b9eea35bd0fb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -39,6 +39,7 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator +from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric @@ -534,7 +535,8 @@ def make_gax_logging_api(client): channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) - generated = LoggingServiceV2Client(channel=channel) + generated = LoggingServiceV2Client( + channel=channel, lib_name='gccl', lib_version=__version__) return _LoggingAPI(generated, client) @@ -550,7 +552,8 @@ def make_gax_metrics_api(client): channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, MetricsServiceV2Client.SERVICE_ADDRESS) - generated = MetricsServiceV2Client(channel=channel) + generated = MetricsServiceV2Client( + channel=channel, lib_name='gccl', lib_version=__version__) return _MetricsAPI(generated, client) @@ -566,5 +569,6 @@ def make_gax_sinks_api(client): channel = make_secure_channel( client._connection.credentials, DEFAULT_USER_AGENT, ConfigServiceV2Client.SERVICE_ADDRESS) - generated = ConfigServiceV2Client(channel=channel) + generated = ConfigServiceV2Client( + channel=channel, lib_name='gccl', lib_version=__version__) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/setup.py 
b/packages/google-cloud-logging/setup.py index 8e6d4fc9974a..798542e69d67 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -52,12 +52,12 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.23.0, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', - 'gapic-google-cloud-logging-v2 >= 0.90.0, < 0.91dev', + 'gapic-google-cloud-logging-v2 >= 0.90.1, < 0.91dev', ] setup( name='google-cloud-logging', - version='0.22.0', + version='0.23.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 852166861417..3f3973152206 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1108,6 +1108,7 @@ def _call_fut(self, client): return make_gax_logging_api(client) def test_it(self): + from google.cloud.logging import __version__ from google.cloud.logging._gax import _LoggingAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT @@ -1116,6 +1117,7 @@ def test_it(self): client = mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] + generated_api_kwargs = [] channel_obj = object() generated = object() @@ -1123,8 +1125,9 @@ def make_channel(*args): channel_args.append(args) return channel_obj - def generated_api(channel=None): + def generated_api(channel=None, **kwargs): channels.append(channel) + generated_api_kwargs.append(kwargs) return generated host = 'foo.apis.invalid' @@ -1141,6 +1144,10 @@ def generated_api(channel=None): self.assertEqual(channel_args, [(creds, DEFAULT_USER_AGENT, host)]) + self.assertEqual(len(generated_api_kwargs), 1) + self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) + self.assertIsInstance(logging_api, _LoggingAPI) self.assertIs(logging_api._gax_api, generated) self.assertIs(logging_api._client, client) @@ -1155,6 +1162,7 @@ def _call_fut(self, client): return make_gax_metrics_api(client) def test_it(self): + from google.cloud.logging import __version__ from google.cloud.logging._gax import _MetricsAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT @@ -1163,6 +1171,7 @@ def test_it(self): client = mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] + generated_api_kwargs = [] channel_obj = object() generated = object() @@ -1170,8 +1179,9 @@ def make_channel(*args): channel_args.append(args) return channel_obj - def generated_api(channel=None): + def generated_api(channel=None, **kwargs): channels.append(channel) + generated_api_kwargs.append(kwargs) return generated host = 'foo.apis.invalid' @@ -1188,6 +1198,10 @@ def generated_api(channel=None): self.assertEqual(channel_args, [(creds, DEFAULT_USER_AGENT, host)]) + self.assertEqual(len(generated_api_kwargs), 1) + self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) + self.assertIsInstance(metrics_api, _MetricsAPI) self.assertIs(metrics_api._gax_api, generated) self.assertIs(metrics_api._client, client) @@ -1202,6 +1216,7 @@ def _call_fut(self, client): return make_gax_sinks_api(client) def test_it(self): + from google.cloud.logging import __version__ from google.cloud.logging._gax import _SinksAPI from google.cloud.logging._gax import DEFAULT_USER_AGENT @@ -1210,6 +1225,7 @@ def test_it(self): client = 
mock.Mock(_connection=conn, spec=['_connection']) channels = [] channel_args = [] + generated_api_kwargs = [] channel_obj = object() generated = object() @@ -1217,8 +1233,9 @@ def make_channel(*args): channel_args.append(args) return channel_obj - def generated_api(channel=None): + def generated_api(channel=None, **kwargs): channels.append(channel) + generated_api_kwargs.append(kwargs) return generated host = 'foo.apis.invalid' @@ -1235,6 +1252,10 @@ def generated_api(channel=None): self.assertEqual(channel_args, [(creds, DEFAULT_USER_AGENT, host)]) + self.assertEqual(len(generated_api_kwargs), 1) + self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) + self.assertIsInstance(sinks_api, _SinksAPI) self.assertIs(sinks_api._gax_api, generated) self.assertIs(sinks_api._client, client) From b30ff8a43273c1f010bd683564b53ee4aecdaf84 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 23 Feb 2017 12:28:30 -0800 Subject: [PATCH 076/855] Dropping usage of connection.credentials in logging. --- .../google-cloud-logging/google/cloud/logging/_gax.py | 6 +++--- .../logging/handlers/transports/background_thread.py | 2 +- packages/google-cloud-logging/unit_tests/test__gax.py | 9 +++------ 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index b9eea35bd0fb..cd8cccbd3eaa 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -533,7 +533,7 @@ def make_gax_logging_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client( channel=channel, lib_name='gccl', lib_version=__version__) @@ -550,7 +550,7 @@ def make_gax_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, MetricsServiceV2Client.SERVICE_ADDRESS) generated = MetricsServiceV2Client( channel=channel, lib_name='gccl', lib_version=__version__) @@ -567,7 +567,7 @@ def make_gax_sinks_api(client): :returns: A metrics API instance with the proper credentials. 
""" channel = make_secure_channel( - client._connection.credentials, DEFAULT_USER_AGENT, + client._credentials, DEFAULT_USER_AGENT, ConfigServiceV2Client.SERVICE_ADDRESS) generated = ConfigServiceV2Client( channel=channel, lib_name='gccl', lib_version=__version__) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 811bc23bdb9f..9c8ea85c937a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -152,7 +152,7 @@ class BackgroundThreadTransport(Transport): def __init__(self, client, name): http = copy.deepcopy(client._http) self.client = client.__class__( - client.project, client._connection.credentials, http) + client.project, client._credentials, http) logger = self.client.logger(name) self.worker = _Worker(logger) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 3f3973152206..1a69586651e9 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -1113,8 +1113,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - conn = mock.Mock(credentials=creds, spec=['credentials']) - client = mock.Mock(_connection=conn, spec=['_connection']) + client = mock.Mock(_credentials=creds, spec=['_credentials']) channels = [] channel_args = [] generated_api_kwargs = [] @@ -1167,8 +1166,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - conn = mock.Mock(credentials=creds, spec=['credentials']) - client = mock.Mock(_connection=conn, spec=['_connection']) + client = mock.Mock(_credentials=creds, spec=['_credentials']) channels = [] channel_args = [] generated_api_kwargs = [] @@ -1221,8 +1219,7 @@ def test_it(self): from google.cloud.logging._gax import DEFAULT_USER_AGENT creds = object() - conn = mock.Mock(credentials=creds, spec=['credentials']) - client = mock.Mock(_connection=conn, spec=['_connection']) + client = mock.Mock(_credentials=creds, spec=['_credentials']) channels = [] channel_args = [] generated_api_kwargs = [] From e0aaefb932c816b5fee895326fdbbc11c0420869 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 24 Feb 2017 11:30:18 -0800 Subject: [PATCH 077/855] Upgrading all versions for umbrella release. 
--- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 798542e69d67..ff723043fe09 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.23.0, < 0.24dev', + 'google-cloud-core >= 0.23.1, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.90.1, < 0.91dev', ] From befc8ff316832df0a8738d0338134e81f58d00b1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 24 Feb 2017 21:57:44 -0800 Subject: [PATCH 078/855] Vision 1.1 API Client (#3069) --- packages/google-cloud-logging/google/cloud/logging/logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index d5a5b201dca0..4ea35881765c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -171,7 +171,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, See: https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write - :type text: text + :type text: str :param text: the log message. :type client: :class:`~google.cloud.logging.client.Client` or From 7b420da11d85c47e8f7b1e79e68225a676035cac Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 27 Feb 2017 15:27:22 -0800 Subject: [PATCH 079/855] Bump logging dependencies to 0.91.0 (#3079) --- .../google/cloud/logging/_gax.py | 6 ++-- packages/google-cloud-logging/setup.py | 4 +-- .../unit_tests/test__gax.py | 34 +++++++++---------- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index cd8cccbd3eaa..887dd80ccb33 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -26,9 +26,9 @@ from google.gax import INITIAL_PAGE from google.gax.errors import GaxError from google.gax.grpc import exc_to_code -from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink -from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric -from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry +from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink +from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric +from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict from grpc import StatusCode diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index ff723043fe09..d56b15995c71 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -52,12 +52,12 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.23.1, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', - 'gapic-google-cloud-logging-v2 >= 0.90.1, < 0.91dev', + 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='0.23.0', + version='0.23.1', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py 
b/packages/google-cloud-logging/unit_tests/test__gax.py index 1a69586651e9..35d71750aa93 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -68,7 +68,7 @@ def test_list_entries_no_paging(self): from google.api.monitored_resource_pb2 import MonitoredResource from google.gax import INITIAL_PAGE - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import UTC @@ -126,7 +126,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): import datetime from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import UTC from google.cloud._testing import _GAXPageIterator @@ -217,8 +217,8 @@ def test_list_entries_with_paging_nested_payload(self): def _make_log_entry_with_extras(self, labels, iid, type_url, now): from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry - from google.cloud.grpc.logging.v2.log_entry_pb2 import ( + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import ( LogEntryOperation) from google.logging.type.http_request_pb2 import HttpRequest from google.logging.type.log_severity_pb2 import WARNING @@ -334,7 +334,7 @@ def test_list_entries_with_extra_properties(self): self.assertEqual(options.page_token, TOKEN) def test_write_entries_single(self): - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry TEXT = 'TEXT' ENTRY = { @@ -368,7 +368,7 @@ def test_write_entries_w_extra_properties(self): # pylint: disable=too-many-statements from datetime import datetime from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry from google.cloud._helpers import UTC, _pb_timestamp_to_datetime NOW = datetime.utcnow().replace(tzinfo=UTC) @@ -464,7 +464,7 @@ def _write_entries_multiple_helper(self, json_payload, json_struct_pb): # pylint: disable=too-many-statements import datetime from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry from google.protobuf.any_pb2 import Any from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import UTC @@ -635,7 +635,7 @@ def test_ctor(self): def test_list_sinks_no_paging(self): import six from google.gax import INITIAL_PAGE - from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink from google.cloud._testing import _GAXPageIterator from google.cloud.logging.sink import Sink @@ -670,7 +670,7 @@ def test_list_sinks_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_sinks_w_paging(self): - from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink from google.cloud._testing import 
_GAXPageIterator from google.cloud.logging.sink import Sink @@ -728,7 +728,7 @@ def test_sink_create_conflict(self): self.DESTINATION_URI) def test_sink_create_ok(self): - from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -764,7 +764,7 @@ def test_sink_get_miss(self): api.sink_get(self.PROJECT, self.SINK_NAME) def test_sink_get_hit(self): - from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink RESPONSE = { 'name': self.SINK_PATH, @@ -808,7 +808,7 @@ def test_sink_update_miss(self): self.DESTINATION_URI) def test_sink_update_hit(self): - from google.cloud.grpc.logging.v2.logging_config_pb2 import LogSink + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink response = LogSink(name=self.SINK_NAME, destination=self.DESTINATION_URI, @@ -877,7 +877,7 @@ def test_ctor(self): def test_list_metrics_no_paging(self): import six from google.gax import INITIAL_PAGE - from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric from google.cloud._testing import _GAXPageIterator from google.cloud.logging.metric import Metric @@ -912,7 +912,7 @@ def test_list_metrics_no_paging(self): self.assertEqual(options.page_token, INITIAL_PAGE) def test_list_metrics_w_paging(self): - from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric from google.cloud._testing import _GAXPageIterator from google.cloud.logging.metric import Metric @@ -970,7 +970,7 @@ def test_metric_create_conflict(self): self.DESCRIPTION) def test_metric_create_ok(self): - from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric gax_api = _GAXMetricsAPI() api = self._make_one(gax_api, None) @@ -1006,7 +1006,7 @@ def test_metric_get_miss(self): api.metric_get(self.PROJECT, self.METRIC_NAME) def test_metric_get_hit(self): - from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric RESPONSE = { 'name': self.METRIC_PATH, @@ -1050,7 +1050,7 @@ def test_metric_update_miss(self): self.DESCRIPTION) def test_metric_update_hit(self): - from google.cloud.grpc.logging.v2.logging_metrics_pb2 import LogMetric + from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric response = LogMetric(name=self.METRIC_NAME, description=self.DESCRIPTION, From 3b511762b3f369f864a13ab28bf1f36a7ec0e23c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 23 Feb 2017 13:51:39 -0800 Subject: [PATCH 080/855] Removing overly broad mock.Mock() in logging. 
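A bare mock.Mock() absorbs any attribute access or call, so a test can pass
silently against a typo; mock.Mock(spec=[]) makes unexpected access fail
loudly. Minimal sketch of the difference:

    import mock

    loose = mock.Mock()
    loose.no_such_method()  # silently returns another Mock

    strict = mock.Mock(spec=[])
    try:
        strict.no_such_method()
    except AttributeError:
        print('spec=[] rejects any attribute not listed in the spec')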
--- packages/google-cloud-logging/unit_tests/test_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/unit_tests/test_client.py index 85d464a83af0..825b47f2f9b4 100644 --- a/packages/google-cloud-logging/unit_tests/test_client.py +++ b/packages/google-cloud-logging/unit_tests/test_client.py @@ -617,7 +617,7 @@ def test_setup_logging(self): http_mock = mock.Mock(spec=httplib2.Http) deepcopy = mock.Mock(return_value=http_mock) - setup_logging = mock.Mock() + setup_logging = mock.Mock(spec=[]) credentials = _make_credentials() From e50c6ca75ade3fb5b9983e1523c62074499bdcdd Mon Sep 17 00:00:00 2001 From: daspecster Date: Wed, 15 Mar 2017 13:23:23 -0400 Subject: [PATCH 081/855] Fix double conversion of datetime for log entries. --- packages/google-cloud-logging/google/cloud/logging/_gax.py | 3 --- packages/google-cloud-logging/unit_tests/test__gax.py | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 887dd80ccb33..6a4ede985e42 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -33,7 +33,6 @@ from google.protobuf.json_format import ParseDict from grpc import StatusCode -from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import make_secure_channel from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict @@ -452,8 +451,6 @@ def _log_entry_mapping_to_pb(mapping): the keys expected in the JSON API. """ entry_pb = LogEntry() - if 'timestamp' in mapping: - mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp']) ParseDict(mapping, entry_pb) return entry_pb diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 35d71750aa93..8054756cfc65 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -412,7 +412,7 @@ def test_write_entries_w_extra_properties(self): 'severity': SEVERITY, 'labels': LABELS, 'insertId': IID, - 'timestamp': NOW, + 'timestamp': NOW.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), 'httpRequest': REQUEST, 'operation': OPERATION, } From a064dfdb0b313dabe1e3f6b6902ac0b2866f7b6e Mon Sep 17 00:00:00 2001 From: daspecster Date: Wed, 15 Mar 2017 14:23:13 -0400 Subject: [PATCH 082/855] Convert batch timestamp to string. 
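Batch.commit assembles plain JSON mappings, and a datetime instance is not
JSON serializable, so the timestamp is now rendered as an RFC 3339 string
before the entries reach the API layer. Sketch of the conversion via the
google.cloud._helpers utility:

    import datetime
    from google.cloud._helpers import UTC, _datetime_to_rfc3339

    when = datetime.datetime(2017, 3, 15, 14, 23, 13, tzinfo=UTC)
    print(_datetime_to_rfc3339(when))  # 2017-03-15T14:23:13.000000Z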
--- .../google-cloud-logging/google/cloud/logging/logger.py | 2 +- packages/google-cloud-logging/unit_tests/test_logger.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 4ea35881765c..b81c27389ef4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -476,7 +476,7 @@ def commit(self, client=None): if http_req is not None: info['httpRequest'] = http_req if timestamp is not None: - info['timestamp'] = timestamp + info['timestamp'] = _datetime_to_rfc3339(timestamp) entries.append(info) client.logging_api.write_entries(entries, **kwargs) diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 31bad0402d86..1264fe798f64 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -667,10 +667,13 @@ def test_commit_w_bound_client(self): 'type': 'global', } ENTRIES = [ - {'textPayload': TEXT, 'insertId': IID1, 'timestamp': TIMESTAMP1}, - {'jsonPayload': STRUCT, 'insertId': IID2, 'timestamp': TIMESTAMP2}, + {'textPayload': TEXT, 'insertId': IID1, + 'timestamp': TIMESTAMP1.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, + {'jsonPayload': STRUCT, 'insertId': IID2, + 'timestamp': TIMESTAMP2.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, {'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3, 'timestamp': TIMESTAMP3}, + 'insertId': IID3, + 'timestamp': TIMESTAMP3.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() From 7793193bc549f48706e0e3621056a6a36a71e27d Mon Sep 17 00:00:00 2001 From: daspecster Date: Thu, 16 Mar 2017 00:36:59 -0400 Subject: [PATCH 083/855] Move imports in logging system tests, use _datetime_to_rfc3339. 
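Follow-up to the previous commit: the tests now exercise the same helper
the library uses instead of hand-rolling strftime. For UTC datetimes the
two spellings agree, as this quick illustrative check shows:

    import datetime
    from google.cloud._helpers import UTC, _datetime_to_rfc3339

    now = datetime.datetime.utcnow().replace(tzinfo=UTC)
    assert _datetime_to_rfc3339(now) == now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')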
--- packages/google-cloud-logging/unit_tests/test__gax.py | 3 ++- packages/google-cloud-logging/unit_tests/test_logger.py | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/unit_tests/test__gax.py index 8054756cfc65..d1f73c699827 100644 --- a/packages/google-cloud-logging/unit_tests/test__gax.py +++ b/packages/google-cloud-logging/unit_tests/test__gax.py @@ -369,6 +369,7 @@ def test_write_entries_w_extra_properties(self): from datetime import datetime from google.logging.type.log_severity_pb2 import WARNING from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import UTC, _pb_timestamp_to_datetime NOW = datetime.utcnow().replace(tzinfo=UTC) @@ -412,7 +413,7 @@ def test_write_entries_w_extra_properties(self): 'severity': SEVERITY, 'labels': LABELS, 'insertId': IID, - 'timestamp': NOW.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + 'timestamp': _datetime_to_rfc3339(NOW), 'httpRequest': REQUEST, 'operation': OPERATION, } diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/unit_tests/test_logger.py index 1264fe798f64..541c9ec501c4 100644 --- a/packages/google-cloud-logging/unit_tests/test_logger.py +++ b/packages/google-cloud-logging/unit_tests/test_logger.py @@ -653,6 +653,7 @@ def test_commit_w_bound_client(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value + from google.cloud._helpers import _datetime_to_rfc3339 TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -668,12 +669,12 @@ def test_commit_w_bound_client(self): } ENTRIES = [ {'textPayload': TEXT, 'insertId': IID1, - 'timestamp': TIMESTAMP1.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP1)}, {'jsonPayload': STRUCT, 'insertId': IID2, - 'timestamp': TIMESTAMP2.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP2)}, {'protoPayload': json.loads(MessageToJson(message)), 'insertId': IID3, - 'timestamp': TIMESTAMP3.strftime("%Y-%m-%dT%H:%M:%S.%fZ")}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP3)}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() From 3bac61d4344e7aae2aaf100621c7724fe56db67a Mon Sep 17 00:00:00 2001 From: daspecster Date: Wed, 22 Mar 2017 16:25:07 -0400 Subject: [PATCH 084/855] Update references that were 301 redirecting. --- .../google/cloud/logging/_gax.py | 4 +-- .../google/cloud/logging/_http.py | 34 +++++++++---------- .../google/cloud/logging/client.py | 14 ++++---- .../google/cloud/logging/entries.py | 6 ++-- .../google/cloud/logging/logger.py | 12 +++---- .../google/cloud/logging/metric.py | 12 +++---- .../google/cloud/logging/sink.py | 12 +++---- 7 files changed, 47 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 6a4ede985e42..e2f048fbd54f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -194,7 +194,7 @@ def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. 
See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type project: str :param project: ID of the project in which to create the sink. @@ -343,7 +343,7 @@ def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type project: str :param project: ID of the project in which to create the metric. diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 2764736b1e39..d9e7e4dacacd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -53,8 +53,8 @@ class _LoggingAPI(object): """Helper mapping logging-related APIs. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :type client: :class:`~google.cloud.logging.client.Client` :param client: The client used to make API requests. @@ -69,7 +69,7 @@ def list_entries(self, projects, filter_=None, order_by=None, """Return a page of log entry resources. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, @@ -128,7 +128,7 @@ def write_entries(self, entries, logger_name=None, resource=None, """API call: log an entry resource via a POST request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type entries: sequence of mapping :param entries: the log entry resources to log. @@ -162,7 +162,7 @@ def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete :type project: str :param project: ID of project containing the log entries to delete @@ -178,7 +178,7 @@ class _SinksAPI(object): """Helper mapping sink-related APIs. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks :type client: :class:`~google.cloud.logging.client.Client` :param client: The client used to make API requests. @@ -191,7 +191,7 @@ def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list :type project: str :param project: ID of the project whose sinks are to be listed. @@ -225,7 +225,7 @@ def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. 
See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type project: str :param project: ID of the project in which to create the sink. @@ -253,7 +253,7 @@ def sink_get(self, project, sink_name): """API call: retrieve a sink resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get :type project: str :param project: ID of the project containing the sink. @@ -271,7 +271,7 @@ def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/update + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update :type project: str :param project: ID of the project containing the sink. @@ -299,7 +299,7 @@ def sink_delete(self, project, sink_name): """API call: delete a sink resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete :type project: str :param project: ID of the project containing the sink. @@ -315,7 +315,7 @@ class _MetricsAPI(object): """Helper mapping sink-related APIs. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics :type client: :class:`~google.cloud.logging.client.Client` :param client: The client used to make API requests. @@ -328,7 +328,7 @@ def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list :type project: str :param project: ID of the project whose metrics are to be listed. @@ -362,7 +362,7 @@ def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type project: str :param project: ID of the project in which to create the metric. @@ -389,7 +389,7 @@ def metric_get(self, project, metric_name): """API call: retrieve a metric resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type project: str :param project: ID of the project containing the metric. @@ -407,7 +407,7 @@ def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/update + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type project: str :param project: ID of the project containing the metric. @@ -434,7 +434,7 @@ def metric_delete(self, project, metric_name): """API call: delete a metric resource. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type project: str :param project: ID of the project containing the metric. 
diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 130db45c2855..2da6eeb38c07 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -112,8 +112,8 @@ def logging_api(self): """Helper for logging-related API calls. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ if self._logging_api is None: if self._use_gax: @@ -127,7 +127,7 @@ def sinks_api(self): """Helper for log sink-related API calls. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks """ if self._sinks_api is None: if self._use_gax: @@ -141,7 +141,7 @@ def metrics_api(self): """Helper for log metric-related API calls. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics """ if self._metrics_api is None: if self._use_gax: @@ -166,7 +166,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, """Return a page of log entries. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, @@ -228,7 +228,7 @@ def list_sinks(self, page_size=None, page_token=None): """List sinks for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list :type page_size: int :param page_size: maximum number of sinks to return, If not passed, @@ -273,7 +273,7 @@ def list_metrics(self, page_size=None, page_token=None): """List metrics for the project associated with this client. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list :type page_size: int :param page_size: maximum number of metrics to return, If not passed, diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 417d42cefdca..1ae5d34ec8b9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -125,7 +125,7 @@ class TextEntry(_BaseEntry): """Entry created with ``textPayload``. See: - https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry + https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry """ _PAYLOAD_KEY = 'textPayload' @@ -134,7 +134,7 @@ class StructEntry(_BaseEntry): """Entry created with ``jsonPayload``. See: - https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry + https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry """ _PAYLOAD_KEY = 'jsonPayload' @@ -143,7 +143,7 @@ class ProtobufEntry(_BaseEntry): """Entry created with ``protoPayload``. 
See: - https://cloud.google.com/logging/docs/api/reference/rest/Shared.Types/LogEntry + https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry """ _PAYLOAD_KEY = 'protoPayload' diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index b81c27389ef4..459647bbea67 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -24,7 +24,7 @@ class Logger(object): """Loggers represent named targets for log entries. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :type name: str :param name: the name of the logger @@ -169,7 +169,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, """API call: log a text message via a POST request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type text: str :param text: the log message. @@ -206,7 +206,7 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, """API call: log a structured message via a POST request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type info: dict :param info: the log entry information @@ -243,7 +243,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, """API call: log a protobuf message via a POST request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/write + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type message: Protobuf message :param message: the message to be logged @@ -279,7 +279,7 @@ def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.logs/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -294,7 +294,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, """Return a page of log entries. See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/entries/list + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings :param projects: project IDs to include. If not passed, diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index e0912583c8a9..8067fb281b23 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -21,7 +21,7 @@ class Metric(object): """Metrics represent named filters for log entries.
See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics :type name: str :param name: the name of the metric @@ -103,7 +103,7 @@ def create(self, client=None): """API call: create the metric via a PUT request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -118,7 +118,7 @@ def exists(self, client=None): """API call: test for the existence of the metric via a GET request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -141,7 +141,7 @@ def reload(self, client=None): """API call: sync local metric configuration via a GET request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -157,7 +157,7 @@ def update(self, client=None): """API call: update metric configuration via a PUT request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/update + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -172,7 +172,7 @@ def delete(self, client=None): """API call: delete a metric via a DELETE request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index aeadde05ed9a..184cf36b00e6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -21,7 +21,7 @@ class Sink(object): """Sinks represent filtered exports for log entries. 
See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks :type name: str :param name: the name of the sink @@ -107,7 +107,7 @@ def create(self, client=None): """API call: create the sink via a PUT request See: - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -122,7 +122,7 @@ def exists(self, client=None): """API call: test for the existence of the sink via a GET request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -145,7 +145,7 @@ def reload(self, client=None): """API call: sync local sink configuration via a GET request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/get + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -161,7 +161,7 @@ def update(self, client=None): """API call: update sink configuration via a PUT request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/update + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -176,7 +176,7 @@ def delete(self, client=None): """API call: delete a sink via a DELETE request See - https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/delete + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` From 84e5192dd02d2062bc55ad7d9537eaf52deef500 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Mar 2017 14:49:26 -0700 Subject: [PATCH 085/855] CI Rehash (#3146) --- packages/google-cloud-logging/.flake8 | 6 + packages/google-cloud-logging/LICENSE | 202 ++++++++ packages/google-cloud-logging/MANIFEST.in | 8 +- .../google/cloud/logging/__init__.py | 2 + .../google/cloud/logging/handlers/__init__.py | 3 + .../logging/handlers/transports/__init__.py | 2 + packages/google-cloud-logging/nox.py | 88 ++++ packages/google-cloud-logging/setup.py | 2 +- .../google-cloud-logging/tests/__init__.py | 0 packages/google-cloud-logging/tests/system.py | 462 ++++++++++++++++++ .../{unit_tests => tests/unit}/__init__.py | 0 .../unit}/handlers/__init__.py | 0 .../unit}/handlers/test_app_engine.py | 0 .../unit}/handlers/test_container_engine.py | 0 .../unit}/handlers/test_handlers.py | 0 .../unit}/handlers/transports/__init__.py | 0 .../transports/test_background_thread.py | 0 .../unit}/handlers/transports/test_base.py | 0 .../unit}/handlers/transports/test_sync.py | 0 .../{unit_tests => tests/unit}/test__gax.py | 0 .../unit}/test__helpers.py | 0 .../{unit_tests => tests/unit}/test__http.py | 0 .../{unit_tests => tests/unit}/test_client.py | 0 .../unit}/test_entries.py | 0 .../{unit_tests => tests/unit}/test_logger.py | 0 .../{unit_tests => tests/unit}/test_metric.py | 0 .../{unit_tests => tests/unit}/test_sink.py | 0 packages/google-cloud-logging/tox.ini | 35 -- 28 files changed, 770 insertions(+), 40 deletions(-) create mode 100644 packages/google-cloud-logging/.flake8 
create mode 100644 packages/google-cloud-logging/LICENSE create mode 100644 packages/google-cloud-logging/nox.py create mode 100644 packages/google-cloud-logging/tests/__init__.py create mode 100644 packages/google-cloud-logging/tests/system.py rename packages/google-cloud-logging/{unit_tests => tests/unit}/__init__.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/__init__.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/test_app_engine.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/test_container_engine.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/test_handlers.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/transports/__init__.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/transports/test_background_thread.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/transports/test_base.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/handlers/transports/test_sync.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test__gax.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test__helpers.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test__http.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test_client.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test_entries.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test_logger.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test_metric.py (100%) rename packages/google-cloud-logging/{unit_tests => tests/unit}/test_sink.py (100%) delete mode 100644 packages/google-cloud-logging/tox.ini diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 new file mode 100644 index 000000000000..25168dc87605 --- /dev/null +++ b/packages/google-cloud-logging/.flake8 @@ -0,0 +1,6 @@ +[flake8] +exclude = + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-logging/LICENSE b/packages/google-cloud-logging/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-logging/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index cb3a2b9ef4fa..9f7100c9528a 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,4 +1,4 @@ -include README.rst -graft google -graft unit_tests -global-exclude *.pyc +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include unit_tests * +global-exclude *.pyc __pycache__ diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index 17df46b08bad..cced78370c6a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -25,3 +25,5 @@ """Query string to order by ascending timestamps.""" DESCENDING = 'timestamp desc' """Query string to order by descending timestamps.""" + +__all__ = ['__version__', 'ASCENDING', 'Client', 'DESCENDING'] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py index 9745296e9782..432419543bea 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -19,3 +19,6 @@ ContainerEngineHandler) from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.handlers import setup_logging + +__all__ = ['AppEngineHandler', 'CloudLoggingHandler', 'ContainerEngineHandler', + 'setup_logging'] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py index 6c689e378a42..b1091b70788d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py @@ -24,3 +24,5 @@ from google.cloud.logging.handlers.transports.sync import SyncTransport from google.cloud.logging.handlers.transports.background_thread import ( BackgroundThreadTransport) + +__all__ = ['BackgroundThreadTransport', 'SyncTransport', 'Transport'] diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py new file mode 100644 index 000000000000..59e85f72b4a9 --- /dev/null +++ b/packages/google-cloud-logging/nox.py @@ -0,0 +1,88 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import os + +import nox + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) +def unit_tests(session, python_version): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package in-place.
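+ # NOTE: '../core/' is the sibling google-cloud-core package, installed + # from the local checkout so these tests run against the in-repo version.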
+ session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run('py.test', '--quiet', + '--cov=google.cloud.logging', '--cov=tests.unit', '--cov-append', + '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'tests/unit', + ) + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.6']) +def system_tests(session, python_version): + """Run the system test suite.""" + + # Sanity check: Only run system tests if the environment variable is set. + if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): + return + + # Run the system tests against latest Python 2 and Python 3 only. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install('mock', 'pytest', + '../core/', '../test_utils/', + '../bigquery/', '../pubsub/', '../storage/') + session.install('.') + + # Run py.test against the system tests. + session.run('py.test', '--quiet', 'tests/system.py') + + +@nox.session +def lint(session): + """Run flake8. + + Returns a failure if flake8 finds linting errors or sufficiently + serious code quality issues. + """ + session.interpreter = 'python3.6' + session.install('flake8') + session.install('.') + session.run('flake8', 'google/cloud/logging') + + +@nox.session +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d56b15995c71..87a4e66fe15e 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -64,7 +64,7 @@ 'google', 'google.cloud', ], - packages=find_packages(), + packages=find_packages(exclude=('unit_tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) diff --git a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py new file mode 100644 index 000000000000..f564ec3dcbdf --- /dev/null +++ b/packages/google-cloud-logging/tests/system.py @@ -0,0 +1,462 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
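+ +# NOTE: These system tests exercise the live Stackdriver Logging API; the +# ``system_tests`` nox session runs them only when the +# GOOGLE_APPLICATION_CREDENTIALS environment variable is set.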
+ +import datetime +import logging +import unittest + +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code +from grpc import StatusCode + +from google.cloud._helpers import UTC +from google.cloud.exceptions import Conflict +from google.cloud.exceptions import NotFound +from google.cloud.exceptions import TooManyRequests +import google.cloud.logging +import google.cloud.logging.handlers.handlers +from google.cloud.logging.handlers.handlers import CloudLoggingHandler +from google.cloud.logging.handlers.transports import SyncTransport +from google.cloud.logging import client + +from test_utils.retry import RetryErrors +from test_utils.retry import RetryResult +from test_utils.system import unique_resource_id + +_RESOURCE_ID = unique_resource_id('-') +DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' +DEFAULT_DESCRIPTION = 'System testing' +retry_429 = RetryErrors(TooManyRequests) + + +def _retry_on_unavailable(exc): + """Retry only errors whose status code is 'UNAVAILABLE'. + + :type exc: :class:`~google.gax.errors.GaxError` + :param exc: The exception that was caught. + + :rtype: bool + :returns: Boolean indicating if the exception was UNAVAILABLE. + """ + return exc_to_code(exc) == StatusCode.UNAVAILABLE + + +def _consume_entries(logger): + """Consume all log entries from logger iterator. + + :type logger: :class:`~google.cloud.logging.logger.Logger` + :param logger: A Logger containing entries. + + :rtype: list + :returns: List of all entries consumed. + """ + return list(logger.list_entries()) + + +def _list_entries(logger): + """Retry-ing list entries in a logger. + + Retry until there are actual results and retry on any + failures. + + :type logger: :class:`~google.cloud.logging.logger.Logger` + :param logger: A Logger containing entries. + + :rtype: list + :returns: List of all entries consumed. + """ + inner = RetryResult(_has_entries)(_consume_entries) + outer = RetryErrors(GaxError, _retry_on_unavailable)(inner) + return outer(logger) + + +def _has_entries(result): + return len(result) > 0 + + +class Config(object): + """Run-time configuration to be modified at set-up. + + This is a mutable stand-in to allow test set-up to modify + global state. 
+ """ + CLIENT = None + + +def setUpModule(): + Config.CLIENT = client.Client() + + +class TestLogging(unittest.TestCase): + + JSON_PAYLOAD = { + 'message': 'System test: test_log_struct', + 'weather': { + 'clouds': 'party or partly', + 'temperature': 70, + 'precipitation': False, + }, + } + + def setUp(self): + self.to_delete = [] + self._handlers_cache = logging.getLogger().handlers[:] + + def tearDown(self): + retry = RetryErrors(NotFound, max_tries=10) + for doomed in self.to_delete: + retry(doomed.delete)() + logging.getLogger().handlers = self._handlers_cache[:] + + @staticmethod + def _logger_name(): + return 'system-tests-logger' + unique_resource_id('-') + + def test_log_text(self): + TEXT_PAYLOAD = 'System test: test_log_text' + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + + def test_log_text_with_timestamp(self): + text_payload = 'System test: test_log_text_with_timestamp' + logger = Config.CLIENT.logger(self._logger_name()) + now = datetime.datetime.utcnow() + + self.to_delete.append(logger) + + logger.log_text(text_payload, timestamp=now) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + + def test_log_text_w_metadata(self): + TEXT_PAYLOAD = 'System test: test_log_text' + INSERT_ID = 'INSERTID' + SEVERITY = 'INFO' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = 500 + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + + logger.log_text(TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, + http_request=REQUEST) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + + entry = entries[0] + self.assertEqual(entry.payload, TEXT_PAYLOAD) + self.assertEqual(entry.insert_id, INSERT_ID) + self.assertEqual(entry.severity, SEVERITY) + + request = entry.http_request + self.assertEqual(request['requestMethod'], METHOD) + self.assertEqual(request['requestUrl'], URI) + self.assertEqual(request['status'], STATUS) + + def test_log_struct(self): + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + + logger.log_struct(self.JSON_PAYLOAD) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + + def test_log_handler_async(self): + LOG_MESSAGE = 'It was the worst of times' + + handler = CloudLoggingHandler(Config.CLIENT) + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(handler.name) + cloud_logger.addHandler(handler) + cloud_logger.warn(LOG_MESSAGE) + entries = _list_entries(logger) + expected_payload = { + 'message': LOG_MESSAGE, + 'python_logger': handler.name + } + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, expected_payload) + + def test_log_handler_sync(self): + LOG_MESSAGE = 'It was the best of times.' 
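+ + # NOTE: SyncTransport sends each record in a blocking API call as part + # of the logging call, unlike the default background-thread transport.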
+ + handler = CloudLoggingHandler(Config.CLIENT, + name=self._logger_name(), + transport=SyncTransport) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + LOGGER_NAME = 'mylogger' + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warn(LOG_MESSAGE) + + entries = _list_entries(logger) + expected_payload = { + 'message': LOG_MESSAGE, + 'python_logger': LOGGER_NAME + } + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, expected_payload) + + def test_log_root_handler(self): + LOG_MESSAGE = 'It was the best of times.' + + handler = CloudLoggingHandler(Config.CLIENT, name=self._logger_name()) + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + google.cloud.logging.handlers.handlers.setup_logging(handler) + logging.warn(LOG_MESSAGE) + + entries = _list_entries(logger) + expected_payload = { + 'message': LOG_MESSAGE, + 'python_logger': 'root' + } + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, expected_payload) + + def test_log_struct_w_metadata(self): + INSERT_ID = 'INSERTID' + SEVERITY = 'INFO' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = 500 + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = Config.CLIENT.logger(self._logger_name()) + self.to_delete.append(logger) + + logger.log_struct(self.JSON_PAYLOAD, insert_id=INSERT_ID, + severity=SEVERITY, http_request=REQUEST) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request['requestMethod'], METHOD) + self.assertEqual(request['requestUrl'], URI) + self.assertEqual(request['status'], STATUS) + + def test_create_metric(self): + METRIC_NAME = 'test-create-metric%s' % (_RESOURCE_ID,) + metric = Config.CLIENT.metric( + METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) + + def test_list_metrics(self): + METRIC_NAME = 'test-list-metrics%s' % (_RESOURCE_ID,) + metric = Config.CLIENT.metric( + METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + before_metrics = list(Config.CLIENT.list_metrics()) + before_names = set(metric.name for metric in before_metrics) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) + after_metrics = list(Config.CLIENT.list_metrics()) + after_names = set(metric.name for metric in after_metrics) + self.assertEqual(after_names - before_names, + set([METRIC_NAME])) + + def test_reload_metric(self): + METRIC_NAME = 'test-reload-metric%s' % (_RESOURCE_ID,) + retry = RetryErrors(Conflict) + metric = Config.CLIENT.metric( + METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + retry(metric.create)() + self.to_delete.append(metric) + metric.filter_ = 'logName:other' + metric.description = 'local changes' + metric.reload() + self.assertEqual(metric.filter_, DEFAULT_FILTER) + self.assertEqual(metric.description, DEFAULT_DESCRIPTION) + + def test_update_metric(self): + METRIC_NAME = 'test-update-metric%s' % (_RESOURCE_ID,) + retry = 
RetryErrors(Conflict) + NEW_FILTER = 'logName:other' + NEW_DESCRIPTION = 'updated' + metric = Config.CLIENT.metric( + METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + retry(metric.create)() + self.to_delete.append(metric) + metric.filter_ = NEW_FILTER + metric.description = NEW_DESCRIPTION + metric.update() + after_metrics = list(Config.CLIENT.list_metrics()) + after_info = {metric.name: metric for metric in after_metrics} + after = after_info[METRIC_NAME] + self.assertEqual(after.filter_, NEW_FILTER) + self.assertEqual(after.description, NEW_DESCRIPTION) + + def _init_storage_bucket(self): + from google.cloud import storage + BUCKET_NAME = 'g-c-python-testing%s' % (_RESOURCE_ID,) + BUCKET_URI = 'storage.googleapis.com/%s' % (BUCKET_NAME,) + + # Create the destination bucket, and set up the ACL to allow + # Stackdriver Logging to write into it. + storage_client = storage.Client() + bucket = storage_client.bucket(BUCKET_NAME) + retry_429(bucket.create)() + self.to_delete.append(bucket) + bucket.acl.reload() + logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group.grant_owner() + bucket.acl.add_entity(logs_group) + bucket.acl.save() + + return BUCKET_URI + + def test_create_sink_storage_bucket(self): + uri = self._init_storage_bucket() + SINK_NAME = 'test-create-sink-bucket%s' % (_RESOURCE_ID,) + + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def test_create_sink_pubsub_topic(self): + from google.cloud.pubsub import client as pubsub_client + SINK_NAME = 'test-create-sink-topic%s' % (_RESOURCE_ID,) + TOPIC_NAME = 'logging-test-sink%s' % (_RESOURCE_ID,) + + # Create the destination topic, and set up the IAM policy to allow + # Stackdriver Logging to write into it. + pubsub_client = pubsub_client.Client() + topic = pubsub_client.topic(TOPIC_NAME) + topic.create() + self.to_delete.append(topic) + policy = topic.get_iam_policy() + policy.owners.add(policy.group('cloud-logs@google.com')) + topic.set_iam_policy(policy) + + TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name,) + + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def _init_bigquery_dataset(self): + from google.cloud import bigquery + from google.cloud.bigquery.dataset import AccessGrant + DATASET_NAME = ( + 'system_testing_dataset' + _RESOURCE_ID).replace('-', '_') + DATASET_URI = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( + Config.CLIENT.project, DATASET_NAME,) + + # Create the destination dataset, and set up the ACL to allow + # Stackdriver Logging to write into it. 
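+ # (BigQuery grants access via dataset access entries rather than + # bucket-style ACLs, hence the 'WRITER' AccessGrant added below.)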
+ bigquery_client = bigquery.Client() + dataset = bigquery_client.dataset(DATASET_NAME) + dataset.create() + self.to_delete.append(dataset) + dataset.reload() + grants = dataset.access_grants + grants.append(AccessGrant( + 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) + dataset.access_grants = grants + dataset.update() + return DATASET_URI + + def test_create_sink_bigquery_dataset(self): + SINK_NAME = 'test-create-sink-dataset%s' % (_RESOURCE_ID,) + uri = self._init_bigquery_dataset() + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + + def test_list_sinks(self): + SINK_NAME = 'test-list-sinks%s' % (_RESOURCE_ID,) + uri = self._init_storage_bucket() + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + before_sinks = list(Config.CLIENT.list_sinks()) + before_names = set(sink.name for sink in before_sinks) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) + after_sinks = list(Config.CLIENT.list_sinks()) + after_names = set(sink.name for sink in after_sinks) + self.assertEqual(after_names - before_names, + set([SINK_NAME])) + + def test_reload_sink(self): + SINK_NAME = 'test-reload-sink%s' % (_RESOURCE_ID,) + retry = RetryErrors(Conflict) + uri = self._init_bigquery_dataset() + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + self.assertFalse(sink.exists()) + retry(sink.create)() + self.to_delete.append(sink) + sink.filter_ = 'BOGUS FILTER' + sink.destination = 'BOGUS DESTINATION' + sink.reload() + self.assertEqual(sink.filter_, DEFAULT_FILTER) + self.assertEqual(sink.destination, uri) + + def test_update_sink(self): + SINK_NAME = 'test-update-sink%s' % (_RESOURCE_ID,) + retry = RetryErrors(Conflict, max_tries=10) + bucket_uri = self._init_storage_bucket() + dataset_uri = self._init_bigquery_dataset() + UPDATED_FILTER = 'logName:syslog' + sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, bucket_uri) + self.assertFalse(sink.exists()) + retry(sink.create)() + self.to_delete.append(sink) + sink.filter_ = UPDATED_FILTER + sink.destination = dataset_uri + sink.update() + self.assertEqual(sink.filter_, UPDATED_FILTER) + self.assertEqual(sink.destination, dataset_uri) diff --git a/packages/google-cloud-logging/unit_tests/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/__init__.py rename to packages/google-cloud-logging/tests/unit/__init__.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/__init__.py b/packages/google-cloud-logging/tests/unit/handlers/__init__.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/__init__.py rename to packages/google-cloud-logging/tests/unit/handlers/__init__.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/test_app_engine.py rename to packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/test_container_engine.py rename to 
packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/test_handlers.py rename to packages/google-cloud-logging/tests/unit/handlers/test_handlers.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py b/packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/transports/__init__.py rename to packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/transports/test_background_thread.py rename to packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/transports/test_base.py rename to packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py diff --git a/packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/handlers/transports/test_sync.py rename to packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py diff --git a/packages/google-cloud-logging/unit_tests/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test__gax.py rename to packages/google-cloud-logging/tests/unit/test__gax.py diff --git a/packages/google-cloud-logging/unit_tests/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test__helpers.py rename to packages/google-cloud-logging/tests/unit/test__helpers.py diff --git a/packages/google-cloud-logging/unit_tests/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test__http.py rename to packages/google-cloud-logging/tests/unit/test__http.py diff --git a/packages/google-cloud-logging/unit_tests/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_client.py rename to packages/google-cloud-logging/tests/unit/test_client.py diff --git a/packages/google-cloud-logging/unit_tests/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_entries.py rename to packages/google-cloud-logging/tests/unit/test_entries.py diff --git a/packages/google-cloud-logging/unit_tests/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_logger.py rename to 
packages/google-cloud-logging/tests/unit/test_logger.py diff --git a/packages/google-cloud-logging/unit_tests/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_metric.py rename to packages/google-cloud-logging/tests/unit/test_metric.py diff --git a/packages/google-cloud-logging/unit_tests/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py similarity index 100% rename from packages/google-cloud-logging/unit_tests/test_sink.py rename to packages/google-cloud-logging/tests/unit/test_sink.py diff --git a/packages/google-cloud-logging/tox.ini b/packages/google-cloud-logging/tox.ini deleted file mode 100644 index db3e8a654954..000000000000 --- a/packages/google-cloud-logging/tox.ini +++ /dev/null @@ -1,35 +0,0 @@ -[tox] -envlist = - py27,py34,py35,cover - -[testing] -localdeps = - pip install --quiet --upgrade {toxinidir}/../core -deps = - {toxinidir}/../core - mock - pytest -covercmd = - py.test --quiet \ - --cov=google.cloud.logging \ - --cov=unit_tests \ - --cov-config {toxinidir}/.coveragerc \ - unit_tests - -[testenv] -commands = - {[testing]localdeps} - py.test --quiet {posargs} unit_tests -deps = - {[testing]deps} - -[testenv:cover] -basepython = - python2.7 -commands = - {[testing]localdeps} - {[testing]covercmd} -deps = - {[testenv]deps} - coverage - pytest-cov From fbfe21de63b13763f833a5977914b4aa3a59d475 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Mar 2017 10:20:16 -0700 Subject: [PATCH 086/855] Fixing up some format strings in nox configs. Using `STRING_TEMPLATE % VARIABLE` can introduce hard-to-find bugs if `VARIABLE` is expected to be a string but ends up being a tuple. Instead of using percent formatting, just using `.format`. Also making tweaks to `get_target_packages` to make some path manipulation / checks OS-independent. --- packages/google-cloud-logging/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 59e85f72b4a9..e72e4e3c6aae 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -25,7 +25,7 @@ def unit_tests(session, python_version): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', '../core/') @@ -49,7 +49,7 @@ def system_tests(session, python_version): return # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package into the # virutalenv's dist-packages. From 9e5c33ea14698bad9eeb9a94cde986241cf9b11c Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 30 Mar 2017 10:54:26 -0400 Subject: [PATCH 087/855] Fix untested snippet examples. 
(#3219) --- .../google/cloud/logging/handlers/handlers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index 4cf3f0cb20e9..ae66c4516ee9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -52,6 +52,7 @@ class CloudLoggingHandler(logging.StreamHandler): .. code-block:: python + import logging import google.cloud.logging from google.cloud.logging.handlers import CloudLoggingHandler @@ -62,7 +63,7 @@ class CloudLoggingHandler(logging.StreamHandler): cloud_logger.setLevel(logging.INFO) cloud_logger.addHandler(handler) - cloud.logger.error('bad news') # API call + cloud_logger.error('bad news') # API call """ @@ -117,7 +118,7 @@ def setup_logging(handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, client = google.cloud.logging.Client() handler = CloudLoggingHandler(client) - google.cloud.logging.setup_logging(handler) + google.cloud.logging.handlers.setup_logging(handler) logging.getLogger().setLevel(logging.DEBUG) logging.error('bad news') # API call From e4429d0a6be31aafd34b443841909a484dcc5669 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Mar 2017 08:43:22 -0700 Subject: [PATCH 088/855] Renaming http argument(s) as _http. (#3235) --- .../google/cloud/logging/client.py | 54 ++++++++++--------- .../transports/test_background_thread.py | 4 +- .../tests/unit/test__gax.py | 22 ++++---- .../tests/unit/test__http.py | 4 +- .../tests/unit/test_client.py | 36 ++++++------- .../tests/unit/test_logger.py | 4 +- 6 files changed, 64 insertions(+), 60 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 2da6eeb38c07..34bf8a3074e9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -22,12 +22,12 @@ from google.cloud.logging._gax import make_gax_metrics_api from google.cloud.logging._gax import make_gax_sinks_api except ImportError: # pragma: NO COVER - _HAVE_GAX = False + _HAVE_GRPC = False make_gax_logging_api = None make_gax_metrics_api = None make_gax_sinks_api = None else: - _HAVE_GAX = True + _HAVE_GRPC = True from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC @@ -46,8 +46,8 @@ from google.cloud.logging.sink import Sink -_DISABLE_GAX = os.getenv(DISABLE_GRPC, False) -_USE_GAX = _HAVE_GAX and not _DISABLE_GAX +_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC _APPENGINE_FLEXIBLE_ENV_VM = 'GAE_APPENGINE_HOSTNAME' """Environment variable set in App Engine when vm:true is set.""" @@ -69,22 +69,26 @@ class Client(ClientWithProject): :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``http`` object is + client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type http: :class:`~httplib2.Http` - :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. 
If not passed, an - ``http`` object is created that is bound to the - ``credentials`` for the current object. - - :type use_gax: bool - :param use_gax: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment - variable + :type _http: :class:`~httplib2.Http` + :param _http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``_http`` object is created that is bound to the + ``credentials`` for the current object. + This parameter should be considered private, and could + change in the future. + + :type _use_grpc: bool + :param _use_grpc: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` + environment variable + This parameter should be considered private, and could + change in the future. """ _logging_api = None @@ -98,14 +102,14 @@ class Client(ClientWithProject): """The scopes required for authenticating as a Logging consumer.""" def __init__(self, project=None, credentials=None, - http=None, use_gax=None): + _http=None, _use_grpc=None): super(Client, self).__init__( - project=project, credentials=credentials, http=http) + project=project, credentials=credentials, _http=_http) self._connection = Connection(self) - if use_gax is None: - self._use_gax = _USE_GAX + if _use_grpc is None: + self._use_grpc = _USE_GRPC else: - self._use_gax = use_gax + self._use_grpc = _use_grpc @property def logging_api(self): @@ -116,7 +120,7 @@ def logging_api(self): https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ if self._logging_api is None: - if self._use_gax: + if self._use_grpc: self._logging_api = make_gax_logging_api(self) else: self._logging_api = JSONLoggingAPI(self) @@ -130,7 +134,7 @@ def sinks_api(self): https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks """ if self._sinks_api is None: - if self._use_gax: + if self._use_grpc: self._sinks_api = make_gax_sinks_api(self) else: self._sinks_api = JSONSinksAPI(self) @@ -144,7 +148,7 @@ def metrics_api(self): https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics """ if self._metrics_api is None: - if self._use_gax: + if self._use_grpc: self._metrics_api = make_gax_metrics_api(self) else: self._metrics_api = JSONMetricsAPI(self) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index bcd54a8feb06..3952a0b6422c 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -171,11 +171,11 @@ def batch(self): class _Client(object): - def __init__(self, project, http=None, credentials=None): + def __init__(self, project, _http=None, credentials=None): import mock self.project = project - self._http = http + self._http = _http self._credentials = credentials self._connection = mock.Mock( credentials=credentials, spec=['credentials']) diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index d1f73c699827..67830a99db42 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ 
b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -21,9 +21,9 @@ import google.cloud.logging._gax # pylint: enable=unused-import except ImportError: # pragma: NO COVER - _HAVE_GAX = False + _HAVE_GRPC = False else: - _HAVE_GAX = True + _HAVE_GRPC = True from google.cloud._testing import _GAXBaseAPI @@ -45,7 +45,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_LoggingAPI(_Base, unittest.TestCase): LOG_NAME = 'log_name' LOG_PATH = 'projects/%s/logs/%s' % (_Base.PROJECT, LOG_NAME) @@ -90,7 +90,7 @@ def test_list_entries_no_paging(self): response = _GAXPageIterator([entry_pb], page_token=TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=True) + _use_grpc=True) api = self._make_one(gax_api, client) iterator = api.list_entries( @@ -147,7 +147,7 @@ def _list_entries_with_paging_helper(self, payload, struct_pb): response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=True) + _use_grpc=True) api = self._make_one(gax_api, client) iterator = api.list_entries( @@ -288,7 +288,7 @@ def test_list_entries_with_extra_properties(self): response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) gax_api = _GAXLoggingAPI(_list_log_entries_response=response) client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=True) + _use_grpc=True) api = self._make_one(gax_api, client) iterator = api.list_entries( @@ -614,7 +614,7 @@ def test_logger_delete_error(self): self.assertIsNone(options) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_SinksAPI(_Base, unittest.TestCase): SINK_NAME = 'sink_name' SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) @@ -858,7 +858,7 @@ def test_sink_delete_hit(self): self.assertIsNone(options) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_MetricsAPI(_Base, unittest.TestCase): METRIC_NAME = 'metric_name' METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) @@ -1100,7 +1100,7 @@ def test_metric_delete_hit(self): self.assertIsNone(options) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_logging_api(unittest.TestCase): def _call_fut(self, client): @@ -1153,7 +1153,7 @@ def generated_api(channel=None, **kwargs): self.assertIs(logging_api._client, client) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_metrics_api(unittest.TestCase): def _call_fut(self, client): @@ -1206,7 +1206,7 @@ def generated_api(channel=None, **kwargs): self.assertIs(metrics_api._client, client) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_sinks_api(unittest.TestCase): def _call_fut(self, client): diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index f0121f9dd319..459c0cf304d7 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -132,7 +132,7 @@ def 
test_list_entries_no_paging(self): 'nextPageToken': TOKEN, } client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) client._connection = _Connection(RETURNED) api = self._make_one(client) @@ -210,7 +210,7 @@ def test_list_entries_w_paging(self): }], } client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) client._connection = _Connection(RETURNED) api = self._make_one(client) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 825b47f2f9b4..6cd7c42926f6 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -53,7 +53,7 @@ def test_logging_api_wo_gax(self): client = self._make_one(self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) conn = client._connection = _Connection() api = client.logging_api @@ -74,7 +74,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + _use_grpc=True) patch = mock.patch( 'google.cloud.logging.client.make_gax_logging_api', @@ -93,11 +93,11 @@ def test_no_gax_ctor(self): creds = _make_credentials() patch = mock.patch( - 'google.cloud.logging.client._USE_GAX', + 'google.cloud.logging.client._USE_GRPC', new=True) with patch: client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) api = client.logging_api self.assertIsInstance(api, _LoggingAPI) @@ -107,7 +107,7 @@ def test_sinks_api_wo_gax(self): client = self._make_one( self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) conn = client._connection = _Connection() api = client.sinks_api @@ -128,7 +128,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + _use_grpc=True) patch = mock.patch( 'google.cloud.logging.client.make_gax_sinks_api', @@ -147,7 +147,7 @@ def test_metrics_api_wo_gax(self): client = self._make_one( self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) conn = client._connection = _Connection() api = client.metrics_api @@ -168,7 +168,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=True) + _use_grpc=True) patch = mock.patch( 'google.cloud.logging.client.make_gax_metrics_api', @@ -211,7 +211,7 @@ def test_list_entries_defaults(self): }] creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) returned = { 'entries': ENTRIES, 'nextPageToken': TOKEN, @@ -275,7 +275,7 @@ def test_list_entries_explicit(self): self.PROJECT, self.LOGGER_NAME), }] client = self._make_one(self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) returned = {'entries': ENTRIES} client._connection = _Connection(returned) @@ -364,7 +364,7 @@ def test_list_sinks_no_paging(self): }] client = self._make_one(project=PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) returned = { 'sinks': SINKS, 'nextPageToken': TOKEN, @@ -410,7 +410,7 @@ def test_list_sinks_with_paging(self): 'destination': self.DESTINATION_URI, }] client = self._make_one( - project=PROJECT, credentials=_make_credentials(), use_gax=False) + project=PROJECT, credentials=_make_credentials(), 
_use_grpc=False) returned = { 'sinks': SINKS, } @@ -482,7 +482,7 @@ def test_list_metrics_no_paging(self): }] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) returned = { 'metrics': metrics, } @@ -524,7 +524,7 @@ def test_list_metrics_with_paging(self): }] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) returned = { 'metrics': metrics, 'nextPageToken': next_token, @@ -569,7 +569,7 @@ def test_get_default_handler_app_engine(self): client = self._make_one(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) with _tempdir() as tempdir: temp_log_path = os.path.join(tempdir, '{pid}') @@ -588,7 +588,7 @@ def test_get_default_handler_container_engine(self): client = self._make_one(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) with _Monkey(os, environ={_CONTAINER_ENGINE_ENV: 'True'}): handler = client.get_default_handler() @@ -606,7 +606,7 @@ def test_get_default_handler_general(self): with mock.patch('copy.deepcopy', new=deepcopy): client = self._make_one(project=self.PROJECT, credentials=credentials, - use_gax=False) + _use_grpc=False) handler = client.get_default_handler() deepcopy.assert_called_once_with(client._http) @@ -626,7 +626,7 @@ def test_setup_logging(self): new=setup_logging): client = self._make_one(project=self.PROJECT, credentials=credentials, - use_gax=False) + _use_grpc=False) client.setup_logging() deepcopy.assert_called_once_with(client._http) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 541c9ec501c4..0501bee1fd39 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -442,7 +442,7 @@ def test_list_entries_defaults(self): client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) returned = { 'nextPageToken': TOKEN, } @@ -480,7 +480,7 @@ def test_list_entries_explicit(self): PAGE_SIZE = 42 client = Client(project=self.PROJECT, credentials=_make_credentials(), - use_gax=False) + _use_grpc=False) client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( From 667e4a1e12d60a42324dd3b2f55a787579047ce5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 2017 10:04:57 -0700 Subject: [PATCH 089/855] Make logging system tests verbose. (#3244) --- packages/google-cloud-logging/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index e72e4e3c6aae..a39551beea6b 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -59,7 +59,7 @@ def system_tests(session, python_version): session.install('.') # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system.py') + session.run('py.test', '-vvv', 'tests/system.py') @nox.session From 7843574280b028b905070a63c117243722a47305 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 2017 14:45:10 -0700 Subject: [PATCH 090/855] GA and Beta Promotions (#3245) * Make clients explicitly unpickleable. Closes #3211. * Make clients explicitly unpickleable. Closes #3211. * Add GA designator, add 1.0 version numbers. * Version changes. Eep. * Oops, Speech is still alpha. 
* 0.24.0, not 0.24.1 * Remove double __getstate__ goof. * Adding 3.6 classifier where missing and fixing bad versions. Done via "git grep '0\.24'" and "git grep '0\.23'". * Fix Noxfiles for local packages. * Fixing copy-pasta issue in error reporting nox config. Also fixing bad indent in same file. * Depend on stable logging in error reporting package. * Fixing lint errors in error_reporting. These were masked because error_reporting's lint nox session was linting the datastore codebase. This also means that the error reporting package has gained __all__. * Fixing a syntax error in nox config for logging. Also fixing an indent error while I was in there. * Revert "Add docs for 'result_index' usage and a system test." This reverts commit b5742aa160f604ec7cd81873ad24ac9aa75e548d. * Fixing docs nox session for umbrella package. Two issues: - error_reporting came BEFORE logging (which means it would try to pull in a logging dep from PyPI that doesn't exist) - dns was NOT in the list of local packages * Updating upper bound on logging in error_reporting. * Un-revert typo fix. --- packages/google-cloud-logging/nox.py | 16 ++++++++++------ packages/google-cloud-logging/setup.py | 7 ++++--- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index a39551beea6b..c96ad4871a77 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -19,6 +19,9 @@ import nox +LOCAL_DEPS = ('../core/',) + + @nox.session @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) def unit_tests(session, python_version): @@ -28,11 +31,12 @@ def unit_tests(session, python_version): session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', + session.run( + 'py.test', '--quiet', '--cov=google.cloud.logging', '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', 'tests/unit', @@ -53,9 +57,9 @@ def system_tests(session, python_version): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest', - '../core/', '../test_utils/', - '../bigquery/', '../pubsub/', '../storage/') + session.install('mock', 'pytest', *LOCAL_DEPS) + session.install('../test_utils/', '../bigquery/', '../pubsub/', + '../storage/') session.install('.') # Run py.test against the system tests. @@ -70,7 +74,7 @@ def lint(session): serious code quality issues.
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/logging') diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 87a4e66fe15e..4e6054ee9e3c 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -44,20 +44,21 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Internet', ], } REQUIREMENTS = [ - 'google-cloud-core >= 0.23.1, < 0.24dev', + 'google-cloud-core >= 0.24.0, < 0.25dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='0.23.1', + version='1.0.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 5cf5498eaafe00506a533c28041b5a2c9d486a31 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 31 Mar 2017 18:46:48 -0700 Subject: [PATCH 091/855] Downgrading logging from 1.0.0 to 0.24.0. This is temporary as #2674 is still being sorted out. This way we don't block an umbrella release or the release of error reporting. --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 4e6054ee9e3c..e7b4b3b6beba 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -52,13 +52,13 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.24.0, < 0.25dev', - 'grpcio >= 1.0.2, < 2.0dev', + 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='1.0.0', + version='0.24.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 8eb951de6f7e6a1d0ed72f8fa747b4d0763e6adc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 6 Apr 2017 11:42:12 -0700 Subject: [PATCH 092/855] Gracefully continue if LogEntry.proto_payload type URL is not in registry. (#3270) * Gracefully continue if LogEntry.proto_payload type URL is not in registry. Fixes #2674. * Adding unit / system tests for "graceful" continuation on LogEntry parsing. * Docs fix for PR 3270. * Adding rtype/returns to metric_update and sink_update in logging _http module. * Add trailing commas. 
--- .../google/cloud/logging/_gax.py | 55 +++++- .../google/cloud/logging/_http.py | 10 +- .../google/cloud/logging/entries.py | 44 ++++- .../google/cloud/logging/logger.py | 40 +++-- packages/google-cloud-logging/tests/system.py | 26 +++ .../tests/unit/test__gax.py | 166 ++++++++++++++++++ .../tests/unit/test_entries.py | 28 +++ 7 files changed, 345 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index e2f048fbd54f..d1e6196bbebb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -243,6 +243,8 @@ def sink_get(self, project, sink_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogSink message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(sink_pb) def sink_update(self, project, sink_name, filter_, destination): @@ -270,11 +272,13 @@ def sink_update(self, project, sink_name, filter_, destination): path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) try: - self._gax_api.update_sink(path, sink_pb, options=options) + sink_pb = self._gax_api.update_sink(path, sink_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogSink message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(sink_pb) def sink_delete(self, project, sink_name): @@ -391,6 +395,8 @@ def metric_get(self, project, metric_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogMetric message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(metric_pb) def metric_update(self, project, metric_name, filter_, description): @@ -418,11 +424,14 @@ def metric_update(self, project, metric_name, filter_, description): metric_pb = LogMetric(name=path, filter=filter_, description=description) try: - self._gax_api.update_log_metric(path, metric_pb, options=options) + metric_pb = self._gax_api.update_log_metric( + path, metric_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogMetric message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(metric_pb) def metric_delete(self, project, metric_name): @@ -444,6 +453,35 @@ def metric_delete(self, project, metric_name): raise +def _parse_log_entry(entry_pb): + """Special helper to parse ``LogEntry`` protobuf into a dictionary. + + The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This + can be problematic if the type URL in the payload isn't in the + ``google.protobuf`` registry. To help with parsing unregistered types, + this function will remove ``proto_payload`` before parsing. + + :type entry_pb: :class:`.log_entry_pb2.LogEntry` + :param entry_pb: Log entry protobuf. + + :rtype: dict + :returns: The parsed log entry. The ``protoPayload`` key may contain + the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if + it could not be parsed.
+ """ + try: + return MessageToDict(entry_pb) + except TypeError: + if entry_pb.HasField('proto_payload'): + proto_payload = entry_pb.proto_payload + entry_pb.ClearField('proto_payload') + entry_mapping = MessageToDict(entry_pb) + entry_mapping['protoPayload'] = proto_payload + return entry_mapping + else: + raise + + def _log_entry_mapping_to_pb(mapping): """Helper for :meth:`write_entries`, et aliae @@ -451,6 +489,13 @@ def _log_entry_mapping_to_pb(mapping): the keys expected in the JSON API. """ entry_pb = LogEntry() + # NOTE: We assume ``mapping`` was created in ``Batch.commit`` + # or ``Logger._make_entry_resource``. In either case, if + # the ``protoPayload`` key is present, we assume that the + # type URL is registered with ``google.protobuf`` and will + # not cause any issues in the JSON->protobuf conversion + # of the corresponding ``proto_payload`` in the log entry + # (it is an ``Any`` field). ParseDict(mapping, entry_pb) return entry_pb @@ -482,7 +527,7 @@ def _item_to_entry(iterator, entry_pb, loggers): :rtype: :class:`~google.cloud.logging.entries._BaseEntry` :returns: The next log entry in the page. """ - resource = MessageToDict(entry_pb) + resource = _parse_log_entry(entry_pb) return entry_from_resource(resource, iterator.client, loggers) @@ -499,6 +544,8 @@ def _item_to_sink(iterator, log_sink_pb): :rtype: :class:`~google.cloud.logging.sink.Sink` :returns: The next sink in the page. """ + # NOTE: LogSink message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. resource = MessageToDict(log_sink_pb) return Sink.from_api_repr(resource, iterator.client) @@ -516,6 +563,8 @@ def _item_to_metric(iterator, log_metric_pb): :rtype: :class:`~google.cloud.logging.metric.Metric` :returns: The next metric in the page. """ + # NOTE: LogMetric message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. resource = MessageToDict(log_metric_pb) return Metric.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index d9e7e4dacacd..0838e7fe42ac 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -286,6 +286,9 @@ def sink_update(self, project, sink_name, filter_, destination): :type destination: str :param destination: destination URI for the entries exported by the sink. + + :rtype: dict + :returns: The returned (updated) resource. """ target = '/projects/%s/sinks/%s' % (project, sink_name) data = { @@ -293,7 +296,7 @@ def sink_update(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self.api_request(method='PUT', path=target, data=data) + return self.api_request(method='PUT', path=target, data=data) def sink_delete(self, project, sink_name): """API call: delete a sink resource. @@ -421,6 +424,9 @@ def metric_update(self, project, metric_name, filter_, description): :type description: str :param description: description of the metric. + + :rtype: dict + :returns: The returned (updated) resource. 
""" target = '/projects/%s/metrics/%s' % (project, metric_name) data = { @@ -428,7 +434,7 @@ def metric_update(self, project, metric_name, filter_, description): 'filter': filter_, 'description': description, } - self.api_request(method='PUT', path=target, data=data) + return self.api_request(method='PUT', path=target, data=data) def metric_delete(self, project, metric_name): """API call: delete a metric resource. diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 1ae5d34ec8b9..284562c5de5b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -17,6 +17,7 @@ import json import re +from google.protobuf import any_pb2 from google.protobuf.json_format import Parse from google.cloud._helpers import _name_from_project_path @@ -47,7 +48,7 @@ def logger_name_from_path(path): class _BaseEntry(object): - """Base class for TextEntry, StructEntry. + """Base class for TextEntry, StructEntry, ProtobufEntry. :type payload: text or dict :param payload: The payload passed as ``textPayload``, ``jsonPayload``, @@ -99,7 +100,7 @@ def from_api_repr(cls, resource, client, loggers=None): (Optional) A mapping of logger fullnames -> loggers. If not passed, the entry will have a newly-created logger. - :rtype: :class:`google.cloud.logging.entries.TextEntry` + :rtype: :class:`google.cloud.logging.entries._BaseEntry` :returns: Text entry parsed from ``resource``. """ if loggers is None: @@ -144,9 +145,45 @@ class ProtobufEntry(_BaseEntry): See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry + + :type payload: str, dict or any_pb2.Any + :param payload: The payload passed as ``textPayload``, ``jsonPayload``, + or ``protoPayload``. This also may be passed as a raw + :class:`.any_pb2.Any` if the ``protoPayload`` could + not be deserialized. + + :type logger: :class:`~google.cloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + + :type insert_id: str + :param insert_id: (optional) the ID used to identify an entry uniquely. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp for the entry + + :type labels: dict + :param labels: (optional) mapping of labels for the entry + + :type severity: str + :param severity: (optional) severity of event being logged. + + :type http_request: dict + :param http_request: (optional) info about HTTP request associated with + the entry """ _PAYLOAD_KEY = 'protoPayload' + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): + super(ProtobufEntry, self).__init__( + payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) + if isinstance(self.payload, any_pb2.Any): + self.payload_pb = self.payload + self.payload = None + else: + self.payload_pb = None + def parse_message(self, message): """Parse payload into a protobuf message. @@ -155,4 +192,7 @@ def parse_message(self, message): :type message: Protobuf message :param message: the message to be logged """ + # NOTE: This assumes that ``payload`` is already a deserialized + # ``Any`` field and ``message`` has come from an imported + # ``pb2`` module with the relevant protobuf message type. 
Parse(json.dumps(self.payload), message) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 459647bbea67..f093e6e48c88 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -14,9 +14,7 @@ """Define API Loggers.""" -import json - -from google.protobuf.json_format import MessageToJson +from google.protobuf.json_format import MessageToDict from google.cloud._helpers import _datetime_to_rfc3339 @@ -106,24 +104,24 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type info: dict :param info: (Optional) struct payload - :type message: Protobuf message or :class:`NoneType` - :param message: protobuf payload + :type message: :class:`~google.protobuf.message.Message` + :param message: (Optional) The protobuf payload to log. :type labels: dict :param labels: (Optional) labels passed in to calling method. :type insert_id: str - :param insert_id: (optional) unique ID for log entry. + :param insert_id: (Optional) unique ID for log entry. :type severity: str - :param severity: (optional) severity of event being logged. + :param severity: (Optional) severity of event being logged. :type http_request: dict - :param http_request: (optional) info about HTTP request associated with + :param http_request: (Optional) info about HTTP request associated with the entry :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. + :param timestamp: (Optional) timestamp of event being logged. :rtype: dict :returns: The JSON resource created. @@ -140,9 +138,13 @@ def _make_entry_resource(self, text=None, info=None, message=None, resource['jsonPayload'] = info if message is not None: - as_json_str = MessageToJson(message) - as_json = json.loads(as_json_str) - resource['protoPayload'] = as_json + # NOTE: If ``message`` contains an ``Any`` field with an + # unknown type, this will fail with a ``TypeError``. + # However, since ``message`` will be provided by a user, + # the assumption is that any types needed for the + # protobuf->JSON conversion will be known from already + # imported ``pb2`` modules. + resource['protoPayload'] = MessageToDict(message) if labels is None: labels = self.labels @@ -245,8 +247,8 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - :type message: Protobuf message - :param message: the message to be logged + :type message: :class:`~google.protobuf.message.Message` + :param message: The protobuf message to be logged. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -462,9 +464,13 @@ def commit(self, client=None): elif entry_type == 'struct': info = {'jsonPayload': entry} elif entry_type == 'proto': - as_json_str = MessageToJson(entry) - as_json = json.loads(as_json_str) - info = {'protoPayload': as_json} + # NOTE: If ``entry`` contains an ``Any`` field with an + # unknown type, this will fail with a ``TypeError``. + # However, since ``entry`` was provided by a user in + # ``Batch.log_proto``, the assumption is that any types + # needed for the protobuf->JSON conversion will be known + # from already imported ``pb2`` modules. 
+ info = {'protoPayload': MessageToDict(entry)} else: raise ValueError('Unknown entry type: %s' % (entry_type,)) if labels is not None: diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index f564ec3dcbdf..3bd8d5fb2139 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -108,6 +108,7 @@ class TestLogging(unittest.TestCase): 'precipitation': False, }, } + TYPE_FILTER = 'protoPayload.@type = "{}"' def setUp(self): self.to_delete = [] @@ -123,6 +124,31 @@ def tearDown(self): def _logger_name(): return 'system-tests-logger' + unique_resource_id('-') + def test_list_entry_with_unregistered(self): + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.cloud.logging import entries + + pool = descriptor_pool.Default() + type_name = 'google.cloud.audit.AuditLog' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + filter_ = self.TYPE_FILTER.format(type_url) + entry_iter = iter( + Config.CLIENT.list_entries(page_size=1, filter_=filter_)) + protobuf_entry = next(entry_iter) + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + if Config.CLIENT._use_grpc: + self.assertIsNone(protobuf_entry.payload) + self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) + self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) + else: + self.assertIsNone(protobuf_entry.payload_pb) + self.assertEqual(protobuf_entry.payload['@type'], type_url) + def test_log_text(self): TEXT_PAYLOAD = 'System test: test_log_text' logger = Config.CLIENT.logger(self._logger_name()) diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index 67830a99db42..57041097efac 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -1100,6 +1100,172 @@ def test_metric_delete_hit(self): self.assertIsNone(options) +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') +class Test__parse_log_entry(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gax import _parse_log_entry + + return _parse_log_entry(*args, **kwargs) + + def test_simple(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + + entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') + result = self._call_fut(entry_pb) + expected = { + 'logName': entry_pb.log_name, + 'textPayload': entry_pb.text_payload, + } + self.assertEqual(result, expected) + + @mock.patch('google.cloud.logging._gax.MessageToDict', + side_effect=TypeError) + def test_non_registry_failure(self, msg_to_dict_mock): + entry_pb = mock.Mock(spec=['HasField']) + entry_pb.HasField.return_value = False + with self.assertRaises(TypeError): + self._call_fut(entry_pb) + + entry_pb.HasField.assert_called_once_with('proto_payload') + msg_to_dict_mock.assert_called_once_with(entry_pb) + + def test_unregistered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.timestamp_pb2 import Timestamp + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. 
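+        # (It is "not known" because the generated ``_pb2`` module for this
+        # type was never imported, so the default descriptor pool cannot
+        # resolve it.)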
+ with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + metadata_bytes = ( + b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') + any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) + timestamp = Timestamp(seconds=61, nanos=1234000) + + entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) + result = self._call_fut(entry_pb) + expected = { + 'protoPayload': any_pb, + 'timestamp': '1970-01-01T00:01:01.001234Z', + } + self.assertEqual(result, expected) + + def test_registered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + struct_pb = Struct( + fields={field_name: Value(string_value=field_value)}) + any_pb = any_pb2.Any( + type_url=type_url, + value=struct_pb.SerializeToString(), + ) + + entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') + result = self._call_fut(entry_pb) + expected_proto = { + 'logName': entry_pb.log_name, + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + self.assertEqual(result, expected_proto) + + +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') +class Test__log_entry_mapping_to_pb(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gax import _log_entry_mapping_to_pb + + return _log_entry_mapping_to_pb(*args, **kwargs) + + def test_simple(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + + result = self._call_fut({}) + self.assertEqual(result, LogEntry()) + + def test_unregistered_type(self): + from google.protobuf import descriptor_pool + from google.protobuf.json_format import ParseError + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + json_mapping = { + 'protoPayload': { + '@type': type_url, + 'originalRequest': { + 'name': 'foo', + 'location': 'bar', + }, + 'requestTime': { + 'seconds': 1491000125, + }, + }, + } + with self.assertRaises(ParseError): + self._call_fut(json_mapping) + + def test_registered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + json_mapping = { + 'logName': u'hi-everybody', + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + # Convert to a valid LogEntry. 
+ result = self._call_fut(json_mapping) + entry_pb = LogEntry( + log_name=json_mapping['logName'], + proto_payload=any_pb2.Any( + type_url=type_url, + value=b'\n\014\n\003foo\022\005\032\003Bar', + ), + ) + self.assertEqual(result, entry_pb) + + @unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_logging_api(unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index d39d72a27af8..4d254eb9d1ef 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -14,6 +14,8 @@ import unittest +import mock + class Test_logger_name_from_path(unittest.TestCase): @@ -207,6 +209,32 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + def test_constructor_basic(self): + payload = {'foo': 'bar'} + pb_entry = self._make_one(payload, mock.sentinel.logger) + self.assertEqual(pb_entry.payload, payload) + self.assertIsNone(pb_entry.payload_pb) + self.assertIs(pb_entry.logger, mock.sentinel.logger) + self.assertIsNone(pb_entry.insert_id) + self.assertIsNone(pb_entry.timestamp) + self.assertIsNone(pb_entry.labels) + self.assertIsNone(pb_entry.severity) + self.assertIsNone(pb_entry.http_request) + + def test_constructor_with_any(self): + from google.protobuf.any_pb2 import Any + + payload = Any() + pb_entry = self._make_one(payload, mock.sentinel.logger) + self.assertIs(pb_entry.payload_pb, payload) + self.assertIsNone(pb_entry.payload) + self.assertIs(pb_entry.logger, mock.sentinel.logger) + self.assertIsNone(pb_entry.insert_id) + self.assertIsNone(pb_entry.timestamp) + self.assertIsNone(pb_entry.labels) + self.assertIsNone(pb_entry.severity) + self.assertIsNone(pb_entry.http_request) + def test_parse_message(self): import json from google.protobuf.json_format import MessageToJson From d65e970b7688f88414ca088c7c281ec43997218a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 6 Apr 2017 18:43:55 -0700 Subject: [PATCH 093/855] Cut release 1.0.0 of logging package. (#3278) --- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/tests/unit/test__gax.py | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index e7b4b3b6beba..a2656e744a91 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -58,7 +58,7 @@ setup( name='google-cloud-logging', - version='0.24.0', + version='1.0.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index 57041097efac..f1b98696dc9e 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -1151,11 +1151,12 @@ def test_unregistered_type(self): entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) result = self._call_fut(entry_pb) - expected = { - 'protoPayload': any_pb, - 'timestamp': '1970-01-01T00:01:01.001234Z', - } - self.assertEqual(result, expected) + self.assertEqual(len(result), 2) + self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') + # NOTE: This "hack" is needed on Windows, where the equality check + # for an ``Any`` instance fails on unregistered types. 
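+        # Compare the ``type_url`` and the raw serialized ``value`` instead.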
+ self.assertEqual(result['protoPayload'].type_url, type_url) + self.assertEqual(result['protoPayload'].value, metadata_bytes) def test_registered_type(self): from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry From fe0063e4454ea000ba3293fb1005646cbf54ca39 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Apr 2017 13:00:32 -0700 Subject: [PATCH 094/855] Adding check that **all** setup.py READMEs are valid RST. (#3318) * Adding check that **all** setup.py READMEs are valid RST. Follow up to #3316. Fixes #2446. * Fixing duplicate reference in Logging README. * Fixing duplicate reference in Monitoring README. --- packages/google-cloud-logging/README.rst | 1 - packages/google-cloud-logging/nox.py | 9 +++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 6dd9f74e62a3..5df19dd1f79a 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -35,7 +35,6 @@ Using the API `Stackdriver Logging`_ API (`Logging API docs`_) allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform. -.. _Stackdriver Logging: https://cloud.google.com/logging/ .. _Logging API docs: https://cloud.google.com/logging/docs/ .. code:: python diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index c96ad4871a77..7f6447c56924 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -79,6 +79,15 @@ def lint(session): session.run('flake8', 'google/cloud/logging') +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'Pygments') + session.run( + 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + + @nox.session def cover(session): """Run the final coverage report. From 231147b83df4bb3cbcdc566442dff9bba96c8ea5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Apr 2017 10:03:56 -0700 Subject: [PATCH 095/855] Ignore tests (rather than unit_tests) in setup.py files. (#3319) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index a2656e744a91..e3f8334cd5bb 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -65,7 +65,7 @@ 'google', 'google.cloud', ], - packages=find_packages(exclude=('unit_tests*',)), + packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) From 77430608be4de7c525ecae70e3af76325b118bc9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Apr 2017 10:27:31 -0700 Subject: [PATCH 096/855] Fixing broken logging system test. (#3325) Tries to update a frozenset().
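The one-line summary above ("Tries to update a frozenset()") refers to ``Policy.owners`` now being an immutable ``frozenset``, so the old in-place ``add()`` call fails with ``AttributeError``. A rough sketch of the failure and of the fix applied in the diff below, assuming ``topic`` is the Pub/Sub topic the system test creates:

.. code:: python

    from google.cloud.iam import OWNER_ROLE

    policy = topic.get_iam_policy()

    # Broken: a frozenset has no .add() method.
    # policy.owners.add(policy.group('cloud-logs@google.com'))

    # Fixed: build a fresh set and assign it back to the role key.
    new_owners = set([policy.group('cloud-logs@google.com')])
    new_owners.update(policy.owners)
    policy[OWNER_ROLE] = new_owners
    topic.set_iam_policy(policy)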
--- packages/google-cloud-logging/tests/system.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 3bd8d5fb2139..89047edef5c4 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -389,7 +389,9 @@ def test_create_sink_storage_bucket(self): self.assertTrue(sink.exists()) def test_create_sink_pubsub_topic(self): + from google.cloud.iam import OWNER_ROLE from google.cloud.pubsub import client as pubsub_client + SINK_NAME = 'test-create-sink-topic%s' % (_RESOURCE_ID,) TOPIC_NAME = 'logging-test-sink%s' % (_RESOURCE_ID,) @@ -400,7 +402,9 @@ def test_create_sink_pubsub_topic(self): topic.create() self.to_delete.append(topic) policy = topic.get_iam_policy() - policy.owners.add(policy.group('cloud-logs@google.com')) + new_owners = set([policy.group('cloud-logs@google.com')]) + new_owners.update(policy.owners) + policy[OWNER_ROLE] = new_owners topic.set_iam_policy(policy) TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name,) From 17d82519bc31b08ab8fd9c1970849cb70b6a1039 Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Thu, 11 May 2017 10:52:38 -0700 Subject: [PATCH 097/855] Add monitored resource support to Logging (#3386) --- .../google/cloud/logging/entries.py | 26 ++- .../google/cloud/logging/logger.py | 115 ++++++++++---- .../google/cloud/logging/resource.py | 58 +++++++ packages/google-cloud-logging/tests/system.py | 24 +++ .../tests/unit/test_entries.py | 21 ++- .../tests/unit/test_logger.py | 149 ++++++++++++++---- 6 files changed, 329 insertions(+), 64 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging/resource.py diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 284562c5de5b..24c8392eba14 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -20,6 +20,7 @@ from google.protobuf import any_pb2 from google.protobuf.json_format import Parse +from google.cloud.logging.resource import Resource from google.cloud._helpers import _name_from_project_path from google.cloud._helpers import _rfc3339_nanos_to_datetime @@ -71,10 +72,13 @@ class _BaseEntry(object): :type http_request: dict :param http_request: (optional) info about HTTP request associated with - the entry + the entry. 
+ + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry """ def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None): + labels=None, severity=None, http_request=None, resource=None): self.payload = payload self.logger = logger self.insert_id = insert_id @@ -82,6 +86,7 @@ def __init__(self, payload, logger, insert_id=None, timestamp=None, self.labels = labels self.severity = severity self.http_request = http_request + self.resource = resource @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -118,8 +123,15 @@ def from_api_repr(cls, resource, client, loggers=None): labels = resource.get('labels') severity = resource.get('severity') http_request = resource.get('httpRequest') + + monitored_resource_dict = resource.get('resource') + monitored_resource = None + if monitored_resource_dict is not None: + monitored_resource = Resource._from_dict(monitored_resource_dict) + return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, - labels=labels, severity=severity, http_request=http_request) + labels=labels, severity=severity, http_request=http_request, + resource=monitored_resource) class TextEntry(_BaseEntry): @@ -170,14 +182,18 @@ class ProtobufEntry(_BaseEntry): :type http_request: dict :param http_request: (optional) info about HTTP request associated with the entry + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry """ _PAYLOAD_KEY = 'protoPayload' def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None): + labels=None, severity=None, http_request=None, resource=None): super(ProtobufEntry, self).__init__( payload, logger, insert_id=insert_id, timestamp=timestamp, - labels=labels, severity=severity, http_request=http_request) + labels=labels, severity=severity, http_request=http_request, + resource=resource) if isinstance(self.payload, any_pb2.Any): self.payload_pb = self.payload self.payload = None diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index f093e6e48c88..874d05014479 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -16,6 +16,10 @@ from google.protobuf.json_format import MessageToDict from google.cloud._helpers import _datetime_to_rfc3339 +from google.cloud.logging.resource import Resource + + +_GLOBAL_RESOURCE = Resource(type='global', labels={}) class Logger(object): @@ -91,7 +95,8 @@ def batch(self, client=None): def _make_entry_resource(self, text=None, info=None, message=None, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None): + http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -123,19 +128,22 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type timestamp: :class:`datetime.datetime` :param timestamp: (Optional) timestamp of event being logged. + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry + :rtype: dict :returns: The JSON resource created. 
""" - resource = { + entry = { 'logName': self.full_name, - 'resource': {'type': 'global'}, + 'resource': resource._to_dict(), } if text is not None: - resource['textPayload'] = text + entry['textPayload'] = text if info is not None: - resource['jsonPayload'] = info + entry['jsonPayload'] = info if message is not None: # NOTE: If ``message`` contains an ``Any`` field with an @@ -144,30 +152,31 @@ def _make_entry_resource(self, text=None, info=None, message=None, # the assumption is that any types needed for the # protobuf->JSON conversion will be known from already # imported ``pb2`` modules. - resource['protoPayload'] = MessageToDict(message) + entry['protoPayload'] = MessageToDict(message) if labels is None: labels = self.labels if labels is not None: - resource['labels'] = labels + entry['labels'] = labels if insert_id is not None: - resource['insertId'] = insert_id + entry['insertId'] = insert_id if severity is not None: - resource['severity'] = severity + entry['severity'] = severity if http_request is not None: - resource['httpRequest'] = http_request + entry['httpRequest'] = http_request if timestamp is not None: - resource['timestamp'] = _datetime_to_rfc3339(timestamp) + entry['timestamp'] = _datetime_to_rfc3339(timestamp) - return resource + return entry def log_text(self, text, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None): + severity=None, http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """API call: log a text message via a POST request See: @@ -194,17 +203,22 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: Monitored resource of the entry, defaults + to the global resource type. + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp) + http_request=http_request, timestamp=timestamp, resource=resource) client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None): + severity=None, http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """API call: log a structured message via a POST request See: @@ -231,17 +245,22 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: Monitored resource of the entry, defaults + to the global resource type. + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. 
""" client = self._require_client(client) entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp) + http_request=http_request, timestamp=timestamp, resource=resource) client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None): + severity=None, http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """API call: log a protobuf message via a POST request See: @@ -268,13 +287,18 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: Monitored resource of the entry, defaults + to the global resource type. + :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. """ client = self._require_client(client) entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, - severity=severity, http_request=http_request, timestamp=timestamp) + severity=severity, http_request=http_request, timestamp=timestamp, + resource=resource) client.logging_api.write_entries([entry_resource]) def delete(self, client=None): @@ -344,11 +368,21 @@ class Batch(object): :type client: :class:`google.cloud.logging.client.Client` :param client: The client to use. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the batch, defaults + to None, which requires that every entry should have a + resource specified. Since the methods used to write + entries default the entry's resource to the global + resource type, this parameter is only required + if explicitly set to None. If no entries' resource are + set to None, this parameter will be ignored on the server. """ - def __init__(self, logger, client): + def __init__(self, logger, client, resource=None): self.logger = logger self.entries = [] self.client = client + self.resource = resource def __enter__(self): return self @@ -358,7 +392,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() def log_text(self, text, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None): + http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE): """Add a text entry to be logged during :meth:`commit`. :type text: str @@ -379,13 +413,21 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. Defaults + to the global resource type. If set to None, the + resource of the batch is used for this entry. If + both this resource and the Batch resource are None, + the API will return an error. """ self.entries.append( ('text', text, labels, insert_id, severity, http_request, - timestamp)) + timestamp, resource)) def log_struct(self, info, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None): + http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """Add a struct entry to be logged during :meth:`commit`. 
:type info: dict @@ -406,13 +448,21 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. Defaults + to the global resource type. If set to None, the + resource of the batch is used for this entry. If + both this resource and the Batch resource are None, + the API will return an error. """ self.entries.append( ('struct', info, labels, insert_id, severity, http_request, - timestamp)) + timestamp, resource)) def log_proto(self, message, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None): + http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -433,10 +483,17 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type timestamp: :class:`datetime.datetime` :param timestamp: (optional) timestamp of event being logged. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. Defaults + to the global resource type. If set to None, the + resource of the batch is used for this entry. If + both this resource and the Batch resource are None, + the API will return an error. """ self.entries.append( ('proto', message, labels, insert_id, severity, http_request, - timestamp)) + timestamp, resource)) def commit(self, client=None): """Send saved log entries as a single API call. @@ -451,14 +508,16 @@ def commit(self, client=None): kwargs = { 'logger_name': self.logger.full_name, - 'resource': {'type': 'global'}, } + + if self.resource is not None: + kwargs['resource'] = self.resource._to_dict() if self.logger.labels is not None: kwargs['labels'] = self.logger.labels entries = [] for (entry_type, entry, labels, iid, severity, http_req, - timestamp) in self.entries: + timestamp, resource) in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -473,6 +532,8 @@ def commit(self, client=None): info = {'protoPayload': MessageToDict(entry)} else: raise ValueError('Unknown entry type: %s' % (entry_type,)) + if resource is not None: + info['resource'] = resource._to_dict() if labels is not None: info['labels'] = labels if iid is not None: diff --git a/packages/google-cloud-logging/google/cloud/logging/resource.py b/packages/google-cloud-logging/google/cloud/logging/resource.py new file mode 100644 index 000000000000..aa37287db3ef --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/resource.py @@ -0,0 +1,58 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Monitored Resource for the Google Logging API V2.""" + +import collections + + +class Resource(collections.namedtuple('Resource', 'type labels')): + """A monitored resource identified by specifying values for all labels. + + :type type: str + :param type: The resource type name. + + :type labels: dict + :param labels: A mapping from label names to values for all labels + enumerated in the associated :class:`ResourceDescriptor`. + """ + __slots__ = () + + @classmethod + def _from_dict(cls, info): + """Construct a resource object from the parsed JSON representation. + + :type info: dict + :param info: + A ``dict`` parsed from the JSON wire-format representation. + + :rtype: :class:`Resource` + :returns: A resource object. + """ + return cls( + type=info['type'], + labels=info.get('labels', {}), + ) + + def _to_dict(self): + """Build a dictionary ready to be serialized to the JSON format. + + :rtype: dict + :returns: A dict representation of the object that can be written to + the API. + """ + return { + 'type': self.type, + 'labels': self.labels, + } diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 89047edef5c4..075ff5ffd6cc 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -29,6 +29,7 @@ from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.transports import SyncTransport from google.cloud.logging import client +from google.cloud.logging.resource import Resource from test_utils.retry import RetryErrors from test_utils.retry import RetryResult @@ -171,6 +172,29 @@ def test_log_text_with_timestamp(self): self.assertEqual(entries[0].payload, text_payload) self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + def test_log_text_with_resource(self): + text_payload = 'System test: test_log_text_with_timestamp' + + logger = Config.CLIENT.logger(self._logger_name()) + now = datetime.datetime.utcnow() + resource = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) + + self.to_delete.append(logger) + + logger.log_text(text_payload, timestamp=now, resource=resource) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + # project_id is output only so we don't want it in assertion + del entries[0].resource.labels['project_id'] + self.assertEqual(entries[0].resource, resource) + def test_log_text_w_metadata(self): TEXT_PAYLOAD = 'System test: test_log_text' INSERT_ID = 'INSERTID' diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 4d254eb9d1ef..75cb641636a0 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -67,9 +67,11 @@ def test_ctor_defaults(self): self.assertIsNone(entry.labels) self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) + self.assertIsNone(entry.resource) def test_ctor_explicit(self): import datetime + from google.cloud.logging.resource import Resource PAYLOAD = 'PAYLOAD' IID = 'IID' @@ -84,13 +86,16 @@ def test_ctor_explicit(self): 'requestUrl': URI, 'status': STATUS, } + resource = Resource(type='global', labels={}) + logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._make_one(PAYLOAD, logger, insert_id=IID, timestamp=TIMESTAMP, labels=LABELS, severity=SEVERITY, - 
http_request=REQUEST) + http_request=REQUEST, + resource=resource) self.assertEqual(entry.payload, PAYLOAD) self.assertIs(entry.logger, logger) self.assertEqual(entry.insert_id, IID) @@ -100,6 +105,7 @@ def test_ctor_explicit(self): self.assertEqual(entry.http_request['requestMethod'], METHOD) self.assertEqual(entry.http_request['requestUrl'], URI) self.assertEqual(entry.http_request['status'], STATUS) + self.assertEqual(entry.resource, resource) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) @@ -124,6 +130,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC + from google.cloud.logging.resource import Resource klass = self._get_target_class() client = _Client(self.PROJECT) @@ -136,6 +143,16 @@ def test_from_api_repr_w_loggers_no_logger_match(self): LABELS = {'foo': 'bar', 'baz': 'qux'} METHOD = 'POST' URI = 'https://api.example.com/endpoint' + RESOURCE = Resource( + type='gae_app', + labels={ + 'type': 'gae_app', + 'labels': { + 'module_id': 'default', + 'version': 'test', + } + } + ) STATUS = '500' API_REPR = { 'dummyPayload': PAYLOAD, @@ -149,6 +166,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): 'requestUrl': URI, 'status': STATUS, }, + 'resource': RESOURCE._to_dict(), } loggers = {} entry = klass.from_api_repr(API_REPR, client, loggers=loggers) @@ -165,6 +183,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): self.assertIs(logger.client, client) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertEqual(loggers, {LOG_NAME: logger}) + self.assertEqual(entry.resource, RESOURCE) def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 0501bee1fd39..5c184f7c3dec 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -96,6 +96,7 @@ def test_log_text_w_str_implicit_client(self): 'textPayload': TEXT, 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -116,6 +117,7 @@ def test_log_text_w_default_labels(self): 'textPayload': TEXT, 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': DEFAULT_LABELS, }] @@ -141,6 +143,7 @@ def test_log_text_w_timestamp(self): 'timestamp': '2016-12-31T00:01:02.999999Z', 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -172,6 +175,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): 'textPayload': TEXT, 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': LABELS, 'insertId': IID, @@ -198,6 +202,7 @@ def test_log_struct_w_implicit_client(self): 'jsonPayload': STRUCT, 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -218,6 +223,7 @@ def test_log_struct_w_default_labels(self): 'jsonPayload': STRUCT, 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': DEFAULT_LABELS, }] @@ -251,6 +257,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): 'jsonPayload': STRUCT, 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': LABELS, 'insertId': IID, @@ -282,6 +289,7 @@ def test_log_struct_w_timestamp(self): 'timestamp': '2016-12-31T00:01:02.999999Z', 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -305,6 +313,7 @@ def 
test_log_proto_w_implicit_client(self): 'protoPayload': json.loads(MessageToJson(message)), 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -329,6 +338,7 @@ def test_log_proto_w_default_labels(self): 'protoPayload': json.loads(MessageToJson(message)), 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': DEFAULT_LABELS, }] @@ -367,6 +377,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): 'protoPayload': json.loads(MessageToJson(message)), 'resource': { 'type': 'global', + 'labels': {}, }, 'labels': LABELS, 'insertId': IID, @@ -402,6 +413,7 @@ def test_log_proto_w_timestamp(self): 'timestamp': '2016-12-31T00:01:02.999999Z', 'resource': { 'type': 'global', + 'labels': {}, }, }] client = _Client(self.PROJECT) @@ -530,16 +542,19 @@ def test_ctor_defaults(self): self.assertEqual(len(batch.entries), 0) def test_log_text_defaults(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE TEXT = 'This is the entry text' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT) self.assertEqual(batch.entries, - [('text', TEXT, None, None, None, None, None)]) + [('text', TEXT, None, None, None, None, None, + _GLOBAL_RESOURCE)]) def test_log_text_explicit(self): import datetime + from google.cloud.logging.resource import Resource TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -554,16 +569,26 @@ def test_log_text_explicit(self): 'status': STATUS, } TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test', + }) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST, timestamp=TIMESTAMP) + http_request=REQUEST, timestamp=TIMESTAMP, + resource=RESOURCE) self.assertEqual( batch.entries, - [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, + RESOURCE)]) def test_log_struct_defaults(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -571,10 +596,12 @@ def test_log_struct_defaults(self): batch.log_struct(STRUCT) self.assertEqual( batch.entries, - [('struct', STRUCT, None, None, None, None, None)]) + [('struct', STRUCT, None, None, None, None, None, + _GLOBAL_RESOURCE)]) def test_log_struct_explicit(self): import datetime + from google.cloud.logging.resource import Resource STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -589,17 +616,27 @@ def test_log_struct_explicit(self): 'status': STATUS, } TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test', + } + ) + client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP) + timestamp=TIMESTAMP, resource=RESOURCE) self.assertEqual( batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, 
TIMESTAMP)]) + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP , + RESOURCE)]) def test_log_proto_defaults(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -609,10 +646,12 @@ def test_log_proto_defaults(self): batch = self._make_one(logger, client=client) batch.log_proto(message) self.assertEqual(batch.entries, - [('proto', message, None, None, None, None, None)]) + [('proto', message, None, None, None, None, None, + _GLOBAL_RESOURCE)]) def test_log_proto_explicit(self): import datetime + from google.cloud.logging.resource import Resource from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -629,24 +668,61 @@ def test_log_proto_explicit(self): 'status': STATUS, } TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test', + } + ) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP) + timestamp=TIMESTAMP, resource=RESOURCE) self.assertEqual( batch.entries, - [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)]) + [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, + RESOURCE)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) - batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None)) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None, + None)) with self.assertRaises(ValueError): batch.commit() + def test_commit_w_resource_specified(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.resource import Resource + + logger = _Logger() + client = _Client(project=self.PROJECT, connection=_make_credentials()) + api = client.logging_api = _DummyLoggingAPI() + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test', + } + ) + + batch = self._make_one(logger, client, resource=RESOURCE) + MESSAGE = 'This is the entry text' + ENTRIES = [ + {'textPayload': MESSAGE}, + {'textPayload': MESSAGE, 'resource': _GLOBAL_RESOURCE._to_dict()}, + ] + batch.log_text(MESSAGE, resource=None) + batch.log_text(MESSAGE) + batch.commit() + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.full_name, + RESOURCE._to_dict(), None)) + def test_commit_w_bound_client(self): import json import datetime @@ -654,6 +730,8 @@ def test_commit_w_bound_client(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud._helpers import _datetime_to_rfc3339 + from google.cloud.logging.logger import _GLOBAL_RESOURCE + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -664,17 +742,17 @@ def test_commit_w_bound_client(self): TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) - RESOURCE = { - 'type': 'global', - } ENTRIES = [ {'textPayload': TEXT, 'insertId': IID1, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP1)}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), + 'resource': 
_GLOBAL_RESOURCE._to_dict()}, {'jsonPayload': STRUCT, 'insertId': IID2, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP2)}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), + 'resource': _GLOBAL_RESOURCE._to_dict()}, {'protoPayload': json.loads(MessageToJson(message)), 'insertId': IID3, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP3)}, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), + 'resource': _GLOBAL_RESOURCE._to_dict()}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -688,7 +766,7 @@ def test_commit_w_bound_client(self): self.assertEqual(list(batch.entries), []) self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, RESOURCE, None)) + (ENTRIES, logger.full_name, None, None)) def test_commit_w_alternate_client(self): import json @@ -696,6 +774,7 @@ def test_commit_w_alternate_client(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud.logging.logger import Logger + from google.cloud.logging.logger import _GLOBAL_RESOURCE TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -718,12 +797,14 @@ def test_commit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() logger = Logger('logger_name', client1, labels=DEFAULT_LABELS) - RESOURCE = {'type': 'global'} ENTRIES = [ - {'textPayload': TEXT, 'labels': LABELS}, - {'jsonPayload': STRUCT, 'severity': SEVERITY}, + {'textPayload': TEXT, 'labels': LABELS, 'resource': + _GLOBAL_RESOURCE._to_dict()}, + {'jsonPayload': STRUCT, 'severity': SEVERITY, + 'resource': _GLOBAL_RESOURCE._to_dict()}, {'protoPayload': json.loads(MessageToJson(message)), - 'httpRequest': REQUEST}, + 'httpRequest': REQUEST, + 'resource': _GLOBAL_RESOURCE._to_dict()}, ] batch = self._make_one(logger, client=client1) @@ -734,7 +815,7 @@ def test_commit_w_alternate_client(self): self.assertEqual(list(batch.entries), []) self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) + (ENTRIES, logger.full_name, None, DEFAULT_LABELS)) def test_context_mgr_success(self): import json @@ -742,6 +823,8 @@ def test_context_mgr_success(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud.logging.logger import Logger + from google.cloud.logging.logger import _GLOBAL_RESOURCE + TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -760,13 +843,13 @@ def test_context_mgr_success(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = Logger('logger_name', client, labels=DEFAULT_LABELS) - RESOURCE = { - 'type': 'global', - } ENTRIES = [ - {'textPayload': TEXT, 'httpRequest': REQUEST}, - {'jsonPayload': STRUCT, 'labels': LABELS}, + {'textPayload': TEXT, 'httpRequest': REQUEST, + 'resource': _GLOBAL_RESOURCE._to_dict()}, + {'jsonPayload': STRUCT, 'labels': LABELS, + 'resource': _GLOBAL_RESOURCE._to_dict()}, {'protoPayload': json.loads(MessageToJson(message)), + 'resource': _GLOBAL_RESOURCE._to_dict(), 'severity': SEVERITY}, ] batch = self._make_one(logger, client=client) @@ -778,12 +861,13 @@ def test_context_mgr_success(self): self.assertEqual(list(batch.entries), []) self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS)) + (ENTRIES, logger.full_name, None, DEFAULT_LABELS)) def test_context_mgr_failure(self): import datetime from 
google.protobuf.struct_pb2 import Struct
         from google.protobuf.struct_pb2 import Value
+        from google.cloud.logging.logger import _GLOBAL_RESOURCE

         TEXT = 'This is the entry text'
         STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
@@ -804,9 +888,12 @@ def test_context_mgr_failure(self):
         api = client.logging_api = _DummyLoggingAPI()
         logger = _Logger()
         UNSENT = [
-            ('text', TEXT, None, IID, None, None, TIMESTAMP),
-            ('struct', STRUCT, None, None, SEVERITY, None, None),
-            ('proto', message, LABELS, None, None, REQUEST, None),
+            ('text', TEXT, None, IID, None, None, TIMESTAMP,
+             _GLOBAL_RESOURCE),
+            ('struct', STRUCT, None, None, SEVERITY, None, None,
+             _GLOBAL_RESOURCE),
+            ('proto', message, LABELS, None, None, REQUEST, None,
+             _GLOBAL_RESOURCE),
         ]
         batch = self._make_one(logger, client=client)

From ae57a13b649531fb40ca29bc0e44f40bea4e617d Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Fri, 12 May 2017 13:30:48 -0700
Subject: [PATCH 098/855] Overhaul logging background thread transport (#3407)

---
 .../google/cloud/logging/handlers/handlers.py |   6 +-
 .../handlers/transports/background_thread.py  | 274 ++++++++++------
 packages/google-cloud-logging/nox.py          |   4 +-
 .../transports/test_background_thread.py      | 298 +++++++++++++-----
 4 files changed, 401 insertions(+), 181 deletions(-)

diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
index ae66c4516ee9..62ec6c6d561a 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
@@ -20,7 +20,11 @@

 DEFAULT_LOGGER_NAME = 'python'

-EXCLUDED_LOGGER_DEFAULTS = ('google.cloud', 'oauth2client')
+EXCLUDED_LOGGER_DEFAULTS = (
+    'google.cloud',
+    'google.auth',
+    'google_auth_httplib2',
+)


 class CloudLoggingHandler(logging.StreamHandler):
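The `EXCLUDED_LOGGER_DEFAULTS` expansion above breaks a feedback loop: once the root logger forwards records to Cloud Logging, the client's own dependency loggers ('google.cloud', 'google.auth', 'google_auth_httplib2') must not propagate back through the same handler, or each API call made to ship a record would itself emit new records. A minimal sketch of the wiring this list enables; the `attach` helper and `EXCLUDED` constant are illustrative, not part of the patch:

import logging

EXCLUDED = ('google.cloud', 'google.auth', 'google_auth_httplib2')

def attach(cloud_handler, excluded_loggers=EXCLUDED):
    # All application records propagate to the root logger and flow
    # through the Cloud Logging handler.
    logging.getLogger().addHandler(cloud_handler)
    # The client-internal loggers bypass it: they keep a plain stderr
    # handler, so writing a log entry can never recursively schedule
    # the writing of another log entry.
    for name in excluded_loggers:
        logger = logging.getLogger(name)
        logger.propagate = False
        logger.addHandler(logging.StreamHandler())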
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
index 9c8ea85c937a..4b651243be45 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
@@ -17,139 +17,231 @@
 Uses a background worker to log to Stackdriver Logging asynchronously.
 """

+from __future__ import print_function
+
 import atexit
 import copy
+import logging
 import threading

+from six.moves import range
+from six.moves import queue
+
 from google.cloud.logging.handlers.transports.base import Transport

-_WORKER_THREAD_NAME = 'google.cloud.logging.handlers.transport.Worker'
+_DEFAULT_GRACE_PERIOD = 5.0  # Seconds
+_DEFAULT_MAX_BATCH_SIZE = 10
+_WORKER_THREAD_NAME = 'google.cloud.logging.Worker'
+_WORKER_TERMINATOR = object()
+_LOGGER = logging.getLogger(__name__)


-class _Worker(object):
-    """A threaded worker that writes batches of log entries
+def _get_many(queue_, max_items=None):
+    """Get multiple items from a Queue.

-    Writes entries to the logger API.
+    Gets at least one (blocking) and at most ``max_items`` items
+    (non-blocking) from a given Queue. Does not mark the items as done.

-    This class reuses a single :class:`Batch` method to write successive
-    entries.
+    :type queue_: :class:`~queue.Queue`
+    :param queue_: The Queue to get items from.

-    Currently, the only public methods are constructing it (which also starts
-    it) and enqueuing :class:`Logger` (record, message) pairs.
+    :type max_items: int
+    :param max_items: The maximum number of items to get. If ``None``, then all
+        available items in the queue are returned.
+
+    :rtype: Sequence
+    :returns: A sequence of items retrieved from the queue.
     """
+    # Always return at least one item.
+    items = [queue_.get()]
+    while max_items is None or len(items) < max_items:
+        try:
+            items.append(queue_.get_nowait())
+        except queue.Empty:
+            break
+    return items

-    def __init__(self, logger):
-        self.started = False
-        self.stopping = False
-        self.stopped = False

-        # _entries_condition is used to signal from the main thread whether
-        # there are any waiting queued logger entries to be written
-        self._entries_condition = threading.Condition()
+class _Worker(object):
+    """A background thread that writes batches of log entries.

-        # _stop_condition is used to signal from the worker thread to the
-        # main thread that it's finished its last entries
-        self._stop_condition = threading.Condition()
+    :type cloud_logger: :class:`~google.cloud.logging.logger.Logger`
+    :param cloud_logger: The logger to send entries to.

-        # This object continually reuses the same :class:`Batch` object to
-        # write multiple entries at the same time.
-        self.logger = logger
-        self.batch = self.logger.batch()
+    :type grace_period: float
+    :param grace_period: The amount of time to wait for pending logs to
+        be submitted when the process is shutting down.

+    :type max_batch_size: int
+    :param max_batch_size: The maximum number of items to send at a time
+        in the background thread.
+    """
+
+    def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD,
+                 max_batch_size=_DEFAULT_MAX_BATCH_SIZE):
+        self._cloud_logger = cloud_logger
+        self._grace_period = grace_period
+        self._max_batch_size = max_batch_size
+        self._queue = queue.Queue(0)
+        self._operational_lock = threading.Lock()
         self._thread = None

-        # Number in seconds of how long to wait for worker to send remaining
-        self._stop_timeout = 5
+    @property
+    def is_alive(self):
+        """Returns True if the background thread is running."""
+        return self._thread is not None and self._thread.is_alive()

-        self._start()
+    def _safely_commit_batch(self, batch):
+        total_logs = len(batch.entries)

-    def _run(self):
+        try:
+            if total_logs > 0:
+                batch.commit()
+                _LOGGER.debug('Submitted %d logs', total_logs)
+        except Exception:
+            _LOGGER.error(
+                'Failed to submit %d logs.', total_logs, exc_info=True)
+
+    def _thread_main(self):
         """The entry point for the worker thread.

-        Loops until ``stopping`` is set to :data:`True`, and commits batch
-        entries written during :meth:`enqueue`.
+        Pulls pending log entries off the queue and writes them in batches to
+        the Cloud Logger.
         """
-        try:
-            self._entries_condition.acquire()
-            self.started = True
-            while not self.stopping:
-                if len(self.batch.entries) == 0:
-                    # branch coverage of this code extremely flaky
-                    self._entries_condition.wait()  # pragma: NO COVER
-
-                if len(self.batch.entries) > 0:
-                    self.batch.commit()
-        finally:
-            self._entries_condition.release()
-
-        # main thread may be waiting for worker thread to finish writing its
-        # final entries. here we signal that it's done.
-        self._stop_condition.acquire()
-        self._stop_condition.notify()
-        self._stop_condition.release()
-
-    def _start(self):
-        """Called by this class's constructor
-
-        This method is responsible for starting the thread and registering
-        the exit handlers.
+        _LOGGER.debug('Background thread started.')
+
+        quit_ = False
+        while True:
+            batch = self._cloud_logger.batch()
+            items = _get_many(self._queue, max_items=self._max_batch_size)
+
+            for item in items:
+                if item is _WORKER_TERMINATOR:
+                    quit_ = True
+                    # Continue processing items, don't break, try to process
+                    # all items we got back before quitting.
+                else:
+                    batch.log_struct(**item)
+
+            self._safely_commit_batch(batch)
+
+            for _ in range(len(items)):
+                self._queue.task_done()
+
+            if quit_:
+                break
+
+        _LOGGER.debug('Background thread exited gracefully.')
+
+    def start(self):
+        """Starts the background thread.
+
+        Additionally, this registers a handler for process exit to attempt
+        to send any pending log entries before shutdown.
         """
-        try:
-            self._entries_condition.acquire()
+        with self._operational_lock:
+            if self.is_alive:
+                return
+
             self._thread = threading.Thread(
-                target=self._run, name=_WORKER_THREAD_NAME)
-            self._thread.setDaemon(True)
+                target=self._thread_main,
+                name=_WORKER_THREAD_NAME)
+            self._thread.daemon = True
             self._thread.start()
-        finally:
-            self._entries_condition.release()
-
-        atexit.register(self._stop)
+            atexit.register(self._main_thread_terminated)
+
+    def stop(self, grace_period=None):
+        """Signals the background thread to stop.

-    def _stop(self):
-        """Signals the worker thread to shut down
+        This does not terminate the background thread. It simply queues the
+        stop signal. If the main process exits before the background thread
+        processes the stop signal, it will be terminated without finishing
+        work. The ``grace_period`` parameter will give the background
+        thread some time to finish processing before this function returns.

-        Also waits for ``stop_timeout`` seconds for the worker to finish.
+        :type grace_period: float
+        :param grace_period: If specified, this method will block up to this
+            many seconds to allow the background thread to finish work before
+            returning.

-        This method is called by the ``atexit`` handler registered by
-        :meth:`start`.
+        :rtype: bool
+        :returns: True if the thread terminated. False if the thread is still
+            running.
         """
-        if not self.started or self.stopping:
-            return
+        if not self.is_alive:
+            return True
+
+        with self._operational_lock:
+            self._queue.put_nowait(_WORKER_TERMINATOR)
+
+            if grace_period is not None:
+                print('Waiting up to %d seconds.' % (grace_period,))

-        # lock the stop condition first so that the worker
-        # thread can't notify it's finished before we wait
-        self._stop_condition.acquire()
+            self._thread.join(timeout=grace_period)

-        # now notify the worker thread to shutdown
-        self._entries_condition.acquire()
-        self.stopping = True
-        self._entries_condition.notify()
-        self._entries_condition.release()
+            # Check this before disowning the thread, because after we disown
+            # the thread is_alive will be False regardless of if the thread
+            # exited or not.
+            success = not self.is_alive

-        # now wait for it to signal it's finished
-        self._stop_condition.wait(self._stop_timeout)
-        self._stop_condition.release()
-        self.stopped = True
+            self._thread = None
+
+            return success
+
+    def _main_thread_terminated(self):
+        """Callback that attempts to send pending logs before termination."""
+        if not self.is_alive:
+            return
+
+        if not self._queue.empty():
+            print(
+                'Program shutting down, attempting to send %d queued log '
+                'entries to Stackdriver Logging...' % (self._queue.qsize(),))
+
+        if self.stop(self._grace_period):
+            print('Sent all pending logs.')
+        else:
+            print('Failed to send %d pending logs.'
+                  % (self._queue.qsize(),))

     def enqueue(self, record, message):
-        """Queues up a log entry to be written by the background thread."""
-        try:
-            self._entries_condition.acquire()
-            if self.stopping:
-                return
-            info = {'message': message, 'python_logger': record.name}
-            self.batch.log_struct(info, severity=record.levelname)
-            self._entries_condition.notify()
-        finally:
-            self._entries_condition.release()
+        """Queues a log entry to be written by the background thread.
+
+        :type record: :class:`logging.LogRecord`
+        :param record: Python log record that the handler was called with.
+
+        :type message: str
+        :param message: The message from the ``LogRecord`` after being
+            formatted by the associated log formatters.
+        """
+        self._queue.put_nowait({
+            'info': {
+                'message': message,
+                'python_logger': record.name,
+            },
+            'severity': record.levelname,
+        })


 class BackgroundThreadTransport(Transport):
-    """Aysnchronous transport that uses a background thread.
+    """Asynchronous transport that uses a background thread.
+
+    :type client: :class:`~google.cloud.logging.client.Client`
+    :param client: The Logging client.
+
+    :type name: str
+    :param name: the name of the logger.
+
+    :type grace_period: float
+    :param grace_period: The amount of time to wait for pending logs to
+        be submitted when the process is shutting down.

-    Writes logging entries as a batch process.
+    :type batch_size: int
+    :param batch_size: The maximum number of items to send at a time in the
+        background thread.
     """

-    def __init__(self, client, name):
+    def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD,
+                 batch_size=_DEFAULT_MAX_BATCH_SIZE):
         http = copy.deepcopy(client._http)
         self.client = client.__class__(
             client.project, client._credentials, http)
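For orientation between the hunks above and the test changes below: the rewritten worker is driven entirely through its queue. `enqueue()` performs a non-blocking put, `_thread_main()` drains items in chunks of at most `max_batch_size` via `_get_many()`, and the `_WORKER_TERMINATOR` sentinel ends the loop. A hypothetical usage sketch, assuming an already-configured `client` (normally the transport, not user code, manages `_Worker`):

import logging

from google.cloud.logging import Client
from google.cloud.logging.handlers.transports import background_thread

client = Client()  # assumes ambient project/credentials
worker = background_thread._Worker(client.logger('example_log'))
worker.start()  # spawns the daemon thread, registers the atexit hook

record = logging.LogRecord(
    'mylogger', logging.INFO, None, None, 'hello world', None, None)
worker.enqueue(record, 'hello world')  # non-blocking put on the queue

# Ask the thread to drain and exit; returns True if it finished within
# the grace period.
worker.stop(grace_period=5.0)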
diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py
index 7f6447c56924..5d4751a955a5 100644
--- a/packages/google-cloud-logging/nox.py
+++ b/packages/google-cloud-logging/nox.py
@@ -39,7 +39,7 @@ def unit_tests(session, python_version):
         'py.test', '--quiet',
         '--cov=google.cloud.logging', '--cov=tests.unit', '--cov-append',
         '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97',
-        'tests/unit',
+        'tests/unit', *session.posargs
     )

@@ -63,7 +63,7 @@ def system_tests(session, python_version):
     session.install('.')

     # Run py.test against the system tests.
-    session.run('py.test', '-vvv', 'tests/system.py')
+    session.run('py.test', '-vvv', 'tests/system.py', *session.posargs)


 @nox.session
diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
index 3952a0b6422c..177c30e10863 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
@@ -13,12 +13,12 @@
 # limitations under the License.

 import logging
-import time
 import unittest

+import mock

-class TestBackgroundThreadHandler(unittest.TestCase):

+class TestBackgroundThreadHandler(unittest.TestCase):
     PROJECT = 'PROJECT'

     @staticmethod
@@ -29,36 +29,41 @@ def _get_target_class():
         return BackgroundThreadTransport

     def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
+        worker_patch = mock.patch(
+            'google.cloud.logging.handlers.transports.'
+            'background_thread._Worker',
+            autospec=True)
+        with worker_patch as worker_mock:
+            return self._get_target_class()(*args, **kw), worker_mock
+
+    def test_constructor(self):
         client = _Client(self.PROJECT)
-        NAME = 'python_logger'
-        transport = self._make_one(client, NAME)
-        self.assertEqual(transport.worker.logger.name, NAME)
+        name = 'python_logger'
+
+        transport, worker = self._make_one(client, name)
+
+        logger, = worker.call_args[0]  # call_args[0] is *args.
+        self.assertEqual(logger.name, name)

     def test_send(self):
         client = _Client(self.PROJECT)
-        NAME = 'python_logger'
-        transport = self._make_one(client, NAME)
-        transport.worker.batch = client.logger(NAME).batch()
+        name = 'python_logger'
+
+        transport, _ = self._make_one(client, name)

         python_logger_name = 'mylogger'
         message = 'hello world'
-        record = logging.LogRecord(python_logger_name, logging.INFO,
-                                   None, None, message, None, None)
+        record = logging.LogRecord(
+            python_logger_name, logging.INFO,
+            None, None, message, None, None)
+
         transport.send(record, message)

-        EXPECTED_STRUCT = {
-            'message': message,
-            'python_logger': python_logger_name
-        }
-        EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO')
-        self.assertEqual(transport.worker.batch.log_struct_called_with,
-                         EXPECTED_SENT)
+        transport.worker.enqueue.assert_called_once_with(record, message)


-class TestWorker(unittest.TestCase):
+class Test_Worker(unittest.TestCase):
+    NAME = 'python_logger'

     @staticmethod
     def _get_target_class():
@@ -69,79 +74,188 @@ def _get_target_class():
     def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)

-    def test_ctor(self):
-        NAME = 'python_logger'
-        logger = _Logger(NAME)
-        worker = self._make_one(logger)
-        self.assertEqual(worker.batch, logger._batch)
+    def _start_with_thread_patch(self, worker):
+        with mock.patch('threading.Thread', new=_Thread) as thread_mock:
+            with mock.patch('atexit.register') as atexit_mock:
+                worker.start()
+        return thread_mock, atexit_mock

-    def test_run(self):
-        NAME = 'python_logger'
-        logger = _Logger(NAME)
-        worker = self._make_one(logger)
+    def test_constructor(self):
+        logger = _Logger(self.NAME)
+        grace_period = 50
+        max_batch_size = 50

-        python_logger_name = 'mylogger'
-        message = 'hello world'
-        record = logging.LogRecord(python_logger_name,
-                                   logging.INFO, None, None,
-                                   message, None, None)
+        worker = self._make_one(
+            logger, grace_period=grace_period, max_batch_size=max_batch_size)

-        worker._start()
+        self.assertEqual(worker._cloud_logger, logger)
+        self.assertEqual(worker._grace_period, grace_period)
+        self.assertEqual(worker._max_batch_size, max_batch_size)
+        self.assertFalse(worker.is_alive)
+        self.assertIsNone(worker._thread)

-        # first sleep is for branch coverage - ensure condition
-        # where queue is empty occurs
-        time.sleep(1)
-        # second polling is to avoid starting/stopping worker
-        # before anything ran
-        while not worker.started:
-            time.sleep(1)  # pragma: NO COVER
+    def test_start(self):
+        from google.cloud.logging.handlers.transports import background_thread

-        worker.enqueue(record, message)
-        # Set timeout to none so worker thread finishes
-        worker._stop_timeout = None
-        worker._stop()
-        self.assertTrue(worker.batch.commit_called)
+        worker = self._make_one(_Logger(self.NAME))

-    def test_run_after_stopped(self):
-        # No-op
-        name = 'python_logger'
-        logger = _Logger(name)
-        worker = self._make_one(logger)
+        _, atexit_mock = self._start_with_thread_patch(worker)

-        python_logger_name = 'mylogger'
-        message = 'hello world'
-        record = logging.LogRecord(python_logger_name,
-                                   logging.INFO, None, None,
-                                   message, None, None)
-
-        worker._start()
-        while not worker.started:
-            time.sleep(1)  # pragma: NO COVER
-        worker._stop_timeout = None
-        worker._stop()
-        worker.enqueue(record, message)
-        self.assertFalse(worker.batch.commit_called)
-        worker._stop()
+        self.assertTrue(worker.is_alive)
+        self.assertIsNotNone(worker._thread)
+        self.assertTrue(worker._thread.daemon)
+        self.assertEqual(worker._thread._target, worker._thread_main)
+        self.assertEqual(
+            worker._thread._name, background_thread._WORKER_THREAD_NAME)
+        atexit_mock.assert_called_once_with(worker._main_thread_terminated)

-    def test_run_enqueue_early(self):
-        # No-op
-        NAME = 'python_logger'
-        logger = _Logger(NAME)
-        worker = self._make_one(logger)
+        # Calling start again should not start a new thread.
+        current_thread = worker._thread
+        self._start_with_thread_patch(worker)
+        self.assertIs(current_thread, worker._thread)

-        python_logger_name = 'mylogger'
-        message = 'hello world'
-        record = logging.LogRecord(python_logger_name,
-                                   logging.INFO, None, None,
-                                   message, None, None)
+    def test_stop(self):
+        from google.cloud.logging.handlers.transports import background_thread
+
+        grace_period = 5.0
+        worker = self._make_one(_Logger(self.NAME))
+
+        self._start_with_thread_patch(worker)
+        thread = worker._thread
+
+        worker.stop(grace_period)
+
+        self.assertEqual(worker._queue.qsize(), 1)
+        self.assertEqual(
+            worker._queue.get(), background_thread._WORKER_TERMINATOR)
+        self.assertFalse(worker.is_alive)
+        self.assertIsNone(worker._thread)
+        self.assertEqual(thread._timeout, grace_period)
+
+        # Stopping twice should not be an error
+        worker.stop()
+
+    def test_stop_no_grace(self):
+        worker = self._make_one(_Logger(self.NAME))
+
+        self._start_with_thread_patch(worker)
+        thread = worker._thread
+
+        worker.stop()
+
+        self.assertEqual(thread._timeout, None)
+
+    def test__main_thread_terminated(self):
+        worker = self._make_one(_Logger(self.NAME))
+
+        self._start_with_thread_patch(worker)
+        worker._main_thread_terminated()
+
+        self.assertFalse(worker.is_alive)
+
+        # Calling twice should not be an error
+        worker._main_thread_terminated()
+
+    def test__main_thread_terminated_non_empty_queue(self):
+        worker = self._make_one(_Logger(self.NAME))
+        self._start_with_thread_patch(worker)
+        worker.enqueue(mock.Mock(), '')
+        worker._main_thread_terminated()
+
+        self.assertFalse(worker.is_alive)
+
+    def test__main_thread_terminated_did_not_join(self):
+        worker = self._make_one(_Logger(self.NAME))
+
+        self._start_with_thread_patch(worker)
+        worker._thread._terminate_on_join = False
+        worker.enqueue(mock.Mock(), '')
+        worker._main_thread_terminated()
+
+        self.assertFalse(worker.is_alive)
+
+    @staticmethod
+    def _enqueue_record(worker, message):
+        record = logging.LogRecord(
+            'python_logger', logging.INFO,
+            None, None, message, None, None)
         worker.enqueue(record, message)

-        worker._start()
-        while not worker.started:
-            time.sleep(1)  # pragma: NO COVER
-        worker._stop_timeout = None
-        worker._stop()
-        self.assertTrue(worker.stopped)
+    def test__thread_main(self):
+        from google.cloud.logging.handlers.transports import background_thread
+
+        worker = self._make_one(_Logger(self.NAME))
+
+        # Enqueue two records and the termination signal.
+        self._enqueue_record(worker, '1')
+        self._enqueue_record(worker, '2')
+        worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)
+
+        worker._thread_main()
+
+        self.assertTrue(worker._cloud_logger._batch.commit_called)
+        self.assertEqual(worker._cloud_logger._batch.commit_count, 2)
+        self.assertEqual(worker._queue.qsize(), 0)
+
+    def test__thread_main_error(self):
+        from google.cloud.logging.handlers.transports import background_thread
+
+        worker = self._make_one(_Logger(self.NAME))
+        worker._cloud_logger._batch_cls = _RaisingBatch
+
+        # Enqueue one record and the termination signal.
+        self._enqueue_record(worker, '1')
+        worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)
+
+        worker._thread_main()
+
+        self.assertTrue(worker._cloud_logger._batch.commit_called)
+        self.assertEqual(worker._queue.qsize(), 0)
+
+    def test__thread_main_batches(self):
+        from google.cloud.logging.handlers.transports import background_thread
+
+        worker = self._make_one(_Logger(self.NAME), max_batch_size=2)
+
+        # Enqueue four records and the termination signal. This should be
+        # enough to perform two separate batches and a third loop with just
+        # the exit.
+        self._enqueue_record(worker, '1')
+        self._enqueue_record(worker, '2')
+        self._enqueue_record(worker, '3')
+        self._enqueue_record(worker, '4')
+        worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)
+
+        worker._thread_main()
+
+        # The last batch should not have been executed because it had no items.
+        self.assertFalse(worker._cloud_logger._batch.commit_called)
+        self.assertEqual(worker._queue.qsize(), 0)
+
+
+class _Thread(object):
+
+    def __init__(self, target, name):
+        self._target = target
+        self._name = name
+        self._timeout = None
+        self._terminate_on_join = True
+        self.daemon = False
+
+    def is_alive(self):
+        return self._is_alive
+
+    def start(self):
+        self._is_alive = True
+
+    def stop(self):
+        self._is_alive = False
+
+    def join(self, timeout=None):
+        self._timeout = timeout
+        if self._terminate_on_join:
+            self.stop()


 class _Batch(object):
@@ -149,23 +263,33 @@ class _Batch(object):
     def __init__(self):
         self.entries = []
         self.commit_called = False
+        self.commit_count = None

-    def log_struct(self, record, severity=logging.INFO):
-        self.log_struct_called_with = (record, severity)
-        self.entries.append(record)
+    def log_struct(self, info, severity=logging.INFO):
+        self.log_struct_called_with = (info, severity)
+        self.entries.append(info)

     def commit(self):
         self.commit_called = True
+        self.commit_count = len(self.entries)
         del self.entries[:]


+class _RaisingBatch(_Batch):
+    def commit(self):
+        self.commit_called = True
+        raise ValueError('This batch raises on commit.')
+
+
 class _Logger(object):

     def __init__(self, name):
         self.name = name
+        self._batch_cls = _Batch
+        self._batch = None

     def batch(self):
-        self._batch = _Batch()
+        self._batch = self._batch_cls()
         return self._batch
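Before the next commit, the shutdown handshake the overhaul introduced is worth isolating: `stop()` enqueues a sentinel that tells the daemon thread to drain and exit, and the `atexit` hook bounds the wait with a grace period. A self-contained sketch of the same pattern, with all names illustrative rather than taken from the patch:

import atexit
import threading

from six.moves import queue

_TERMINATOR = object()
_queue = queue.Queue()

def _consume():
    # Drain items until the sentinel arrives, mirroring _thread_main().
    while True:
        item = _queue.get()
        _queue.task_done()
        if item is _TERMINATOR:
            return

_thread = threading.Thread(target=_consume)
_thread.daemon = True
_thread.start()

def _on_exit(grace_period=5.0):
    # Queue the stop signal, then wait at most grace_period seconds, as
    # _Worker.stop() does; a daemon thread cannot block interpreter exit.
    _queue.put_nowait(_TERMINATOR)
    _thread.join(timeout=grace_period)

atexit.register(_on_exit)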
From 2b2f8cf718902fe4252f8991e19a478c1d218341 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Fri, 12 May 2017 15:09:05 -0700
Subject: [PATCH 099/855] Call start when creating a background thread
 transport (#3412)

---
 .../cloud/logging/handlers/transports/background_thread.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
index 4b651243be45..7ee80d68e5ef 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
@@ -247,6 +247,7 @@ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD,
             client.project, client._credentials, http)
         logger = self.client.logger(name)
         self.worker = _Worker(logger)
+        self.worker.start()

     def send(self, record, message):
         """Overrides Transport.send().

From 614e228fc716279ba433719f7846127342bf067e Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Fri, 12 May 2017 15:48:34 -0700
Subject: [PATCH 100/855] Implement flush for cloud logging handlers (#3413)

---
 .../handlers/transports/background_thread.py  |  8 ++++++++
 .../cloud/logging/handlers/transports/base.py |  6 ++++++
 packages/google-cloud-logging/tests/system.py |  1 +
 .../transports/test_background_thread.py      | 19 +++++++++++++++++++
 .../unit/handlers/transports/test_base.py     |  4 ++++
 5 files changed, 38 insertions(+)

diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
index 7ee80d68e5ef..b179ec2ab876 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
@@ -221,6 +221,10 @@ def enqueue(self, record, message):
             'severity': record.levelname,
         })

+    def flush(self):
+        """Submit any pending log records."""
+        self._queue.join()
+

 class BackgroundThreadTransport(Transport):
     """Asynchronous transport that uses a background thread.
@@ -260,3 +264,7 @@ def send(self, record, message):
             formatted by the associated log formatters.
         """
         self.worker.enqueue(record, message)
+
+    def flush(self):
+        """Submit any pending log records."""
+        self.worker.flush()
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py
index 09711231bce2..4fbb7964146c 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py
@@ -33,3 +33,9 @@ def send(self, record, message):
             formatted by the associated log formatters.
         """
         raise NotImplementedError
+
+    def flush(self):
+        """Submit any pending log records.
+
+        For blocking/sync transports, this is a no-op.
+ """ diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 075ff5ffd6cc..70a950f15b91 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -247,6 +247,7 @@ def test_log_handler_async(self): cloud_logger = logging.getLogger(handler.name) cloud_logger.addHandler(handler) cloud_logger.warn(LOG_MESSAGE) + handler.flush() entries = _list_entries(logger) expected_payload = { 'message': LOG_MESSAGE, diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 177c30e10863..f8770cc5d127 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -16,6 +16,7 @@ import unittest import mock +from six.moves import queue class TestBackgroundThreadHandler(unittest.TestCase): @@ -61,6 +62,16 @@ def test_send(self): transport.worker.enqueue.assert_called_once_with(record, message) + def test_flush(self): + client = _Client(self.PROJECT) + name = 'python_logger' + + transport, _ = self._make_one(client, name) + + transport.flush() + + transport.worker.flush.assert_called() + class Test_Worker(unittest.TestCase): NAME = 'python_logger' @@ -233,6 +244,14 @@ def test__thread_main_batches(self): self.assertFalse(worker._cloud_logger._batch.commit_called) self.assertEqual(worker._queue.qsize(), 0) + def test_flush(self): + worker = self._make_one(_Logger(self.NAME)) + worker._queue = mock.Mock(spec=queue.Queue) + + # Queue is empty, should not block. + worker.flush() + worker._queue.join.assert_called() + class _Thread(object): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index 2844f64fbf5a..29aca81bab3f 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -32,3 +32,7 @@ def test_send_is_abstract(self): target = self._make_one() with self.assertRaises(NotImplementedError): target.send(None, None) + + def test_flush_is_abstract_and_optional(self): + target = self._make_one() + target.flush() From 54395b9e2a539961a5b371cdca2bf75e1d626f2b Mon Sep 17 00:00:00 2001 From: Angela Li Date: Wed, 17 May 2017 15:15:31 -0700 Subject: [PATCH 101/855] Stop writing to '/var/log/app_engine/' and write logs to Stackdriver logging API (#3410) --- .../google/cloud/logging/client.py | 2 +- .../cloud/logging/handlers/app_engine.py | 82 +++++++++---------- .../google/cloud/logging/handlers/handlers.py | 11 ++- .../handlers/transports/background_thread.py | 13 ++- .../cloud/logging/handlers/transports/base.py | 5 +- .../cloud/logging/handlers/transports/sync.py | 6 +- .../tests/unit/handlers/test_app_engine.py | 66 +++++++++------ .../tests/unit/handlers/test_handlers.py | 10 ++- .../transports/test_background_thread.py | 16 +++- .../unit/handlers/transports/test_base.py | 2 +- .../unit/handlers/transports/test_sync.py | 11 ++- .../tests/unit/test_client.py | 22 ++--- 12 files changed, 144 insertions(+), 102 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 34bf8a3074e9..cb6d9d70fb4f 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -303,7 +303,7 @@ def get_default_handler(self): """ if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ): - return AppEngineHandler() + return AppEngineHandler(self) elif _CONTAINER_ENGINE_ENV in os.environ: return ContainerEngineHandler() else: diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 4184c2054b1a..c7394f32262d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -14,60 +14,56 @@ """Logging handler for App Engine Flexible -Logs to the well-known file that the fluentd sidecar container on App Engine -Flexible is configured to read from and send to Stackdriver Logging. - -See the fluentd configuration here: - -https://github.com/GoogleCloudPlatform/appengine-sidecars-docker/tree/master/fluentd_logger +Sends logs to the Stackdriver Logging API with the appropriate resource +and labels for App Engine logs. """ -# This file is largely copied from: -# https://github.com/GoogleCloudPlatform/python-compat-runtime/blob/master -# /appengine-vmruntime/vmruntime/cloud_logging.py - -import logging.handlers import os -from google.cloud.logging.handlers._helpers import format_stackdriver_json +from google.cloud.logging.handlers.handlers import CloudLoggingHandler +from google.cloud.logging.handlers.transports import BackgroundThreadTransport +from google.cloud.logging.resource import Resource -_LOG_PATH_TEMPLATE = '/var/log/app_engine/app.{pid}.json' -_MAX_LOG_BYTES = 128 * 1024 * 1024 -_LOG_FILE_COUNT = 3 +_DEFAULT_GAE_LOGGER_NAME = 'app' +_GAE_PROJECT_ENV = 'GCLOUD_PROJECT' +_GAE_SERVICE_ENV = 'GAE_SERVICE' +_GAE_VERSION_ENV = 'GAE_VERSION' -class AppEngineHandler(logging.handlers.RotatingFileHandler): - """A handler that writes to the App Engine fluentd Stackdriver log file. - Writes to the file that the fluentd agent on App Engine Flexible is - configured to discover logs and send them to Stackdriver Logging. - Log entries are wrapped in JSON and with appropriate metadata. The - process of converting the user's formatted logs into a JSON payload for - Stackdriver Logging consumption is implemented as part of the handler - itself, and not as a formatting step, so as not to interfere with - user-defined logging formats. - """ +class AppEngineHandler(CloudLoggingHandler): + """A logging handler that sends App Engine-formatted logs to Stackdriver. - def __init__(self): - """Construct the handler + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The authenticated Google Cloud Logging client for this + handler to use. - Large log entries will get mangled if multiple workers write to the - same file simultaneously, so we'll use the worker's PID to pick a log - filename. - """ - self.filename = _LOG_PATH_TEMPLATE.format(pid=os.getpid()) - super(AppEngineHandler, self).__init__(self.filename, - maxBytes=_MAX_LOG_BYTES, - backupCount=_LOG_FILE_COUNT) + :type transport: type + :param transport: The transport class. It should be a subclass + of :class:`.Transport`. If unspecified, + :class:`.BackgroundThreadTransport` will be used. + """ - def format(self, record): - """Format the specified record into the expected JSON structure. 
+ def __init__(self, client, + transport=BackgroundThreadTransport): + super(AppEngineHandler, self).__init__( + client, + name=_DEFAULT_GAE_LOGGER_NAME, + transport=transport, + resource=self.get_gae_resource()) - :type record: :class:`~logging.LogRecord` - :param record: the log record + def get_gae_resource(self): + """Return the GAE resource using the environment variables. - :rtype: str - :returns: JSON str to be written to the log file + :rtype: :class:`~google.cloud.logging.resource.Resource` + :returns: Monitored resource for GAE. """ - message = super(AppEngineHandler, self).format(record) - return format_stackdriver_json(record, message) + gae_resource = Resource( + type='gae_app', + labels={ + 'project_id': os.environ.get(_GAE_PROJECT_ENV), + 'module_id': os.environ.get(_GAE_SERVICE_ENV), + 'version_id': os.environ.get(_GAE_VERSION_ENV), + }, + ) + return gae_resource diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index 62ec6c6d561a..2269c2858f33 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -17,6 +17,7 @@ import logging from google.cloud.logging.handlers.transports import BackgroundThreadTransport +from google.cloud.logging.logger import _GLOBAL_RESOURCE DEFAULT_LOGGER_NAME = 'python' @@ -52,6 +53,10 @@ class CloudLoggingHandler(logging.StreamHandler): :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry, defaults + to the global resource type. + Example: .. code-block:: python @@ -73,11 +78,13 @@ class CloudLoggingHandler(logging.StreamHandler): def __init__(self, client, name=DEFAULT_LOGGER_NAME, - transport=BackgroundThreadTransport): + transport=BackgroundThreadTransport, + resource=_GLOBAL_RESOURCE): super(CloudLoggingHandler, self).__init__() self.name = name self.client = client self.transport = transport(client, name) + self.resource = resource def emit(self, record): """Actually log the specified logging record. @@ -90,7 +97,7 @@ def emit(self, record): :param record: The record to be logged. """ message = super(CloudLoggingHandler, self).format(record) - self.transport.send(record, message) + self.transport.send(record, message, resource=self.resource) def setup_logging(handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index b179ec2ab876..010c06b36bc9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -203,7 +203,7 @@ def _main_thread_terminated(self): else: print('Failed to send %d pending logs.' % (self._queue.qsize(),)) - def enqueue(self, record, message): + def enqueue(self, record, message, resource=None): """Queues a log entry to be written by the background thread. :type record: :class:`logging.LogRecord` @@ -212,6 +212,9 @@ def enqueue(self, record, message): :type message: str :param message: The message from the ``LogRecord`` after being formatted by the associated log formatters. 
+ + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry """ self._queue.put_nowait({ 'info': { @@ -219,6 +222,7 @@ def enqueue(self, record, message): 'python_logger': record.name, }, 'severity': record.levelname, + 'resource': resource, }) def flush(self): @@ -253,7 +257,7 @@ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, self.worker = _Worker(logger) self.worker.start() - def send(self, record, message): + def send(self, record, message, resource=None): """Overrides Transport.send(). :type record: :class:`logging.LogRecord` @@ -262,8 +266,11 @@ def send(self, record, message): :type message: str :param message: The message from the ``LogRecord`` after being formatted by the associated log formatters. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. """ - self.worker.enqueue(record, message) + self.worker.enqueue(record, message, resource=resource) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py index 4fbb7964146c..21957021793f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -22,7 +22,7 @@ class Transport(object): client and name object, and must override :meth:`send`. """ - def send(self, record, message): + def send(self, record, message, resource=None): """Transport send to be implemented by subclasses. :type record: :class:`logging.LogRecord` @@ -31,6 +31,9 @@ def send(self, record, message): :type message: str :param message: The message from the ``LogRecord`` after being formatted by the associated log formatters. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. """ raise NotImplementedError diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index eec5ffecf6ee..0dd6e0bd7e24 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -29,7 +29,7 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send(self, record, message): + def send(self, record, message, resource=None): """Overrides transport.send(). :type record: :class:`logging.LogRecord` @@ -40,4 +40,6 @@ def send(self, record, message): formatted by the associated log formatters. """ info = {'message': message, 'python_logger': record.name} - self.logger.log_struct(info, severity=record.levelname) + self.logger.log_struct(info, + severity=record.levelname, + resource=resource) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 9be8a2bec9b3..c39328593f7a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
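For context on the new handler exercised by these tests: it attaches a `gae_app` monitored resource, built from the App Engine runtime's environment, to every entry. A sketch of the equivalent construction; the `gae_resource` helper name is illustrative, while the environment variable names are the ones the module's `_GAE_*_ENV` constants read:

import os

from google.cloud.logging.resource import Resource

def gae_resource():
    # Mirrors AppEngineHandler.get_gae_resource(): identify the running
    # service/version so entries group under the right GAE resource.
    return Resource(
        type='gae_app',
        labels={
            'project_id': os.environ.get('GCLOUD_PROJECT'),
            'module_id': os.environ.get('GAE_SERVICE'),
            'version_id': os.environ.get('GAE_VERSION'),
        },
    )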
+import logging import unittest @@ -24,34 +25,47 @@ def _get_target_class(self): return AppEngineHandler def _make_one(self, *args, **kw): - import tempfile + return self._get_target_class()(*args, **kw) - from google.cloud._testing import _Monkey - from google.cloud.logging.handlers import app_engine as _MUT + def test_constructor(self): + import mock + from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV + from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV + from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV - tmpdir = tempfile.mktemp() - with _Monkey(_MUT, _LOG_PATH_TEMPLATE=tmpdir): - return self._get_target_class()(*args, **kw) + client = mock.Mock(project=self.PROJECT, spec=['project']) + with mock.patch('os.environ', new={_GAE_PROJECT_ENV: 'test_project', + _GAE_SERVICE_ENV: 'test_service', + _GAE_VERSION_ENV: 'test_version'}): + handler = self._make_one(client, transport=_Transport) + self.assertIs(handler.client, client) + self.assertEqual(handler.resource.type, 'gae_app') + self.assertEqual(handler.resource.labels['project_id'], 'test_project') + self.assertEqual(handler.resource.labels['module_id'], 'test_service') + self.assertEqual(handler.resource.labels['version_id'], 'test_version') - def test_format(self): - import json - import logging + def test_emit(self): + import mock - handler = self._make_one() - logname = 'loggername' + client = mock.Mock(project=self.PROJECT, spec=['project']) + handler = self._make_one(client, transport=_Transport) + gae_resource = handler.get_gae_resource() + logname = 'app' message = 'hello world' - record = logging.LogRecord(logname, logging.INFO, None, - None, message, None, None) - record.created = 5.03 - expected_payload = { - 'message': message, - 'timestamp': { - 'seconds': 5, - 'nanos': int(.03 * 1e9), - }, - 'thread': record.thread, - 'severity': record.levelname, - } - payload = handler.format(record) - - self.assertEqual(payload, json.dumps(expected_payload)) + record = logging.LogRecord(logname, logging, None, None, message, + None, None) + handler.emit(record) + + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, logname) + self.assertEqual(handler.transport.send_called_with, (record, message, gae_resource)) + + +class _Transport(object): + + def __init__(self, client, name): + self.client = client + self.name = name + + def send(self, record, message, resource): + self.send_called_with = (record, message, resource) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 26d3e6352024..05dc87631478 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -35,15 +35,17 @@ def test_ctor(self): self.assertEqual(handler.client, client) def test_emit(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + client = _Client(self.PROJECT) - handler = self._make_one(client, transport=_Transport) + handler = self._make_one(client, transport=_Transport, resource=_GLOBAL_RESOURCE) logname = 'loggername' message = 'hello world' record = logging.LogRecord(logname, logging, None, None, message, None, None) handler.emit(record) - self.assertEqual(handler.transport.send_called_with, (record, message)) + self.assertEqual(handler.transport.send_called_with, (record, message, _GLOBAL_RESOURCE)) class TestSetupLogging(unittest.TestCase): @@ -108,5 +110,5 
@@ class _Transport(object): def __init__(self, client, name): pass - def send(self, record, message): - self.send_called_with = (record, message) + def send(self, record, message, resource): + self.send_called_with = (record, message, resource) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index f8770cc5d127..ccc244fa65c8 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -47,6 +47,8 @@ def test_constructor(self): self.assertEqual(logger.name, name) def test_send(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + client = _Client(self.PROJECT) name = 'python_logger' @@ -54,13 +56,14 @@ def test_send(self): python_logger_name = 'mylogger' message = 'hello world' + record = logging.LogRecord( python_logger_name, logging.INFO, None, None, message, None, None) - transport.send(record, message) + transport.send(record, message, _GLOBAL_RESOURCE) - transport.worker.enqueue.assert_called_once_with(record, message) + transport.worker.enqueue.assert_called_once_with(record, message, _GLOBAL_RESOURCE) def test_flush(self): client = _Client(self.PROJECT) @@ -284,8 +287,13 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct(self, info, severity=logging.INFO): - self.log_struct_called_with = (info, severity) + def log_struct(self, info, severity=logging.INFO, resource=None): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + if resource is None: + resource = _GLOBAL_RESOURCE + + self.log_struct_called_with = (info, severity, resource) self.entries.append(info) def commit(self): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index 29aca81bab3f..f797e0b09338 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -31,7 +31,7 @@ def _make_one(self, *args, **kw): def test_send_is_abstract(self): target = self._make_one() with self.assertRaises(NotImplementedError): - target.send(None, None) + target.send(None, None, None) def test_flush_is_abstract_and_optional(self): target = self._make_one() diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 562a7175380b..475ecc9c6a71 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -36,6 +36,8 @@ def test_ctor(self): self.assertEqual(transport.logger.name, 'python_logger') def test_send(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + client = _Client(self.PROJECT) stackdriver_logger_name = 'python' @@ -45,23 +47,24 @@ def test_send(self): record = logging.LogRecord(python_logger_name, logging.INFO, None, None, message, None, None) - transport.send(record, message) + transport.send(record, message, _GLOBAL_RESOURCE) EXPECTED_STRUCT = { 'message': message, 'python_logger': python_logger_name, } - EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO') + EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE) self.assertEqual( 
transport.logger.log_struct_called_with, EXPECTED_SENT) class _Logger(object): + from google.cloud.logging.logger import _GLOBAL_RESOURCE def __init__(self, name): self.name = name - def log_struct(self, message, severity=None): - self.log_struct_called_with = (message, severity) + def log_struct(self, message, severity=None, resource=_GLOBAL_RESOURCE): + self.log_struct_called_with = (message, severity, resource) class _Client(object): diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 6cd7c42926f6..1655dd7ad1c6 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -560,23 +560,23 @@ def test_list_metrics_with_paging(self): }) def test_get_default_handler_app_engine(self): + import httplib2 import os from google.cloud._testing import _Monkey - from google.cloud._testing import _tempdir from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM - from google.cloud.logging.handlers import app_engine as _MUT from google.cloud.logging.handlers import AppEngineHandler - client = self._make_one(project=self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) + http_mock = mock.Mock(spec=httplib2.Http) + credentials = _make_credentials() + deepcopy = mock.Mock(return_value=http_mock) - with _tempdir() as tempdir: - temp_log_path = os.path.join(tempdir, '{pid}') - with _Monkey(_MUT, _LOG_PATH_TEMPLATE=temp_log_path): - with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): - handler = client.get_default_handler() - handler.close() # allow tempdir cleanup on Windows + with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): + with mock.patch('copy.deepcopy', new=deepcopy): + client = self._make_one(project=self.PROJECT, + credentials=credentials, + _use_grpc=False) + handler = client.get_default_handler() + deepcopy.assert_called_once_with(client._http) self.assertIsInstance(handler, AppEngineHandler) From d24218e81e0713b45482957944381ce9b8d520cf Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 19 May 2017 16:37:47 -0400 Subject: [PATCH 102/855] Remove uncovered / pointless branch. 
(#3445) --- .../tests/unit/handlers/transports/test_background_thread.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index ccc244fa65c8..3e3378dcd361 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -290,8 +290,8 @@ def __init__(self): def log_struct(self, info, severity=logging.INFO, resource=None): from google.cloud.logging.logger import _GLOBAL_RESOURCE - if resource is None: - resource = _GLOBAL_RESOURCE + assert resource is None + resource = _GLOBAL_RESOURCE self.log_struct_called_with = (info, severity, resource) self.entries.append(info) From c212ea4c17ccce0af8565f508f983b2e49ccbed7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:36:29 -0700 Subject: [PATCH 103/855] Vision semi-GAPIC (#3373) --- .../google/cloud/logging/_gax.py | 6 ++-- .../google/cloud/logging/_http.py | 34 +++++++++---------- .../google/cloud/logging/client.py | 14 ++++---- .../google/cloud/logging/entries.py | 6 ++-- .../cloud/logging/handlers/app_engine.py | 2 +- .../google/cloud/logging/handlers/handlers.py | 4 +-- .../google/cloud/logging/logger.py | 14 ++++---- .../google/cloud/logging/metric.py | 4 +-- .../google/cloud/logging/sink.py | 4 +-- 9 files changed, 44 insertions(+), 44 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index d1e6196bbebb..3fb648d98f7f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -68,7 +68,7 @@ def list_entries(self, projects, filter_='', order_by='', :type filter_: str :param filter_: - a filter expression. See: + a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str @@ -193,7 +193,7 @@ def list_sinks(self, project, page_size=0, page_token=None): def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type project: str @@ -346,7 +346,7 @@ def list_metrics(self, project, page_size=0, page_token=None): def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type project: str diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 0838e7fe42ac..7ca5c457c25d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -52,7 +52,7 @@ class Connection(_http.JSONConnection): class _LoggingAPI(object): """Helper mapping logging-related APIs. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs @@ -68,7 +68,7 @@ def list_entries(self, projects, filter_=None, order_by=None, page_size=None, page_token=None): """Return a page of log entry resources. 
- See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings @@ -77,7 +77,7 @@ def list_entries(self, projects, filter_=None, order_by=None, :type filter_: str :param filter_: - a filter expression. See: + a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str @@ -127,7 +127,7 @@ def write_entries(self, entries, logger_name=None, resource=None, labels=None): """API call: log an entry resource via a POST request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type entries: sequence of mapping @@ -161,7 +161,7 @@ def write_entries(self, entries, logger_name=None, resource=None, def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete :type project: str @@ -177,7 +177,7 @@ def logger_delete(self, project, logger_name): class _SinksAPI(object): """Helper mapping sink-related APIs. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks :type client: :class:`~google.cloud.logging.client.Client` @@ -190,7 +190,7 @@ def __init__(self, client): def list_sinks(self, project, page_size=None, page_token=None): """List sinks for the project associated with this client. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list :type project: str @@ -224,7 +224,7 @@ def list_sinks(self, project, page_size=None, page_token=None): def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type project: str @@ -252,7 +252,7 @@ def sink_create(self, project, sink_name, filter_, destination): def sink_get(self, project, sink_name): """API call: retrieve a sink resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get :type project: str @@ -270,7 +270,7 @@ def sink_get(self, project, sink_name): def sink_update(self, project, sink_name, filter_, destination): """API call: update a sink resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update :type project: str @@ -301,7 +301,7 @@ def sink_update(self, project, sink_name, filter_, destination): def sink_delete(self, project, sink_name): """API call: delete a sink resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete :type project: str @@ -317,7 +317,7 @@ def sink_delete(self, project, sink_name): class _MetricsAPI(object): """Helper mapping sink-related APIs. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics :type client: :class:`~google.cloud.logging.client.Client` @@ -330,7 +330,7 @@ def __init__(self, client): def list_metrics(self, project, page_size=None, page_token=None): """List metrics for the project associated with this client. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list :type project: str @@ -364,7 +364,7 @@ def list_metrics(self, project, page_size=None, page_token=None): def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. 
- See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type project: str @@ -391,7 +391,7 @@ def metric_create(self, project, metric_name, filter_, description=None): def metric_get(self, project, metric_name): """API call: retrieve a metric resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type project: str @@ -409,7 +409,7 @@ def metric_get(self, project, metric_name): def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type project: str @@ -439,7 +439,7 @@ def metric_update(self, project, metric_name, filter_, description): def metric_delete(self, project, metric_name): """API call: delete a metric resource. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type project: str diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index cb6d9d70fb4f..ca698dde99de 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -115,7 +115,7 @@ def __init__(self, project=None, credentials=None, def logging_api(self): """Helper for logging-related API calls. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ @@ -130,7 +130,7 @@ def logging_api(self): def sinks_api(self): """Helper for log sink-related API calls. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks """ if self._sinks_api is None: @@ -144,7 +144,7 @@ def sinks_api(self): def metrics_api(self): """Helper for log metric-related API calls. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics """ if self._metrics_api is None: @@ -169,7 +169,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): """Return a page of log entries. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings @@ -178,7 +178,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :type filter_: str :param filter_: - a filter expression. See: + a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str @@ -231,7 +231,7 @@ def sink(self, name, filter_=None, destination=None): def list_sinks(self, page_size=None, page_token=None): """List sinks for the project associated with this client. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list :type page_size: int @@ -276,7 +276,7 @@ def metric(self, name, filter_=None, description=''): def list_metrics(self, page_size=None, page_token=None): """List metrics for the project associated with this client. 
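Combined with the ``metric`` factory above, these helpers support a short create-then-list lifecycle. A hedged usage sketch (the metric name, filter, and project are illustrative):

from google.cloud import logging

client = logging.Client(project='my-project')

metric = client.metric(
    'error-count',
    filter_='severity>=ERROR',
    description='Count of error-level entries.')

if not metric.exists():
    metric.create()

for existing in client.list_metrics():
    print(existing.name)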
- See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list :type page_size: int diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 24c8392eba14..d39092c3e324 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -137,7 +137,7 @@ def from_api_repr(cls, resource, client, loggers=None): class TextEntry(_BaseEntry): """Entry created with ``textPayload``. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry """ _PAYLOAD_KEY = 'textPayload' @@ -146,7 +146,7 @@ class TextEntry(_BaseEntry): class StructEntry(_BaseEntry): """Entry created with ``jsonPayload``. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry """ _PAYLOAD_KEY = 'jsonPayload' @@ -155,7 +155,7 @@ class StructEntry(_BaseEntry): class ProtobufEntry(_BaseEntry): """Entry created with ``protoPayload``. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry :type payload: str, dict or any_pb2.Any diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index c7394f32262d..7011819f8a2f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -38,7 +38,7 @@ class AppEngineHandler(CloudLoggingHandler): :param client: The authenticated Google Cloud Logging client for this handler to use. - :type transport: type + :type transport: :class:`type` :param transport: The transport class. It should be a subclass of :class:`.Transport`. If unspecified, :class:`.BackgroundThreadTransport` will be used. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index 2269c2858f33..97afde9f87fb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -46,7 +46,7 @@ class CloudLoggingHandler(logging.StreamHandler): to 'python'. The name of the Python logger will be represented in the ``python_logger`` field. - :type transport: type + :type transport: :class:`type` :param transport: Class for creating new transport objects. It should extend from the base :class:`.Transport` type and implement :meth`.Transport.send`. Defaults to @@ -91,7 +91,7 @@ def emit(self, record): Overrides the default emit behavior of ``StreamHandler``. - See: https://docs.python.org/2/library/logging.html#handler-objects + See https://docs.python.org/2/library/logging.html#handler-objects :type record: :class:`logging.LogRecord` :param record: The record to be logged. diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 874d05014479..a13b06cd260b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -25,7 +25,7 @@ class Logger(object): """Loggers represent named targets for log entries. 
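The entry classes above correspond to the payload keys a logger writes. A hedged sketch producing both kinds (the log name and payloads are illustrative):

from google.cloud import logging

client = logging.Client()
logger = client.logger('my-log')

# Writes a LogEntry with ``textPayload`` (read back as a TextEntry).
logger.log_text('A simple text entry')

# Writes a LogEntry with ``jsonPayload`` (read back as a StructEntry).
logger.log_struct(
    {'message': 'structured entry', 'weather': 'partly cloudy'},
    severity='INFO')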
- See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :type name: str @@ -179,7 +179,7 @@ def log_text(self, text, client=None, labels=None, insert_id=None, resource=_GLOBAL_RESOURCE): """API call: log a text message via a POST request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type text: str @@ -221,7 +221,7 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, resource=_GLOBAL_RESOURCE): """API call: log a structured message via a POST request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type info: dict @@ -263,7 +263,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, resource=_GLOBAL_RESOURCE): """API call: log a protobuf message via a POST request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type message: :class:`~google.protobuf.message.Message` @@ -304,7 +304,7 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete :type client: :class:`~google.cloud.logging.client.Client` or @@ -319,7 +319,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): """Return a page of log entries. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list :type projects: list of strings @@ -328,7 +328,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :type filter_: str :param filter_: - a filter expression. See: + a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters :type order_by: str diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index 8067fb281b23..ff0a4748540a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -20,7 +20,7 @@ class Metric(object): """Metrics represent named filters for log entries. - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics :type name: str @@ -102,7 +102,7 @@ def _require_client(self, client): def create(self, client=None): """API call: create the metric via a PUT request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 184cf36b00e6..3f468e6cf2f0 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -20,7 +20,7 @@ class Sink(object): """Sinks represent filtered exports for log entries. 
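A sink pairs a filter with an export destination. A hedged usage sketch (the bucket name is illustrative; the destination string follows the ``storage.googleapis.com/BUCKET`` form from the sink documentation):

from google.cloud import logging

client = logging.Client()

sink = client.sink(
    'error-export',
    filter_='severity>=ERROR',
    destination='storage.googleapis.com/my-export-bucket')

if not sink.exists():
    sink.create()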
- See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks :type name: str @@ -106,7 +106,7 @@ def _require_client(self, client): def create(self, client=None): """API call: create the sink via a PUT request - See: + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create :type client: :class:`~google.cloud.logging.client.Client` or From d223150455ecedd51b1d4a7f3172676409dc6eb7 Mon Sep 17 00:00:00 2001 From: Angela Li Date: Fri, 2 Jun 2017 15:24:02 -0700 Subject: [PATCH 104/855] Send trace context with logs from web applications (#3448) --- .../google/cloud/logging/handlers/_helpers.py | 66 +++++++ .../cloud/logging/handlers/app_engine.py | 23 ++- .../google/cloud/logging/handlers/handlers.py | 13 +- .../logging/handlers/middleware/__init__.py | 17 ++ .../logging/handlers/middleware/request.py | 45 +++++ .../handlers/transports/background_thread.py | 13 +- .../cloud/logging/handlers/transports/base.py | 5 +- .../cloud/logging/handlers/transports/sync.py | 11 +- packages/google-cloud-logging/nox.py | 4 +- .../unit/handlers/middleware/test_request.py | 86 +++++++++ .../tests/unit/handlers/test__helpers.py | 171 ++++++++++++++++++ .../tests/unit/handlers/test_app_engine.py | 48 ++++- .../tests/unit/handlers/test_handlers.py | 8 +- .../transports/test_background_thread.py | 9 +- .../unit/handlers/transports/test_sync.py | 7 +- 15 files changed, 499 insertions(+), 27 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py create mode 100644 packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py create mode 100644 packages/google-cloud-logging/tests/unit/handlers/test__helpers.py diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index 81adcf0eb545..1ebb064ed228 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -17,6 +17,17 @@ import math import json +try: + import flask +except ImportError: # pragma: NO COVER + flask = None + +from google.cloud.logging.handlers.middleware.request import ( + _get_django_request) + +_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' +_DJANGO_TRACE_HEADER = 'HTTP_X_CLOUD_TRACE_CONTEXT' + def format_stackdriver_json(record, message): """Helper to format a LogRecord in in Stackdriver fluentd format. @@ -37,3 +48,58 @@ def format_stackdriver_json(record, message): } return json.dumps(payload) + + +def get_trace_id_from_flask(): + """Get trace_id from flask request headers. + + :rtype: str + :return: Trace_id in HTTP request headers. + """ + if flask is None or not flask.request: + return None + + header = flask.request.headers.get(_FLASK_TRACE_HEADER) + + if header is None: + return None + + trace_id = header.split('/', 1)[0] + + return trace_id + + +def get_trace_id_from_django(): + """Get trace_id from django request headers. + + :rtype: str + :return: Trace_id in HTTP request headers. + """ + request = _get_django_request() + + if request is None: + return None + + header = request.META.get(_DJANGO_TRACE_HEADER) + if header is None: + return None + + trace_id = header.split('/', 1)[0] + + return trace_id + + +def get_trace_id(): + """Helper to get trace_id from web application request header. 
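Both extractors above keep only the segment before the first slash, because the header carries more than the trace ID. A minimal self-contained illustration with a made-up value:

# X-Cloud-Trace-Context values have the form "TRACE_ID/SPAN_ID;o=TRACE_TRUE".
header = '0123456789abcdef0123456789abcdef/12345;o=1'

trace_id = header.split('/', 1)[0]
assert trace_id == '0123456789abcdef0123456789abcdef'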
+ + :rtype: str + :returns: Trace_id in HTTP request headers. + """ + checkers = (get_trace_id_from_django, get_trace_id_from_flask) + + for checker in checkers: + trace_id = checker() + if trace_id is not None: + return trace_id + + return None diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 7011819f8a2f..509bf8002fb1 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -20,6 +20,7 @@ import os +from google.cloud.logging.handlers._helpers import get_trace_id from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.transports import BackgroundThreadTransport from google.cloud.logging.resource import Resource @@ -30,6 +31,8 @@ _GAE_SERVICE_ENV = 'GAE_SERVICE' _GAE_VERSION_ENV = 'GAE_VERSION' +_TRACE_ID_LABEL = 'appengine.googleapis.com/trace_id' + class AppEngineHandler(CloudLoggingHandler): """A logging handler that sends App Engine-formatted logs to Stackdriver. @@ -50,7 +53,8 @@ def __init__(self, client, client, name=_DEFAULT_GAE_LOGGER_NAME, transport=transport, - resource=self.get_gae_resource()) + resource=self.get_gae_resource(), + labels=self.get_gae_labels()) def get_gae_resource(self): """Return the GAE resource using the environment variables. @@ -67,3 +71,20 @@ def get_gae_resource(self): }, ) return gae_resource + + def get_gae_labels(self): + """Return the labels for GAE app. + + If the trace ID can be detected, it will be included as a label. + Currently, no other labels are included. + + :rtype: dict + :returns: Labels for GAE app. + """ + gae_labels = {} + + trace_id = get_trace_id() + if trace_id is not None: + gae_labels[_TRACE_ID_LABEL] = trace_id + + return gae_labels diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index 97afde9f87fb..fe9848848d38 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -57,6 +57,9 @@ class CloudLoggingHandler(logging.StreamHandler): :param resource: (Optional) Monitored resource of the entry, defaults to the global resource type. + :type labels: dict + :param labels: (Optional) Mapping of labels for the entry. + Example: .. code-block:: python @@ -79,12 +82,14 @@ class CloudLoggingHandler(logging.StreamHandler): def __init__(self, client, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, + labels=None): super(CloudLoggingHandler, self).__init__() self.name = name self.client = client self.transport = transport(client, name) self.resource = resource + self.labels = labels def emit(self, record): """Actually log the specified logging record. @@ -97,7 +102,11 @@ def emit(self, record): :param record: The record to be logged. 
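With the new ``labels`` argument, a label computed once at handler construction (such as the trace ID label above) rides along with every record the handler emits. A hedged wiring sketch (client construction is elided and the trace value is illustrative):

from google.cloud.logging.handlers import CloudLoggingHandler

def make_handler(client, trace_id):
    # One label dict, attached to every record this handler sends.
    labels = {'appengine.googleapis.com/trace_id': trace_id}
    return CloudLoggingHandler(client, name='my-service', labels=labels)

Attaching the returned handler via ``logging.getLogger().addHandler(...)`` then stamps each entry with the trace label, which is what lets Stackdriver correlate application log lines with the request that produced them.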
""" message = super(CloudLoggingHandler, self).format(record) - self.transport.send(record, message, resource=self.resource) + self.transport.send( + record, + message, + resource=self.resource, + labels=self.labels) def setup_logging(handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py new file mode 100644 index 000000000000..c340235b8bdd --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.logging.handlers.middleware.request import RequestMiddleware + +__all__ = ['RequestMiddleware'] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py new file mode 100644 index 000000000000..4c0b22a8e96b --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py @@ -0,0 +1,45 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Django middleware helper to capture a request. + +The request is stored on a thread-local so that it can be +inspected by other helpers. +""" + +import threading + + +_thread_locals = threading.local() + + +def _get_django_request(): + """Get Django request from thread local. + + :rtype: str + :returns: Django request. + """ + return getattr(_thread_locals, 'request', None) + + +class RequestMiddleware(object): + """Saves the request in thread local""" + + def process_request(self, request): + """Called on each request, before Django decides which view to execute. + + :type request: :class:`~django.http.request.HttpRequest` + :param request: Django http request. 
+ """ + _thread_locals.request = request diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 010c06b36bc9..d889bed62626 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -203,7 +203,7 @@ def _main_thread_terminated(self): else: print('Failed to send %d pending logs.' % (self._queue.qsize(),)) - def enqueue(self, record, message, resource=None): + def enqueue(self, record, message, resource=None, labels=None): """Queues a log entry to be written by the background thread. :type record: :class:`logging.LogRecord` @@ -215,6 +215,9 @@ def enqueue(self, record, message, resource=None): :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry + + :type labels: dict + :param labels: (Optional) Mapping of labels for the entry. """ self._queue.put_nowait({ 'info': { @@ -223,6 +226,7 @@ def enqueue(self, record, message, resource=None): }, 'severity': record.levelname, 'resource': resource, + 'labels': labels, }) def flush(self): @@ -257,7 +261,7 @@ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, self.worker = _Worker(logger) self.worker.start() - def send(self, record, message, resource=None): + def send(self, record, message, resource=None, labels=None): """Overrides Transport.send(). :type record: :class:`logging.LogRecord` @@ -269,8 +273,11 @@ def send(self, record, message, resource=None): :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry. + + :type labels: dict + :param labels: (Optional) Mapping of labels for the entry. """ - self.worker.enqueue(record, message, resource=resource) + self.worker.enqueue(record, message, resource=resource, labels=labels) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py index 21957021793f..7829201b1c98 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -22,7 +22,7 @@ class Transport(object): client and name object, and must override :meth:`send`. """ - def send(self, record, message, resource=None): + def send(self, record, message, resource=None, labels=None): """Transport send to be implemented by subclasses. :type record: :class:`logging.LogRecord` @@ -34,6 +34,9 @@ def send(self, record, message, resource=None): :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry. + + :type labels: dict + :param labels: (Optional) Mapping of labels for the entry. 
""" raise NotImplementedError diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index 0dd6e0bd7e24..be70e60a14e1 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -29,7 +29,7 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send(self, record, message, resource=None): + def send(self, record, message, resource=None, labels=None): """Overrides transport.send(). :type record: :class:`logging.LogRecord` @@ -38,8 +38,15 @@ def send(self, record, message, resource=None): :type message: str :param message: The message from the ``LogRecord`` after being formatted by the associated log formatters. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: (Optional) Monitored resource of the entry. + + :type labels: dict + :param labels: (Optional) Mapping of labels for the entry. """ info = {'message': message, 'python_logger': record.name} self.logger.log_struct(info, severity=record.levelname, - resource=resource) + resource=resource, + labels=labels) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 5d4751a955a5..fbbbec1958c1 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -31,7 +31,9 @@ def unit_tests(session, python_version): session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) + session.install( + 'mock', 'pytest', 'pytest-cov', + 'flask', 'django', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py new file mode 100644 index 000000000000..983d67129647 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py @@ -0,0 +1,86 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import mock + + +class DjangoBase(unittest.TestCase): + + @classmethod + def setUpClass(cls): + from django.conf import settings + from django.test.utils import setup_test_environment + + if not settings.configured: + settings.configure() + setup_test_environment() + + @classmethod + def tearDownClass(cls): + from django.test.utils import teardown_test_environment + + teardown_test_environment() + + +class TestRequestMiddleware(DjangoBase): + + def _get_target_class(self): + from google.cloud.logging.handlers.middleware import request + + return request.RequestMiddleware + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_process_request(self): + from django.test import RequestFactory + from google.cloud.logging.handlers.middleware import request + + middleware = self._make_one() + mock_request = RequestFactory().get('/') + middleware.process_request(mock_request) + + django_request = request._get_django_request() + self.assertEqual(django_request, mock_request) + + +class Test__get_django_request(DjangoBase): + + @staticmethod + def _call_fut(): + from google.cloud.logging.handlers.middleware import request + + return request._get_django_request() + + @staticmethod + def _make_patch(new_locals): + return mock.patch( + 'google.cloud.logging.handlers.middleware.request._thread_locals', + new=new_locals) + + def test_with_request(self): + thread_locals = mock.Mock(spec=['request']) + with self._make_patch(thread_locals): + django_request = self._call_fut() + + self.assertIs(django_request, thread_locals.request) + + def test_without_request(self): + thread_locals = mock.Mock(spec=[]) + with self._make_patch(thread_locals): + django_request = self._call_fut() + + self.assertIsNone(django_request) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py new file mode 100644 index 000000000000..0731c825d32c --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -0,0 +1,171 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import mock + + +class Test_get_trace_id_from_flask(unittest.TestCase): + + @staticmethod + def _call_fut(): + from google.cloud.logging.handlers import _helpers + + return _helpers.get_trace_id_from_flask() + + @staticmethod + def create_app(): + import flask + + app = flask.Flask(__name__) + + @app.route('/') + def index(): + return 'test flask trace' # pragma: NO COVER + + return app + + def setUp(self): + self.app = self.create_app() + + def test_no_context_header(self): + with self.app.test_request_context( + path='/', + headers={}): + trace_id = self._call_fut() + + self.assertIsNone(trace_id) + + def test_valid_context_header(self): + flask_trace_header = 'X_CLOUD_TRACE_CONTEXT' + expected_trace_id = 'testtraceidflask' + flask_trace_id = expected_trace_id + '/testspanid' + + context = self.app.test_request_context( + path='/', + headers={flask_trace_header: flask_trace_id}) + + with context: + trace_id = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + + +class Test_get_trace_id_from_django(unittest.TestCase): + + @staticmethod + def _call_fut(): + from google.cloud.logging.handlers import _helpers + + return _helpers.get_trace_id_from_django() + + def setUp(self): + from django.conf import settings + from django.test.utils import setup_test_environment + + if not settings.configured: + settings.configure() + setup_test_environment() + + def tearDown(self): + from django.test.utils import teardown_test_environment + from google.cloud.logging.handlers.middleware import request + + teardown_test_environment() + request._thread_locals.__dict__.clear() + + def test_no_context_header(self): + from django.test import RequestFactory + from google.cloud.logging.handlers.middleware import request + + django_request = RequestFactory().get('/') + + middleware = request.RequestMiddleware() + middleware.process_request(django_request) + trace_id = self._call_fut() + self.assertIsNone(trace_id) + + def test_valid_context_header(self): + from django.test import RequestFactory + from google.cloud.logging.handlers.middleware import request + + django_trace_header = 'HTTP_X_CLOUD_TRACE_CONTEXT' + expected_trace_id = 'testtraceiddjango' + django_trace_id = expected_trace_id + '/testspanid' + + django_request = RequestFactory().get( + '/', + **{django_trace_header: django_trace_id}) + + middleware = request.RequestMiddleware() + middleware.process_request(django_request) + trace_id = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + + +class Test_get_trace_id(unittest.TestCase): + + @staticmethod + def _call_fut(): + from google.cloud.logging.handlers import _helpers + + return _helpers.get_trace_id() + + def _helper(self, django_return, flask_return): + django_patch = mock.patch( + 'google.cloud.logging.handlers._helpers.get_trace_id_from_django', + return_value=django_return) + flask_patch = mock.patch( + 'google.cloud.logging.handlers._helpers.get_trace_id_from_flask', + return_value=flask_return) + + with django_patch as django_mock: + with flask_patch as flask_mock: + trace_id = self._call_fut() + + return django_mock, flask_mock, trace_id + + def test_from_django(self): + django_mock, flask_mock, trace_id = self._helper( + 'test-django-trace-id', None) + self.assertEqual(trace_id, django_mock.return_value) + + django_mock.assert_called_once_with() + flask_mock.assert_not_called() + + def test_from_flask(self): + django_mock, flask_mock, trace_id = self._helper( + None, 'test-flask-trace-id') + self.assertEqual(trace_id, 
flask_mock.return_value) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() + + def test_from_django_and_flask(self): + django_mock, flask_mock, trace_id = self._helper( + 'test-django-trace-id', 'test-flask-trace-id') + # Django wins. + self.assertEqual(trace_id, django_mock.return_value) + + django_mock.assert_called_once_with() + flask_mock.assert_not_called() + + def test_missing(self): + django_mock, flask_mock, trace_id = self._helper(None, None) + self.assertIsNone(trace_id) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index c39328593f7a..6438c4abb8a0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -15,8 +15,10 @@ import logging import unittest +import mock -class TestAppEngineHandlerHandler(unittest.TestCase): + +class TestAppEngineHandler(unittest.TestCase): PROJECT = 'PROJECT' def _get_target_class(self): @@ -28,12 +30,13 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): - import mock from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV + from google.cloud.logging.handlers.app_engine import _TRACE_ID_LABEL client = mock.Mock(project=self.PROJECT, spec=['project']) + with mock.patch('os.environ', new={_GAE_PROJECT_ENV: 'test_project', _GAE_SERVICE_ENV: 'test_service', _GAE_VERSION_ENV: 'test_version'}): @@ -43,13 +46,13 @@ def test_constructor(self): self.assertEqual(handler.resource.labels['project_id'], 'test_project') self.assertEqual(handler.resource.labels['module_id'], 'test_service') self.assertEqual(handler.resource.labels['version_id'], 'test_version') + self.assertEqual(handler.labels, {}) def test_emit(self): - import mock - client = mock.Mock(project=self.PROJECT, spec=['project']) handler = self._make_one(client, transport=_Transport) gae_resource = handler.get_gae_resource() + gae_labels = handler.get_gae_labels() logname = 'app' message = 'hello world' record = logging.LogRecord(logname, logging, None, None, message, @@ -58,7 +61,38 @@ def test_emit(self): self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, logname) - self.assertEqual(handler.transport.send_called_with, (record, message, gae_resource)) + self.assertEqual( + handler.transport.send_called_with, + (record, message, gae_resource, gae_labels)) + + def _get_gae_labels_helper(self, trace_id): + get_trace_patch = mock.patch( + 'google.cloud.logging.handlers.app_engine.get_trace_id', + return_value=trace_id) + + client = mock.Mock(project=self.PROJECT, spec=['project']) + # The handler actually calls ``get_gae_labels()``. 
+ with get_trace_patch as mock_get_trace: + handler = self._make_one(client, transport=_Transport) + mock_get_trace.assert_called_once_with() + + gae_labels = handler.get_gae_labels() + self.assertEqual(mock_get_trace.mock_calls, + [mock.call(), mock.call()]) + + return gae_labels + + def test_get_gae_labels_with_label(self): + from google.cloud.logging.handlers import app_engine + + trace_id = 'test-gae-trace-id' + gae_labels = self._get_gae_labels_helper(trace_id) + expected_labels = {app_engine._TRACE_ID_LABEL: trace_id} + self.assertEqual(gae_labels, expected_labels) + + def test_get_gae_labels_without_label(self): + gae_labels = self._get_gae_labels_helper(None) + self.assertEqual(gae_labels, {}) class _Transport(object): @@ -67,5 +101,5 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource): - self.send_called_with = (record, message, resource) + def send(self, record, message, resource, labels): + self.send_called_with = (record, message, resource, labels) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 05dc87631478..96823b2e906d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -45,7 +45,9 @@ def test_emit(self): None, None) handler.emit(record) - self.assertEqual(handler.transport.send_called_with, (record, message, _GLOBAL_RESOURCE)) + self.assertEqual( + handler.transport.send_called_with, + (record, message, _GLOBAL_RESOURCE, None)) class TestSetupLogging(unittest.TestCase): @@ -110,5 +112,5 @@ class _Transport(object): def __init__(self, client, name): pass - def send(self, record, message, resource): - self.send_called_with = (record, message, resource) + def send(self, record, message, resource, labels=None): + self.send_called_with = (record, message, resource, labels) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 3e3378dcd361..f6671273b53d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -61,9 +61,10 @@ def test_send(self): python_logger_name, logging.INFO, None, None, message, None, None) - transport.send(record, message, _GLOBAL_RESOURCE) + transport.send(record, message, _GLOBAL_RESOURCE, None) - transport.worker.enqueue.assert_called_once_with(record, message, _GLOBAL_RESOURCE) + transport.worker.enqueue.assert_called_once_with( + record, message, _GLOBAL_RESOURCE, None) def test_flush(self): client = _Client(self.PROJECT) @@ -287,13 +288,13 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct(self, info, severity=logging.INFO, resource=None): + def log_struct(self, info, severity=logging.INFO, resource=None, labels=None): from google.cloud.logging.logger import _GLOBAL_RESOURCE assert resource is None resource = _GLOBAL_RESOURCE - self.log_struct_called_with = (info, severity, resource) + self.log_struct_called_with = (info, severity, resource, labels) self.entries.append(info) def commit(self): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 
475ecc9c6a71..01c15240f3b7 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -52,7 +52,7 @@ def test_send(self): 'message': message, 'python_logger': python_logger_name, } - EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE) + EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE, None) self.assertEqual( transport.logger.log_struct_called_with, EXPECTED_SENT) @@ -63,8 +63,9 @@ class _Logger(object): def __init__(self, name): self.name = name - def log_struct(self, message, severity=None, resource=_GLOBAL_RESOURCE): - self.log_struct_called_with = (message, severity, resource) + def log_struct(self, message, severity=None, + resource=_GLOBAL_RESOURCE, labels=None): + self.log_struct_called_with = (message, severity, resource, labels) class _Client(object): From 4dd276c5eb6de3399018eaa26a47f19ec3f79a7e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Jun 2017 15:08:10 -0700 Subject: [PATCH 105/855] Re-enable pylint in info-only mode for all packages (#3519) --- packages/google-cloud-logging/nox.py | 13 +++++++--- .../google-cloud-logging/pylint.config.py | 25 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 packages/google-cloud-logging/pylint.config.py diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index fbbbec1958c1..9a9eb10ef3d3 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -70,15 +70,22 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/logging') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session diff --git a/packages/google-cloud-logging/pylint.config.py b/packages/google-cloud-logging/pylint.config.py new file mode 100644 index 000000000000..d8ca7b92e85e --- /dev/null +++ b/packages/google-cloud-logging/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
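The ``success_codes`` argument above is what makes the pylint step advisory: every exit status in the range is treated as success, so findings are reported without failing the session. A minimal hedged sketch in the same style as this ``nox.py`` (the session name is made up):

import nox

@nox.session
def lint_advisory(session):
    session.interpreter = 'python3.6'
    session.install('pylint')
    # pylint exits with a bitmask of message categories; accept all of
    # them so the report stays informational only.
    session.run(
        'pylint', 'google/cloud/logging',
        success_codes=range(0, 32))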
+ +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) From 6d634a7b753684c1057d0ee958c97cb62cbfcaf9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Jun 2017 18:34:40 -0400 Subject: [PATCH 106/855] Prep logging-1.1.0 release. (#3528) --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index e3f8334cd5bb..3f613b94cbe7 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='1.0.0', + version='1.1.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 0737a8878c634c6aa2452aea989b9470d87262ae Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Jun 2017 10:32:30 -0700 Subject: [PATCH 107/855] Fix inclusion of tests in manifest.in (#3552) --- packages/google-cloud-logging/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ From 0819a695e291b7b134b8daa4efa0799dc0f693f7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 28 Jun 2017 14:07:25 -0700 Subject: [PATCH 108/855] Making all LICENSE headers "uniform". (#3563) --- packages/google-cloud-logging/pylint.config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/pylint.config.py b/packages/google-cloud-logging/pylint.config.py index d8ca7b92e85e..b618319b8b61 100644 --- a/packages/google-cloud-logging/pylint.config.py +++ b/packages/google-cloud-logging/pylint.config.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, From acd3998e9ea1f24c932b8a0db00d5fd49038db04 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Jun 2017 10:56:09 -0700 Subject: [PATCH 109/855] Skipping system tests when credentials env. var is unset. (#3475) --- packages/google-cloud-logging/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 9a9eb10ef3d3..1d9d5f184e43 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -52,7 +52,7 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. 
if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) From ef03ab6e734863edce01dbe3b420da323d29d6d2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Jul 2017 16:41:31 -0400 Subject: [PATCH 110/855] Shorten nox virtualenv names to avoid hashing. (#3585) --- packages/google-cloud-logging/nox.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 1d9d5f184e43..068d5ae8d198 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install( 'mock', 'pytest', 'pytest-cov', @@ -57,6 +60,9 @@ def system_tests(session, python_version): # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -92,6 +98,10 @@ def lint(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') From e24ae679e3f329987a25e82384878b833e79a09e Mon Sep 17 00:00:00 2001 From: Craig Silverstein Date: Mon, 10 Jul 2017 09:12:24 -0700 Subject: [PATCH 111/855] Add support for logging the trace-id in webapp2 apps. (#3593) --- .../google/cloud/logging/handlers/_helpers.py | 49 ++++++++++++-- packages/google-cloud-logging/nox.py | 2 +- .../tests/unit/handlers/test__helpers.py | 66 +++++++++++++++++-- 3 files changed, 106 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index 1ebb064ed228..864f0e53617e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -22,11 +22,21 @@ except ImportError: # pragma: NO COVER flask = None +try: + import webapp2 +except (ImportError, SyntaxError): # pragma: NO COVER + # If you try to import webapp2 under python3, you'll get a syntax + # error (since it hasn't been ported yet). We just pretend it + # doesn't exist. This is unlikely to hit in real life but does + # in the tests. + webapp2 = None + from google.cloud.logging.handlers.middleware.request import ( _get_django_request) -_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' _DJANGO_TRACE_HEADER = 'HTTP_X_CLOUD_TRACE_CONTEXT' +_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' +_WEBAPP2_TRACE_HEADER = 'X-CLOUD-TRACE-CONTEXT' def format_stackdriver_json(record, message): @@ -54,7 +64,7 @@ def get_trace_id_from_flask(): """Get trace_id from flask request headers. 
:rtype: str - :return: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ if flask is None or not flask.request: return None @@ -69,11 +79,38 @@ def get_trace_id_from_flask(): return trace_id +def get_trace_id_from_webapp2(): + """Get trace_id from webapp2 request headers. + + :rtype: str + :returns: TraceID in HTTP request headers. + """ + if webapp2 is None: + return None + + try: + # get_request() succeeds if we're in the middle of a webapp2 + # request, or raises an assertion error otherwise: + # "Request global variable is not set". + req = webapp2.get_request() + except AssertionError: + return None + + header = req.headers.get(_WEBAPP2_TRACE_HEADER) + + if header is None: + return None + + trace_id = header.split('/', 1)[0] + + return trace_id + + def get_trace_id_from_django(): """Get trace_id from django request headers. :rtype: str - :return: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ request = _get_django_request() @@ -93,9 +130,11 @@ def get_trace_id(): """Helper to get trace_id from web application request header. :rtype: str - :returns: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ - checkers = (get_trace_id_from_django, get_trace_id_from_flask) + checkers = (get_trace_id_from_django, + get_trace_id_from_flask, + get_trace_id_from_webapp2) for checker in checkers: trace_id = checker() diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 068d5ae8d198..ce8d1c0afbce 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -36,7 +36,7 @@ def unit_tests(session, python_version): # Install all test dependencies, then install this package in-place. session.install( 'mock', 'pytest', 'pytest-cov', - 'flask', 'django', *LOCAL_DEPS) + 'flask', 'webapp2', 'webob', 'django', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 0731c825d32c..516cd93fc2d5 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -12,9 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import unittest import mock +import six + +try: + from webapp2 import RequestHandler +except SyntaxError: + # webapp2 has not been ported to python3, so it will give a syntax + # error if we try. We'll just skip the webapp2 tests in that case. 
+ RequestHandler = object class Test_get_trace_id_from_flask(unittest.TestCase): @@ -37,11 +46,9 @@ def index(): return app - def setUp(self): - self.app = self.create_app() - def test_no_context_header(self): - with self.app.test_request_context( + app = self.create_app() + with app.test_request_context( path='/', headers={}): trace_id = self._call_fut() @@ -53,7 +60,8 @@ def test_valid_context_header(self): expected_trace_id = 'testtraceidflask' flask_trace_id = expected_trace_id + '/testspanid' - context = self.app.test_request_context( + app = self.create_app() + context = app.test_request_context( path='/', headers={flask_trace_header: flask_trace_id}) @@ -63,6 +71,54 @@ def test_valid_context_header(self): self.assertEqual(trace_id, expected_trace_id) +class _GetTraceId(RequestHandler): + def get(self): + from google.cloud.logging.handlers import _helpers + + trace_id = _helpers.get_trace_id_from_webapp2() + self.response.content_type = 'application/json' + self.response.out.write(json.dumps(trace_id)) + + + +@unittest.skipIf(six.PY3, 'webapp2 is Python 2 only') +class Test_get_trace_id_from_webapp2(unittest.TestCase): + + @staticmethod + def create_app(): + import webapp2 + + app = webapp2.WSGIApplication([ + ('/', _GetTraceId), + ]) + + return app + + def test_no_context_header(self): + import webob + + req = webob.BaseRequest.blank('/') + response = req.get_response(self.create_app()) + trace_id = json.loads(response.body) + + self.assertEquals(None, trace_id) + + def test_valid_context_header(self): + import webob + + webapp2_trace_header = 'X-Cloud-Trace-Context' + expected_trace_id = 'testtraceidwebapp2' + webapp2_trace_id = expected_trace_id + '/testspanid' + + req = webob.BaseRequest.blank( + '/', + headers={webapp2_trace_header: webapp2_trace_id}) + response = req.get_response(self.create_app()) + trace_id = json.loads(response.body) + + self.assertEqual(trace_id, expected_trace_id) + + class Test_get_trace_id_from_django(unittest.TestCase): @staticmethod From ada48db7a5931ad53e6eea6969b2049d08e30f6b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Jul 2017 10:51:40 -0700 Subject: [PATCH 112/855] Updating author_email in all setup.py. (#3598) Done via: $ git grep -l author_email | \ > xargs sed -i s/jjg+google-cloud-python@google.com/googleapis-publisher@google.com/g and manually editing `videointelligence/setup.py` and `vision/setup.py`. --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 3f613b94cbe7..82dc4f1fcf8a 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', From d86cb727815e3d41d9a3f962205b254d5002a600 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 18 Jul 2017 11:46:05 -0700 Subject: [PATCH 113/855] Using assertEqual instead of assertEquals. (#3619) `assertEquals` is deprecated (but still is a synonym). 
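Each framework helper above answers independently, and ``get_trace_id`` takes the first non-``None`` result in a fixed order (Django, then Flask, then webapp2). A self-contained sketch of that chain with stub checkers:

def _from_django():
    return None                      # no Django request in this run

def _from_flask():
    return 'trace-id-from-flask'     # illustrative value

def _from_webapp2():
    return None

def get_trace_id():
    for checker in (_from_django, _from_flask, _from_webapp2):
        trace_id = checker()
        if trace_id is not None:
            return trace_id
    return None

assert get_trace_id() == 'trace-id-from-flask'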
--- .../google-cloud-logging/tests/unit/handlers/test__helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 516cd93fc2d5..f721881eea11 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -101,7 +101,7 @@ def test_no_context_header(self): response = req.get_response(self.create_app()) trace_id = json.loads(response.body) - self.assertEquals(None, trace_id) + self.assertEqual(None, trace_id) def test_valid_context_header(self): import webob From fa07ef0b8aba5734e451532ff09378d68b502516 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:58:17 -0700 Subject: [PATCH 114/855] Fixing references to "dead" docs links. (#3631) * Fixing references to "dead" docs links. Done via: $ git grep -l 'google-cloud-auth.html' | \ > xargs sed -i s/'google-cloud-auth.html'/'core\/auth.html'/g $ git grep -l 'http\:\/\/google-cloud-python.readthedocs.io' | \ > xargs sed -i s/'http\:\/\/google-cloud-python.readthedocs.io'/\ > 'https\:\/\/google-cloud-python.readthedocs.io'/g Fixes #3531. * Fixing up other docs that were moved in #3459. --- packages/google-cloud-logging/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 5df19dd1f79a..a706b50079ac 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Logging - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -54,7 +54,7 @@ Example of fetching entries: See the ``google-cloud-python`` API `logging documentation`_ to learn how to connect to Stackdriver Logging using this Client Library. -.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg :target: https://pypi.python.org/pypi/google-cloud-logging From 68792b2102d86b54968e53680ba8406b340d0d76 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:33:21 -0700 Subject: [PATCH 115/855] Changing all pypi.python.org links to warehouse links. (#3641) Done via $ export OLD='https\:\/\/pypi.python.org\/pypi\/' $ export NEW='https\:\/\/pypi.org\/project\/' $ git grep -l ${OLD} | xargs sed -i s/${OLD}/${NEW}/g Then manually going through and adding a trailing slash to all warehouse links. (Though I did undo changes to `docs/json/`.) 
--- packages/google-cloud-logging/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a706b50079ac..8cf274e4e4a1 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -57,6 +57,6 @@ connect to Stackdriver Logging using this Client Library. .. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ From 4eea79cab6bc19116d403e160af2833afe2a9edd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Jul 2017 11:21:30 -0700 Subject: [PATCH 116/855] Remove httplib2, replace with Requests (#3674) * Core: remove httplib2, replace with Requests Additionally remove make_exception in favor of from_http_status and from_http_response. * Datastore: replace httplib2 with Requests * DNS: replace httplib2 with Requests * Error Reporting: replace httplib2 with requests * Language: replace httplib2 with Requests * Logging: replace httplib2 with requests * Monitoring: replace httplib2 with Requests * Pubsub: replace httplib2 with Requests * Resource Manager: replace httplib2 with Requests * Runtimeconfig: replace httplib2 with Requests * Speech: replace httplib2 with Requests * Storage: replace httplib2 with Requests * BigQuery: replace httplib2 with Requests * Translate: replace httplib2 with Requests * Vision: replace httplib2 with Requests --- .../google/cloud/logging/client.py | 4 ++-- .../google-cloud-logging/tests/unit/test__http.py | 15 +++++++++------ .../tests/unit/test_client.py | 12 ++++++------ 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ca698dde99de..3ce67fba151c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -73,10 +73,10 @@ class Client(ClientWithProject): passed), falls back to the default inferred from the environment. - :type _http: :class:`~httplib2.Http` + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + :meth:`requests.Session.request`. If not passed, an ``_http`` object is created that is bound to the ``credentials`` for the current object. 
This parameter should be considered private, and could diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 459c0cf304d7..d3e9970cb757 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -43,13 +43,17 @@ def test_default_url(self): self.assertIs(conn._client, client) def test_extra_headers(self): + import requests + from google.cloud import _http as base_http from google.cloud.logging import _http as MUT - http = mock.Mock(spec=['request']) - response = mock.Mock(status=200, spec=['status']) + http = mock.create_autospec(requests.Session, instance=True) + response = requests.Response() + response.status_code = 200 data = b'brent-spiner' - http.request.return_value = response, data + response._content = data + http.request.return_value = response client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) @@ -59,17 +63,16 @@ def test_extra_headers(self): self.assertEqual(result, data) expected_headers = { - 'Content-Length': str(len(req_data)), 'Accept-Encoding': 'gzip', base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, 'User-Agent': conn.USER_AGENT, } expected_uri = conn.build_api_url('/rainbow') http.request.assert_called_once_with( - body=req_data, + data=req_data, headers=expected_headers, method='GET', - uri=expected_uri, + url=expected_uri, ) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 1655dd7ad1c6..37bfc5c18214 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -560,13 +560,13 @@ def test_list_metrics_with_paging(self): }) def test_get_default_handler_app_engine(self): - import httplib2 + import requests import os from google.cloud._testing import _Monkey from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM from google.cloud.logging.handlers import AppEngineHandler - http_mock = mock.Mock(spec=httplib2.Http) + http_mock = mock.Mock(spec=requests.Session) credentials = _make_credentials() deepcopy = mock.Mock(return_value=http_mock) @@ -596,10 +596,10 @@ def test_get_default_handler_container_engine(self): self.assertIsInstance(handler, ContainerEngineHandler) def test_get_default_handler_general(self): - import httplib2 + import requests from google.cloud.logging.handlers import CloudLoggingHandler - http_mock = mock.Mock(spec=httplib2.Http) + http_mock = mock.Mock(spec=requests.Session) credentials = _make_credentials() deepcopy = mock.Mock(return_value=http_mock) @@ -613,9 +613,9 @@ def test_get_default_handler_general(self): self.assertIsInstance(handler, CloudLoggingHandler) def test_setup_logging(self): - import httplib2 + import requests - http_mock = mock.Mock(spec=httplib2.Http) + http_mock = mock.Mock(spec=requests.Session) deepcopy = mock.Mock(return_value=http_mock) setup_logging = mock.Mock(spec=[]) From 242b1415f0d8ecd3ad0b964fa7754d2db022a95e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Jul 2017 09:24:26 -0700 Subject: [PATCH 117/855] Allowing logging system tests to fail. (#3691) These hose our builds. 
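The fix below relies on nox's `success_codes` option, which widens the set of exit statuses a `session.run` call treats as success. A standalone sketch of the pattern (session name and paths are illustrative):

    import nox

    @nox.session
    def flaky_system_tests(session):
        session.install('pytest')
        # Any py.test exit status in 0-99 counts as success, so a flaky
        # system-test run cannot fail the overall build.
        session.run(
            'py.test', '-vvv', 'tests/system.py',
            success_codes=range(0, 100))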
--- packages/google-cloud-logging/nox.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index ce8d1c0afbce..ea3621040796 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -71,7 +71,13 @@ def system_tests(session, python_version): session.install('.') # Run py.test against the system tests. - session.run('py.test', '-vvv', 'tests/system.py', *session.posargs) + session.run( + 'py.test', + '-vvv', + 'tests/system.py', + *session.posargs, + success_codes=range(0, 100), + ) @nox.session From d7679ac0c538bcfefc0beafadd632e2766bd124f Mon Sep 17 00:00:00 2001 From: Angela Li Date: Tue, 1 Aug 2017 10:36:40 -0700 Subject: [PATCH 118/855] Reduce the max tries for logging system tests (#3708) --- packages/google-cloud-logging/nox.py | 3 ++- packages/google-cloud-logging/tests/system.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index ea3621040796..f1a1e5516e60 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -74,9 +74,10 @@ def system_tests(session, python_version): session.run( 'py.test', '-vvv', + '-s', 'tests/system.py', *session.posargs, - success_codes=range(0, 100), + success_codes=range(0, 100) ) @nox.session diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 70a950f15b91..0e2cb3ab9a32 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -116,7 +116,7 @@ def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] def tearDown(self): - retry = RetryErrors(NotFound, max_tries=10) + retry = RetryErrors(NotFound, max_tries=9) for doomed in self.to_delete: retry(doomed.delete)() logging.getLogger().handlers = self._handlers_cache[:] From eb21b883bc2a2d58717e8563b388c4c71b03bd80 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Aug 2017 16:45:43 -0700 Subject: [PATCH 119/855] Updating all affected packages after google-cloud-core update. (#3730) * Updating all affected packages after google-cloud-core update. * Moving 'pip install .' **after** subpackages in nox docs. @lukesneeringer still hasn't explained why it was moved. In its current location, the dependencies are first retrieved from PyPI (which fails here for the unreleased versions), e.g.
https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/2716 --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 82dc4f1fcf8a..37350d8b9538 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-cloud-core >= 0.26.0, < 0.27dev', 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='1.1.0', + version='1.2.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 1a6dc331dd07fdcf9e90559d7ac638b769980784 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 8 Aug 2017 14:50:31 -0700 Subject: [PATCH 120/855] Use latest/ directory for docs instead of stable/ (#3766) See also https://github.com/GoogleCloudPlatform/google-cloud-python/pull/3763 $ sed -i '' 's/googlecloudplatform.github.io\/google-cloud-python\/stable\//googlecloudplatform.github.io\/google-cloud-python\/latest\//g' **/*.rst --- packages/google-cloud-logging/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 8cf274e4e4a1..7e0f8a55180d 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Logging - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html Quick Start ----------- @@ -54,7 +54,7 @@ Example of fetching entries: See the ``google-cloud-python`` API `logging documentation`_ to learn how to connect to Stackdriver Logging using this Client Library. -.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html +.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ From 4a46b54566df2c5288c58d02d379afce0f694361 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 9 Aug 2017 10:02:05 -0700 Subject: [PATCH 121/855] Move google.cloud.iterator to google.api.core.page_iterator (#3770) * Move google.cloud.iterator to google.api.core.page_iterator * Re-write tests to pytest style. * Make GAXIterator private- it will soon be removed. 
* Pass api_request into HTTPIterator to avoid accessing private members * BigQuery: use google.api.core.page_iterator * DNS: use google.api.core.page_iterator * Logging: use google.api.core.page_iterator * PubSub: use google.api.core.page_iterator * Resource manager: use google.api.core.page_iterator * Runtimeconfig: use google.api.core.page_iterator * logging: use google.api.core._GAXIterator * Storage: use google.api.core.page_iterator * Pubsub: use google.api.core._GAXIterator * Trace: use google.api.core._GAXIterator * Spanner: use google.api.core._GAXIterator --- .../google/cloud/logging/_gax.py | 23 ++++---- .../google/cloud/logging/_http.py | 52 ++++++++++++------- .../google/cloud/logging/client.py | 6 +-- .../google/cloud/logging/logger.py | 2 +- 4 files changed, 49 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 3fb648d98f7f..bfea5df022ad 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -16,6 +16,7 @@ import functools +from google.api.core import page_iterator from google.cloud.gapic.logging.v2.config_service_v2_client import ( ConfigServiceV2Client) from google.cloud.gapic.logging.v2.logging_service_v2_client import ( @@ -37,7 +38,6 @@ from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.iterator import GAXIterator from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink @@ -84,7 +84,7 @@ def list_entries(self, projects, filter_='', order_by='', passed, the API will return the first page of entries. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. """ @@ -101,7 +101,8 @@ def list_entries(self, projects, filter_='', order_by='', loggers = {} item_to_value = functools.partial( _item_to_entry, loggers=loggers) - return GAXIterator(self._client, page_iter, item_to_value) + return page_iterator._GAXIterator( + self._client, page_iter, item_to_value) def write_entries(self, entries, logger_name=None, resource=None, labels=None): @@ -188,7 +189,8 @@ def list_sinks(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_sinks(path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_sink) + return page_iterator._GAXIterator( + self._client, page_iter, _item_to_sink) def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. @@ -330,7 +332,7 @@ def list_metrics(self, project, page_size=0, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current API. 
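Each call site in this file follows the same shape: build the GAPIC page iterator, then hand it to `google.api_core`'s private adapter (visible in the hunks above and below). A condensed sketch of that pattern, with `gapic_page_iter` and `item_to_value` standing in for the real arguments:

    from google.api_core import page_iterator

    def _wrap_gax_iterator(client, gapic_page_iter, item_to_value):
        # Adapt a google-gax page iterator to the shared google.api_core
        # iterator interface used across the client libraries.
        return page_iterator._GAXIterator(
            client, gapic_page_iter, item_to_value)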
@@ -341,7 +343,8 @@ def list_metrics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_log_metrics( path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_metric) + return page_iterator._GAXIterator( + self._client, page_iter, _item_to_metric) def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. @@ -507,12 +510,12 @@ def _item_to_entry(iterator, entry_pb, loggers): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable ``loggers`` argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_LoggingAPI.list_entries`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type entry_pb: :class:`.log_entry_pb2.LogEntry` @@ -534,7 +537,7 @@ def _item_to_entry(iterator, entry_pb, loggers): def _item_to_sink(iterator, log_sink_pb): """Convert a sink protobuf to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type log_sink_pb: @@ -553,7 +556,7 @@ def _item_to_sink(iterator, log_sink_pb): def _item_to_metric(iterator, log_metric_pb): """Convert a metric protobuf to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type log_metric_pb: diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 7ca5c457c25d..45db345fa847 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -16,8 +16,8 @@ import functools +from google.api.core import page_iterator from google.cloud import _http -from google.cloud.iterator import HTTPIterator from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource @@ -93,7 +93,7 @@ def list_entries(self, projects, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. """ @@ -115,10 +115,14 @@ def list_entries(self, projects, filter_=None, order_by=None, loggers = {} item_to_value = functools.partial( _item_to_entry, loggers=loggers) - iterator = HTTPIterator( - client=self._client, path=path, - item_to_value=item_to_value, items_key='entries', - page_token=page_token, extra_params=extra_params) + iterator = page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=item_to_value, + items_key='entries', + page_token=page_token, + extra_params=extra_params) # This method uses POST to make a read-only request. 
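        # (HTTPIterator issues GET requests by default; the v2
        # ``entries:list`` endpoint only accepts POST, so the private
        # ``_HTTP_METHOD`` attribute is overridden on the next line
        # before the iterator is returned.)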
iterator._HTTP_METHOD = 'POST' return iterator @@ -205,7 +209,7 @@ def list_sinks(self, project, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.sink.Sink` accessible to the current API. @@ -216,10 +220,14 @@ def list_sinks(self, project, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/projects/%s/sinks' % (project,) - return HTTPIterator( - client=self._client, path=path, - item_to_value=_item_to_sink, items_key='sinks', - page_token=page_token, extra_params=extra_params) + return page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=_item_to_sink, + items_key='sinks', + page_token=page_token, + extra_params=extra_params) def sink_create(self, project, sink_name, filter_, destination): """API call: create a sink resource. @@ -345,7 +353,7 @@ def list_metrics(self, project, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current API. @@ -356,10 +364,14 @@ def list_metrics(self, project, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/projects/%s/metrics' % (project,) - return HTTPIterator( - client=self._client, path=path, - item_to_value=_item_to_metric, items_key='metrics', - page_token=page_token, extra_params=extra_params) + return page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=_item_to_metric, + items_key='metrics', + page_token=page_token, + extra_params=extra_params) def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. @@ -459,12 +471,12 @@ def _item_to_entry(iterator, resource, loggers): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable ``loggers`` argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_LoggingAPI.list_entries`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict @@ -485,7 +497,7 @@ def _item_to_entry(iterator, resource, loggers): def _item_to_sink(iterator, resource): """Convert a sink resource to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict @@ -500,7 +512,7 @@ def _item_to_sink(iterator, resource): def _item_to_metric(iterator, resource): """Convert a metric resource to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type resource: dict diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 3ce67fba151c..23ec84ec67d0 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -194,7 +194,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current client. """ @@ -243,7 +243,7 @@ def list_sinks(self, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.sink.Sink` accessible to the current client. @@ -288,7 +288,7 @@ def list_metrics(self, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current client. """ diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index a13b06cd260b..1006ebb1e693 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -344,7 +344,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current logger. """ From bfd9442099a3168c349a0d269e78839c23d224e3 Mon Sep 17 00:00:00 2001 From: Angela Li Date: Wed, 23 Aug 2017 15:08:22 -0700 Subject: [PATCH 122/855] Logging: Use metadata server to detect GKE environment (#3856) --- .../google/cloud/logging/_helpers.py | 35 +++++++++ .../google/cloud/logging/client.py | 9 ++- .../tests/unit/test__helpers.py | 72 +++++++++++++++++++ .../tests/unit/test_client.py | 16 +++-- 4 files changed, 122 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index 8e17a9538e76..c7fab41bc4e8 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -14,11 +14,17 @@ """Common logging helpers.""" +import requests from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry +METADATA_URL = 'http://metadata/computeMetadata/v1/' +METADATA_HEADERS = { + 'Metadata-Flavor': 'Google' +} + def entry_from_resource(resource, client, loggers): """Detect correct entry type from resource and instantiate. 
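The next hunk adds the `retrieve_metadata_server` helper. A usage sketch, assuming the code runs somewhere the GCE metadata host resolves (on GKE the lookup returns the cluster name; elsewhere it simply returns `None`):

    from google.cloud.logging._helpers import retrieve_metadata_server

    # 'instance/attributes/cluster-name' is the same key the client uses
    # to detect GKE; off-GCP the request fails and the helper yields None.
    cluster_name = retrieve_metadata_server('instance/attributes/cluster-name')
    if cluster_name is None:
        print('Metadata server unavailable; not running on GKE.')
    else:
        print('GKE cluster: %s' % (cluster_name,))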
@@ -46,3 +52,32 @@ def entry_from_resource(resource, client, loggers): return ProtobufEntry.from_api_repr(resource, client, loggers) raise ValueError('Cannot parse log entry resource.') + + +def retrieve_metadata_server(metadata_key): + """Retrieve the metadata key in the metadata server. + + See: https://cloud.google.com/compute/docs/storing-retrieving-metadata + + :type metadata_key: str + :param metadata_key: Key of the metadata which will form the url. You can + also supply query parameters after the metadata key. + e.g. "tags?alt=json" + + :rtype: str + :returns: The value of the metadata key returned by the metadata server. + """ + url = METADATA_URL + metadata_key + + try: + response = requests.get(url, headers=METADATA_HEADERS) + + if response.status_code == requests.codes.ok: + return response.text + + except requests.exceptions.RequestException: + # Ignore the exception, connection failed means the attribute does not + # exist in the metadata server. + pass + + return None diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 23ec84ec67d0..ae20dd48fcf6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -31,6 +31,7 @@ from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.logging._helpers import retrieve_metadata_server from google.cloud.logging._http import Connection from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI from google.cloud.logging._http import _MetricsAPI as JSONMetricsAPI @@ -55,8 +56,8 @@ _APPENGINE_FLEXIBLE_ENV_FLEX = 'GAE_INSTANCE' """Environment variable set in App Engine when env:flex is set.""" -_CONTAINER_ENGINE_ENV = 'KUBERNETES_SERVICE' -"""Environment variable set in a Google Container Engine environment.""" +_GKE_CLUSTER_NAME = 'instance/attributes/cluster-name' +"""Attribute in metadata server when in GKE environment.""" class Client(ClientWithProject): @@ -301,10 +302,12 @@ def get_default_handler(self): :rtype: :class:`logging.Handler` :returns: The default log handler based on the environment """ + gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) + if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ): return AppEngineHandler(self) - elif _CONTAINER_ENGINE_ENV in os.environ: + elif gke_cluster_name is not None: return ContainerEngineHandler() else: return CloudLoggingHandler(self) diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 7cc2d392514c..93532eed0c05 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -15,6 +15,8 @@ import unittest +import mock + class Test_entry_from_resource(unittest.TestCase): @@ -53,6 +55,69 @@ def test_proto_payload(self): self._payload_helper('protoPayload', 'ProtobufEntry') +class Test_retrieve_metadata_server(unittest.TestCase): + + @staticmethod + def _call_fut(metadata_key): + from google.cloud.logging._helpers import retrieve_metadata_server + + return retrieve_metadata_server(metadata_key) + + def test_metadata_exists(self): + status_code_ok = 200 + response_text = 'my-gke-cluster' + metadata_key = 'test_key' + + response_mock = ResponseMock(status_code=status_code_ok) + response_mock.text = response_text + + requests_mock = 
mock.Mock() + requests_mock.get.return_value = response_mock + requests_mock.codes.ok = status_code_ok + + patch = mock.patch( + 'google.cloud.logging._helpers.requests', + requests_mock) + + with patch: + metadata = self._call_fut(metadata_key) + + self.assertEqual(metadata, response_text) + + def test_metadata_does_not_exist(self): + status_code_ok = 200 + status_code_not_found = 404 + metadata_key = 'test_key' + + response_mock = ResponseMock(status_code=status_code_not_found) + + requests_mock = mock.Mock() + requests_mock.get.return_value = response_mock + requests_mock.codes.ok = status_code_ok + + patch = mock.patch( + 'google.cloud.logging._helpers.requests', + requests_mock) + + with patch: + metadata = self._call_fut(metadata_key) + + self.assertIsNone(metadata) + + def test_request_exception(self): + metadata_key = 'test_url_cannot_connect' + metadata_url = 'http://metadata.invalid/' + + patch = mock.patch( + 'google.cloud.logging._helpers.METADATA_URL', + new=metadata_url) + + with patch: + metadata = self._call_fut(metadata_key) + + self.assertIsNone(metadata) + + class EntryMock(object): def __init__(self): @@ -62,3 +127,10 @@ def __init__(self): def from_api_repr(self, resource, client, loggers): self.called = (resource, client, loggers) return self.sentinel + + +class ResponseMock(object): + + def __init__(self, status_code, text='test_response_text'): + self.status_code = status_code + self.text = text diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 37bfc5c18214..bb16e85c7ae5 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -581,16 +581,18 @@ def test_get_default_handler_app_engine(self): self.assertIsInstance(handler, AppEngineHandler) def test_get_default_handler_container_engine(self): - import os - from google.cloud._testing import _Monkey - from google.cloud.logging.client import _CONTAINER_ENGINE_ENV from google.cloud.logging.handlers import ContainerEngineHandler - client = self._make_one(project=self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) + client = self._make_one( + project=self.PROJECT, + credentials=_make_credentials(), + _use_grpc=False) + + patch = mock.patch( + 'google.cloud.logging.client.retrieve_metadata_server', + return_value='test-gke-cluster') - with _Monkey(os, environ={_CONTAINER_ENGINE_ENV: 'True'}): + with patch: handler = client.get_default_handler() self.assertIsInstance(handler, ContainerEngineHandler) From 267f4c9fbfd0d012512de7b51fa286ea379e184a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 13:28:07 -0700 Subject: [PATCH 123/855] Bump core version number (#3864) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 37350d8b9538..1efb71a36a4a 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.26.0, < 0.27dev', + 'google-cloud-core >= 0.27.0, < 0.28dev', 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] From 1658e0cb43bb01b46e0802a240c1fb84a74141d8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 24 Aug 2017 14:15:01 -0700 Subject: [PATCH 124/855] Updating 10 packages after google-cloud-core==0.27.0 (#3866) - dns - error_reporting - language - 
logging - monitoring - resource_manager - runtimeconfig - speech - translate - vision Also updating bounds on these in `google-cloud` uber-package. --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 1efb71a36a4a..98cca4b3ebec 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -58,7 +58,7 @@ setup( name='google-cloud-logging', - version='1.2.0', + version='1.3.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From e45e07f22195e56d93b65ce2fb06cc76391db6f5 Mon Sep 17 00:00:00 2001 From: Angela Li Date: Wed, 20 Sep 2017 06:57:52 -0700 Subject: [PATCH 125/855] Remove deepcopy of client._http (#3954) --- .../handlers/transports/background_thread.py | 5 +-- packages/google-cloud-logging/tests/system.py | 32 +++++++------- .../tests/unit/test_client.py | 44 ++++++------------- 3 files changed, 32 insertions(+), 49 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index d889bed62626..69f3fe45a1e3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -20,7 +20,6 @@ from __future__ import print_function import atexit -import copy import logging import threading @@ -254,9 +253,7 @@ class BackgroundThreadTransport(Transport): def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE): - http = copy.deepcopy(client._http) - self.client = client.__class__( - client.project, client._credentials, http) + self.client = client logger = self.client.logger(name) self.worker = _Worker(logger) self.worker.start() diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 0e2cb3ab9a32..e0771a659be6 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -414,30 +414,32 @@ def test_create_sink_storage_bucket(self): self.assertTrue(sink.exists()) def test_create_sink_pubsub_topic(self): - from google.cloud.iam import OWNER_ROLE - from google.cloud.pubsub import client as pubsub_client + import uuid + + from google.cloud import pubsub_v1 SINK_NAME = 'test-create-sink-topic%s' % (_RESOURCE_ID,) - TOPIC_NAME = 'logging-test-sink%s' % (_RESOURCE_ID,) + TOPIC_NAME = '%s-%s' % ('systest', str(uuid.uuid4())[0:8]) # Create the destination topic, and set up the IAM policy to allow # Stackdriver Logging to write into it. 
- pubsub_client = pubsub_client.Client() - topic = pubsub_client.topic(TOPIC_NAME) - topic.create() - self.to_delete.append(topic) - policy = topic.get_iam_policy() - new_owners = set([policy.group('cloud-logs@google.com')]) - new_owners.update(policy.owners) - policy[OWNER_ROLE] = new_owners - topic.set_iam_policy(policy) - - TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name,) + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(Config.CLIENT.project, TOPIC_NAME) + publisher.create_topic(topic_path) + + policy = publisher.get_iam_policy(topic_path) + policy.bindings.add( + role='roles/owner', + members=['group:cloud-logs@google.com'] + ) + publisher.set_iam_policy(topic_path, policy) + + TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic_path,) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) self.assertFalse(sink.exists()) sink.create() - self.to_delete.append(sink) + publisher.delete_topic(topic_path) self.assertTrue(sink.exists()) def _init_bigquery_dataset(self): diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index bb16e85c7ae5..c92c7ded1ea8 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -560,23 +560,18 @@ def test_list_metrics_with_paging(self): }) def test_get_default_handler_app_engine(self): - import requests import os from google.cloud._testing import _Monkey from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM from google.cloud.logging.handlers import AppEngineHandler - http_mock = mock.Mock(spec=requests.Session) credentials = _make_credentials() - deepcopy = mock.Mock(return_value=http_mock) with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): - with mock.patch('copy.deepcopy', new=deepcopy): - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) - handler = client.get_default_handler() - deepcopy.assert_called_once_with(client._http) + client = self._make_one(project=self.PROJECT, + credentials=credentials, + _use_grpc=False) + handler = client.get_default_handler() self.assertIsInstance(handler, AppEngineHandler) @@ -598,39 +593,28 @@ def test_get_default_handler_container_engine(self): self.assertIsInstance(handler, ContainerEngineHandler) def test_get_default_handler_general(self): - import requests from google.cloud.logging.handlers import CloudLoggingHandler - http_mock = mock.Mock(spec=requests.Session) credentials = _make_credentials() - deepcopy = mock.Mock(return_value=http_mock) - with mock.patch('copy.deepcopy', new=deepcopy): - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) - handler = client.get_default_handler() - deepcopy.assert_called_once_with(client._http) + client = self._make_one(project=self.PROJECT, + credentials=credentials, + _use_grpc=False) + handler = client.get_default_handler() self.assertIsInstance(handler, CloudLoggingHandler) def test_setup_logging(self): - import requests - - http_mock = mock.Mock(spec=requests.Session) - deepcopy = mock.Mock(return_value=http_mock) setup_logging = mock.Mock(spec=[]) credentials = _make_credentials() - with mock.patch('copy.deepcopy', new=deepcopy): - with mock.patch('google.cloud.logging.client.setup_logging', - new=setup_logging): - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) - client.setup_logging() - 
deepcopy.assert_called_once_with(client._http) + with mock.patch('google.cloud.logging.client.setup_logging', + new=setup_logging): + client = self._make_one(project=self.PROJECT, + credentials=credentials, + _use_grpc=False) + client.setup_logging() setup_logging.assert_called() From 2d6b7cdb1ea90be48d35b010a0f1fce2013d2b5c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 2 Oct 2017 10:15:57 -0700 Subject: [PATCH 126/855] Avoiding `grpcio==1.6.0` in deps. (#4096) This is due to `google-gax` doing the same, which has broken RTD builds: https://readthedocs.org/projects/google-cloud-python/builds/6063446/ The motivation for avoiding `grpcio==1.6.0` is: https://github.com/grpc/grpc/issues/12455 --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 98cca4b3ebec..c211b10f7fc0 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -52,7 +52,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.27.0, < 0.28dev', - 'grpcio >= 1.2.0, < 2.0dev', + 'grpcio >= 1.2.0, < 1.6dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] From 004ecaed1632cfc2705f019d5ce8b5deb73ae89f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 3 Oct 2017 13:02:49 -0700 Subject: [PATCH 127/855] Fixing virutal->virtual typo. (#4108) Done via: $ git grep -l virutal | xargs sed -i s/virutal/virtual/g --- packages/google-cloud-logging/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index f1a1e5516e60..2682e87e343c 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -64,7 +64,7 @@ def system_tests(session, python_version): session.virtualenv_dirname = 'sys-' + python_version # Install all test dependencies, then install this package into the - # virutalenv's dist-packages. + # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) session.install('../test_utils/', '../bigquery/', '../pubsub/', '../storage/') From 5e91aefe62adecf35c0a2a0ff0948a0eca266296 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 4 Oct 2017 12:45:40 -0700 Subject: [PATCH 128/855] Removing `googleapis-common-protos` from deps in non-`core` packages. (#4098) * Removing `googleapis-common-protos` from deps in non-`core` packages. Also - removing `grpcio` from non-`core` packages. - manually specifying the `grpcio` dep in core (rather than getting it from `googleapis-common-protos[grpc]`) * Making `grpc` an extra for `core`. * Adding `googleapis-common-protos` back to `videointelligence`. 
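The "extra" mentioned above is setuptools' `extras_require`: the base install stays lean, and `pip install google-cloud-core[grpc]` pulls in the gRPC stack on demand, which is the form the `setup.py` diff below adopts. A minimal sketch (package name and version pins are illustrative):

    from setuptools import setup

    setup(
        name='example-core',
        version='0.1.0',
        # Always installed.
        install_requires=['requests >= 2.18.0'],
        # Installed only on request, e.g. `pip install example-core[grpc]`.
        extras_require={
            'grpc': ['grpcio >= 1.7.0, < 2.0dev'],
        },
    )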
--- packages/google-cloud-logging/setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c211b10f7fc0..2b11432fd5e8 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,8 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.27.0, < 0.28dev', - 'grpcio >= 1.2.0, < 1.6dev', + 'google-cloud-core[grpc] >= 0.27.1, < 0.28dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] From 2f5d42db54486de4dbe2463cf2a9cc6644854e6c Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 12 Oct 2017 17:13:19 -0700 Subject: [PATCH 129/855] s/gcloud-common/google-cloud-common/g (#4180) The gcloud-common repo moved to https://github.com/GoogleCloudPlatform/google-cloud-common --- packages/google-cloud-logging/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 7e0f8a55180d..00ca1135a5d8 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -27,7 +27,7 @@ learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. .. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication +.. _authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication Using the API ------------- From fbdbb97c8466429166bdab28e6452da094ffded2 Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Fri, 13 Oct 2017 13:46:24 -0700 Subject: [PATCH 130/855] Update Docs with Python Setup Guide (#4187) --- packages/google-cloud-logging/README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 00ca1135a5d8..7e9bb29655fa 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -18,6 +18,10 @@ Quick Start $ pip install --upgrade google-cloud-logging +For more information on setting up your Python development environment, such as installing ``pip`` and ``virtualenv`` on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. + +.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup + Authentication -------------- From 5cdfd4fe11f6b81a5fb81adddf1c2d3a00fe909a Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 18 Oct 2017 15:36:57 -0700 Subject: [PATCH 131/855] Replace usage of google.api.core with google.api_core (#4221) * Remove api.core packages from google.cloud.core, make google.cloud.core depend on api_core.
* s/google.api.core/google.api_core/g and nox updates * Fixing core tests, addressing review feedback * Fix bigquery --- .../google/cloud/logging/_gax.py | 14 +++++++------- .../google/cloud/logging/_http.py | 16 ++++++++-------- .../google/cloud/logging/client.py | 6 +++--- .../google/cloud/logging/logger.py | 2 +- packages/google-cloud-logging/nox.py | 5 ++++- 5 files changed, 23 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index bfea5df022ad..d979548c0b19 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -16,7 +16,7 @@ import functools -from google.api.core import page_iterator +from google.api_core import page_iterator from google.cloud.gapic.logging.v2.config_service_v2_client import ( ConfigServiceV2Client) from google.cloud.gapic.logging.v2.logging_service_v2_client import ( @@ -84,7 +84,7 @@ def list_entries(self, projects, filter_='', order_by='', passed, the API will return the first page of entries. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. """ @@ -332,7 +332,7 @@ def list_metrics(self, project, page_size=0, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current API. @@ -510,12 +510,12 @@ def _item_to_entry(iterator, entry_pb, loggers): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be + :class:`~google.api_core.page_iterator.Iterator`. It is intended to be patched with a mutable ``loggers`` argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_LoggingAPI.list_entries`. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type entry_pb: :class:`.log_entry_pb2.LogEntry` @@ -537,7 +537,7 @@ def _item_to_entry(iterator, entry_pb, loggers): def _item_to_sink(iterator, log_sink_pb): """Convert a sink protobuf to the native object. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type log_sink_pb: @@ -556,7 +556,7 @@ def _item_to_sink(iterator, log_sink_pb): def _item_to_metric(iterator, log_metric_pb): """Convert a metric protobuf to the native object. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type log_metric_pb: diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 45db345fa847..702d6515b22b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -16,7 +16,7 @@ import functools -from google.api.core import page_iterator +from google.api_core import page_iterator from google.cloud import _http from google.cloud.logging import __version__ @@ -93,7 +93,7 @@ def list_entries(self, projects, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. """ @@ -209,7 +209,7 @@ def list_sinks(self, project, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.sink.Sink` accessible to the current API. @@ -353,7 +353,7 @@ def list_metrics(self, project, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current API. @@ -471,12 +471,12 @@ def _item_to_entry(iterator, resource, loggers): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be + :class:`~google.api_core.page_iterator.Iterator`. It is intended to be patched with a mutable ``loggers`` argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_LoggingAPI.list_entries`. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict @@ -497,7 +497,7 @@ def _item_to_entry(iterator, resource, loggers): def _item_to_sink(iterator, resource): """Convert a sink resource to the native object. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict @@ -512,7 +512,7 @@ def _item_to_sink(iterator, resource): def _item_to_metric(iterator, resource): """Convert a metric resource to the native object. - :type iterator: :class:`~google.api.core.page_iterator.Iterator` + :type iterator: :class:`~google.api_core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ae20dd48fcf6..ba884b311c12 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -195,7 +195,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. 
- :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current client. """ @@ -244,7 +244,7 @@ def list_sinks(self, page_size=None, page_token=None): passed, the API will return the first page of sinks. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.sink.Sink` accessible to the current client. @@ -289,7 +289,7 @@ def list_metrics(self, page_size=None, page_token=None): passed, the API will return the first page of metrics. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current client. """ diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 1006ebb1e693..6f54ca478444 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -344,7 +344,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, passed, the API will return the first page of entries. - :rtype: :class:`~google.api.core.page_iterator.Iterator` + :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current logger. """ diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 2682e87e343c..e6c1037d83be 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -19,7 +19,10 @@ import nox -LOCAL_DEPS = ('../core/',) +LOCAL_DEPS = ( + os.path.join('..', 'api_core'), + os.path.join('..', 'core'), +) @nox.session From a4513ff994d78dcca0787fced1f8ba93b27a4f92 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 30 Oct 2017 14:41:42 -0700 Subject: [PATCH 132/855] Cutting version 0.28.0 of `google-cloud-core`. (#4280) Also - updating all dependencies of `grpcio` to `>= 1.7.0`. This was due to an issue [1] with `1.6.0`. - updating the version of `google-api-core` (also to be released, This is required since the bounds on `grpcio` of `google-cloud-core==0.28.0` and `google-api-core==0.1.0` are mutually exclusive.) - Updating `google-api-core` CHANGELOG for release. - Updating packages to depend on `google-cloud-core>=0.28.0`. - Installing `nox -s lint` deps locally for vision. 
[1]: https://github.com/grpc/grpc/issues/12455 --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 2b11432fd5e8..a757d10b7518 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core[grpc] >= 0.27.1, < 0.28dev', + 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] From 4d5c5dd494415f4b58fa317ee775abb996bac483 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 31 Oct 2017 08:57:09 -0700 Subject: [PATCH 133/855] Switch copyright holder to "Google LLC" (#4287) --- packages/google-cloud-logging/google/__init__.py | 2 +- packages/google-cloud-logging/google/cloud/__init__.py | 2 +- packages/google-cloud-logging/google/cloud/logging/__init__.py | 2 +- packages/google-cloud-logging/google/cloud/logging/_gax.py | 2 +- packages/google-cloud-logging/google/cloud/logging/_helpers.py | 2 +- packages/google-cloud-logging/google/cloud/logging/_http.py | 2 +- packages/google-cloud-logging/google/cloud/logging/client.py | 2 +- packages/google-cloud-logging/google/cloud/logging/entries.py | 2 +- .../google/cloud/logging/handlers/__init__.py | 2 +- .../google/cloud/logging/handlers/_helpers.py | 2 +- .../google/cloud/logging/handlers/app_engine.py | 2 +- .../google/cloud/logging/handlers/container_engine.py | 2 +- .../google/cloud/logging/handlers/handlers.py | 2 +- .../google/cloud/logging/handlers/middleware/__init__.py | 2 +- .../google/cloud/logging/handlers/middleware/request.py | 2 +- .../google/cloud/logging/handlers/transports/__init__.py | 2 +- .../cloud/logging/handlers/transports/background_thread.py | 2 +- .../google/cloud/logging/handlers/transports/base.py | 2 +- .../google/cloud/logging/handlers/transports/sync.py | 2 +- packages/google-cloud-logging/google/cloud/logging/logger.py | 2 +- packages/google-cloud-logging/google/cloud/logging/metric.py | 2 +- packages/google-cloud-logging/google/cloud/logging/resource.py | 2 +- packages/google-cloud-logging/google/cloud/logging/sink.py | 2 +- packages/google-cloud-logging/nox.py | 2 +- packages/google-cloud-logging/pylint.config.py | 2 +- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/tests/system.py | 2 +- packages/google-cloud-logging/tests/unit/__init__.py | 2 +- packages/google-cloud-logging/tests/unit/handlers/__init__.py | 2 +- .../tests/unit/handlers/middleware/test_request.py | 2 +- .../google-cloud-logging/tests/unit/handlers/test__helpers.py | 2 +- .../google-cloud-logging/tests/unit/handlers/test_app_engine.py | 2 +- .../tests/unit/handlers/test_container_engine.py | 2 +- .../google-cloud-logging/tests/unit/handlers/test_handlers.py | 2 +- .../tests/unit/handlers/transports/__init__.py | 2 +- .../tests/unit/handlers/transports/test_background_thread.py | 2 +- .../tests/unit/handlers/transports/test_base.py | 2 +- .../tests/unit/handlers/transports/test_sync.py | 2 +- packages/google-cloud-logging/tests/unit/test__gax.py | 2 +- packages/google-cloud-logging/tests/unit/test__helpers.py | 2 +- packages/google-cloud-logging/tests/unit/test__http.py | 2 +- packages/google-cloud-logging/tests/unit/test_client.py | 2 +- packages/google-cloud-logging/tests/unit/test_entries.py | 2 +- packages/google-cloud-logging/tests/unit/test_logger.py | 2 +- packages/google-cloud-logging/tests/unit/test_metric.py | 2 +- 
packages/google-cloud-logging/tests/unit/test_sink.py | 2 +- 46 files changed, 46 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-logging/google/__init__.py b/packages/google-cloud-logging/google/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- a/packages/google-cloud-logging/google/__init__.py +++ b/packages/google-cloud-logging/google/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/__init__.py b/packages/google-cloud-logging/google/cloud/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- a/packages/google-cloud-logging/google/cloud/__init__.py +++ b/packages/google-cloud-logging/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index cced78370c6a..952a5899c280 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index d979548c0b19..08ea448d7875 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index c7fab41bc4e8..79ae6646e547 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 702d6515b22b..810b3536f2ef 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index ba884b311c12..92506bdcc2f7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. 
+# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index d39092c3e324..995aa6e410b3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py index 432419543bea..59562f67ebf0 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index 864f0e53617e..03dc6bfa4f3e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2016 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 509bf8002fb1..4bace1f1e20e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2016 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py index 8beb7d076a4b..6c0868c0ad83 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2016 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index fe9848848d38..c56e0393833b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. 
+# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py index c340235b8bdd..94f6feccc358 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. All Rights Reserved. +# Copyright 2017 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py index 4c0b22a8e96b..212327a0717a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. All Rights Reserved. +# Copyright 2017 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py index b1091b70788d..d07bdf9edc02 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 69f3fe45a1e3..8d5fec79b610 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py index 7829201b1c98..cefbb6909a07 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index be70e60a14e1..e5979d1bdf58 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 6f54ca478444..b6db8828bffe 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index ff0a4748540a..a85f9271c1ee 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/resource.py b/packages/google-cloud-logging/google/cloud/logging/resource.py index aa37287db3ef..3f5bb4490364 100644 --- a/packages/google-cloud-logging/google/cloud/logging/resource.py +++ b/packages/google-cloud-logging/google/cloud/logging/resource.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. +# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 3f468e6cf2f0..71ff28bf1334 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index e6c1037d83be..2407059445df 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/pylint.config.py b/packages/google-cloud-logging/pylint.config.py index b618319b8b61..5d64b9d2f256 100644 --- a/packages/google-cloud-logging/pylint.config.py +++ b/packages/google-cloud-logging/pylint.config.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. 
+# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index a757d10b7518..b4635fab21ef 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index e0771a659be6..312c1e5860da 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py index 58e0d9153632..df379f1e9d88 100644 --- a/packages/google-cloud-logging/tests/unit/__init__.py +++ b/packages/google-cloud-logging/tests/unit/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/__init__.py b/packages/google-cloud-logging/tests/unit/handlers/__init__.py index 58e0d9153632..df379f1e9d88 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/__init__.py +++ b/packages/google-cloud-logging/tests/unit/handlers/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py index 983d67129647..f3762aea38d5 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py +++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. All Rights Reserved. +# Copyright 2017 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index f721881eea11..a448f339a046 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. All Rights Reserved. +# Copyright 2017 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 6438c4abb8a0..07ac4eaa168a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2016 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py index b8ce0dc436f3..b2b461e8bae0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2016 Google LLC All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 96823b2e906d..c5a6e4434c43 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py b/packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py index 58e0d9153632..df379f1e9d88 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index f6671273b53d..03769dca4e29 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index f797e0b09338..24e2d31ae02a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 01c15240f3b7..6c2e51f944fb 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index f1b98696dc9e..cb07af7d0dad 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 93532eed0c05..6098eaefd877 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index d3e9970cb757..2a920fce7ee6 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index c92c7ded1ea8..312f933cad6f 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 75cb641636a0..71b3ce561299 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 5c184f7c3dec..7e9893f46cc9 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. 
+# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py index 12bf250ca2aa..862ddf9bd6c5 100644 --- a/packages/google-cloud-logging/tests/unit/test_metric.py +++ b/packages/google-cloud-logging/tests/unit/test_metric.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index 15acc46ce12d..8a31fa047e0d 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 1f9fc89aad31503e1a6d183b2b74845425363743 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 14:28:55 -0700 Subject: [PATCH 134/855] Making release for most packages. (#4296) * Making release for most packages. Every package except those that have already been released (`google-cloud-core`, `google-api-core`, `google-cloud-bigquery`): - `google-cloud` - `google-cloud-bigtable` - `google-cloud-datastore` - `google-cloud-dns` - `google-cloud-error-reporting` - `google-cloud-firestore` - `google-cloud-language` - `google-cloud-logging` - `google-cloud-monitoring` - `google-cloud-resource-manager` - `google-cloud-runtimeconfig` - `google-cloud-spanner` - `google-cloud-speech` - `google-cloud-storage` - `google-cloud-trace` - `google-cloud-translate` - `google-cloud-videointelligence` - `google-cloud-vision` * Adding changelog files for each package. 
--- packages/google-cloud-logging/CHANGELOG.md | 25 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 3 ++- 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/CHANGELOG.md diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md new file mode 100644 index 000000000000..878346719f68 --- /dev/null +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -0,0 +1,25 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-logging/#history + +## 1.4.0 + +### Implementation Changes + +- Remove `deepcopy` of `Client._http` in background transport (#3954) + +### Documentation + +- Added link to "Python Development Environment Setup Guide" in + project README (#4187, h/t to @michaelawyu) + +### Dependencies + +- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency + on `google-api-core` (#4221, #4280) +- Deferring to `google-api-core` for `grpcio` and + `googleapis-common-protos` dependencies (#4096, #4098) + +PyPI: https://pypi.org/project/google-cloud-logging/1.4.0/ diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index b4635fab21ef..0d9c18ed08b5 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -52,12 +52,13 @@ REQUIREMENTS = [ 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', + 'google-api-core >= 0.1.1, < 0.2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='1.3.0', + version='1.4.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 0c8c839106318ac659c3afeb82ae024141071063 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 15:43:51 -0700 Subject: [PATCH 135/855] Marking all remaining versions as "dev". (#4299) This is to make it clear the code is between releases. Any code that relies on a **new** feature (e.g. of `google-api-core`) will then be able to **explicitly** make this clear by using the lower bound of the `devN` version. Fixes #4208. See: https://snarky.ca/how-i-manage-package-version-numbers/ --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 0d9c18ed08b5..5ecaed3aff74 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -58,7 +58,7 @@ setup( name='google-cloud-logging', - version='1.4.0', + version='1.4.1.dev1', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From b7ab1e7fcdd0ddfefddab43087cbb6d8676f933a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 12:43:23 -0700 Subject: [PATCH 136/855] Fixing "Fore" -> "For" typo in README docs. (#4317) Also obeying an 80-column limit for the content and adding a missing "``virtualenv``" in the phrase "``pip`` and ``virtualenv``" in some of the docs.
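To make the `devN` lower-bound idea from the version-marking commit above concrete: a downstream package that needs a feature merged only after a release can encode that in its own requirements. A minimal sketch, with hypothetical version numbers that are not taken from any patch in this series:

    REQUIREMENTS = [
        # The '.dev1' lower bound signals a dependency on code that only
        # exists after the 1.4.0 release was cut (hypothetical pin).
        'google-cloud-logging >= 1.4.1.dev1, < 1.5dev',
    ]

The pin style mirrors the existing entries in setup.py, which already use 'dev' upper bounds such as '< 0.29dev'.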
--- packages/google-cloud-logging/README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 7e9bb29655fa..24f2538a3b53 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -18,7 +18,9 @@ Quick Start $ pip install --upgrade google-cloud-logging -Fore more information on setting up your Python development environment, such as installing ``pip`` and on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. +For more information on setting up your Python development environment, +such as installing ``pip`` and ``virtualenv`` on your system, please refer +to `Python Development Environment Setup Guide`_ for Google Cloud Platform. .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup From b38c49e165b94462e08b7b7d80717e623f821562 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 1 Nov 2017 16:53:46 -0700 Subject: [PATCH 137/855] Closes #4319 - shorten test names (#4321) * Closes #4319 - shorten test names * #4319 update docs and config files --- packages/google-cloud-logging/nox.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 2407059445df..8e004dc653a1 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -26,15 +26,15 @@ @nox.session -@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) -def unit_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + python_version + session.virtualenv_dirname = 'unit-' + py # Install all test dependencies, then install this package in-place. session.install( @@ -52,8 +52,8 @@ def unit_tests(session, python_version): @nox.session -@nox.parametrize('python_version', ['2.7', '3.6']) -def system_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.6']) +def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. @@ -61,10 +61,10 @@ def system_tests(session, python_version): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + python_version + session.virtualenv_dirname = 'sys-' + py # Install all test dependencies, then install this package into the # virtualenv's dist-packages. From 0e7b5442cbf817818569fd12ca13cec9913e9182 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 21:47:55 -0700 Subject: [PATCH 138/855] Making a `nox -s default` session for all packages. (#4324) * Making a `nox -s default` session for all packages. * Using "default" `nox` session on AppVeyor. This way 32-bit or 64-bit Python can be used, depending on which is the active `python` / the active `nox.exe`.
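Condensed, the session layout this commit introduces looks like the sketch below (simplified from the diff that follows; the real `default` session also passes the coverage flags shown there):

    import nox

    @nox.session
    def default(session):
        # Runs under whichever interpreter invoked ``nox``.
        session.install('mock', 'pytest', 'pytest-cov')
        session.install('-e', '.')
        session.run('py.test', '--quiet', 'tests/unit', *session.posargs)

    @nox.session
    @nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6'])
    def unit(session, py):
        # Pins an interpreter and virtualenv name, then delegates to
        # ``default`` so both paths run the same steps.
        session.interpreter = 'python{}'.format(py)
        session.virtualenv_dirname = 'unit-' + py
        default(session)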
--- packages/google-cloud-logging/nox.py | 44 ++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 8e004dc653a1..3bd661d6c1db 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -26,16 +26,14 @@ @nox.session -@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) -def unit(session, py): - """Run the unit test suite.""" - - # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + py +def default(session): + """Default unit test session. + This is intended to be run **without** an interpreter set, so + that the current ``python`` (on the ``PATH``) or the version of + Python corresponding to the ``nox`` binary on the ``PATH`` can + run the tests. + """ # Install all test dependencies, then install this package in-place. session.install( 'mock', 'pytest', 'pytest-cov', @@ -44,13 +42,33 @@ def unit(session, py): # Run py.test against the unit tests. session.run( - 'py.test', '--quiet', - '--cov=google.cloud.logging', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', *session.posargs + 'py.test', + '--quiet', + '--cov=google.cloud.logging', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + 'tests/unit', + *session.posargs ) + + @nox.session +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python{}'.format(py) + + # Set the virtualenv dirname.
+ session.virtualenv_dirname = 'unit-' + py + + default(session) + + @nox.session @nox.parametrize('py', ['2.7', '3.6']) def system(session, py): From 0474dfc8f551e6388f7013245b3ad696581b3e81 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Thu, 9 Nov 2017 12:55:59 -0800 Subject: [PATCH 139/855] Logging: pass in parameters to workers (#4369) * closes #4365 - pass in parameters to workers * review changes --- .../handlers/transports/background_thread.py | 4 +++- .../transports/test_background_thread.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 8d5fec79b610..d5f40d855cb3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -255,7 +255,9 @@ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE): self.client = client logger = self.client.logger(name) - self.worker = _Worker(logger) + self.worker = _Worker(logger, + grace_period=grace_period, + max_batch_size=batch_size) self.worker.start() def send(self, record, message, resource=None, labels=None): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 03769dca4e29..2be6198a69c5 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -76,6 +76,22 @@ def test_flush(self): transport.worker.flush.assert_called() + def test_worker(self): + client = _Client(self.PROJECT) + name = 'python_logger' + batch_size = 30 + grace_period = 20. + transport, worker = self._make_one(client, + name, + grace_period=grace_period, + batch_size=batch_size) + worker_grace_period = worker.call_args[1]['grace_period'] # **kwargs. + worker_batch_size = worker.call_args[1]['max_batch_size'] + self.assertEqual(worker_grace_period, + grace_period) + self.assertEqual(worker_batch_size, + batch_size) + class Test_Worker(unittest.TestCase): NAME = 'python_logger' From c465302a58acbc5c7d3f223fbfc50144d5ef87d5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 4 Dec 2017 09:44:59 -0800 Subject: [PATCH 140/855] Pinning `django` test dependency to < 2.0 in Python 2.7. (#4519) See: https://www.djangoproject.com/weblog/2017/dec/02/django-20-released/ Python 2.7 support is explicitly removed. --- packages/google-cloud-logging/nox.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 3bd661d6c1db..cccdafbbe2f8 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -23,6 +23,14 @@ os.path.join('..', 'api_core'), os.path.join('..', 'core'), ) +UNIT_TEST_DEPS = ( + 'mock', + 'pytest', + 'pytest-cov', + 'flask', + 'webapp2', + 'webob', +) @nox.session @@ -35,9 +43,14 @@ def default(session): run the tests. """ # Install all test dependencies, then install this package in-place. 
- session.install( - 'mock', 'pytest', 'pytest-cov', - 'flask', 'webapp2', 'webob', 'django', *LOCAL_DEPS) + deps = UNIT_TEST_DEPS + if session.interpreter == 'python2.7': + deps += ('django >= 1.11.0, < 2.0.0dev',) + else: + deps += ('django',) + + deps += LOCAL_DEPS + session.install(*deps) session.install('-e', '.') # Run py.test against the unit tests. From 619c297d3d86b60c6251a7f6f386e53155560a06 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 4 Dec 2017 10:37:50 -0800 Subject: [PATCH 141/855] Adding another check for Python 2.7 in Logging `nox -s default`. (#4523) See failure: https://ci.appveyor.com/project/GoogleCloudPlatform/google-cloud-python/build/1.0.2663.master/job/rfi7n10xteq2r4xp --- packages/google-cloud-logging/nox.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index cccdafbbe2f8..c25cc45e4b2d 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import os +import sys import nox @@ -46,6 +47,8 @@ def default(session): deps = UNIT_TEST_DEPS if session.interpreter == 'python2.7': deps += ('django >= 1.11.0, < 2.0.0dev',) + elif session.interpreter is None and sys.version_info[:2] == (2, 7): + deps += ('django >= 1.11.0, < 2.0.0dev',) else: deps += ('django',) From 6dc773c1f107f684a1fbcefcf6b38384cbd644b7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 6 Dec 2017 11:24:39 -0800 Subject: [PATCH 142/855] Adding cleanup for Pub / Sub topic in logging system test. (#4532) --- packages/google-cloud-logging/tests/system.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system.py index 312c1e5860da..3449438600cc 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system.py @@ -414,17 +414,16 @@ def test_create_sink_storage_bucket(self): self.assertTrue(sink.exists()) def test_create_sink_pubsub_topic(self): - import uuid - from google.cloud import pubsub_v1 SINK_NAME = 'test-create-sink-topic%s' % (_RESOURCE_ID,) - TOPIC_NAME = '%s-%s' % ('systest', str(uuid.uuid4())[0:8]) + TOPIC_NAME = 'logging-systest{}'.format(unique_resource_id('-')) # Create the destination topic, and set up the IAM policy to allow # Stackdriver Logging to write into it. 
publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(Config.CLIENT.project, TOPIC_NAME) + self.to_delete.append(_DeleteWrapper(publisher, topic_path)) publisher.create_topic(topic_path) policy = publisher.get_iam_policy(topic_path) @@ -439,7 +438,6 @@ sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) self.assertFalse(sink.exists()) sink.create() - publisher.delete_topic(topic_path) self.assertTrue(sink.exists()) def _init_bigquery_dataset(self): @@ -517,3 +515,13 @@ sink.update() self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + + +class _DeleteWrapper(object): + + def __init__(self, publisher, topic_path): + self.publisher = publisher + self.topic_path = topic_path + + def delete(self): + self.publisher.delete_topic(self.topic_path) From 6f40edb6bcf4d92deca01793f2ab92629ffe12ff Mon Sep 17 00:00:00 2001 From: Graham Polley Date: Thu, 14 Dec 2017 02:14:32 +1100 Subject: [PATCH 143/855] Added doc to highlight missing `uniqueWriterIdentity` field (#4579) --- packages/google-cloud-logging/README.rst | 4 ++++ packages/google-cloud-logging/google/cloud/logging/sink.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 24f2538a3b53..f12d764830b1 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -24,6 +24,10 @@ to `Python Development Environment Setup Guide`_ for Google Cloud Platform. .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup +**Note**: The creation of cross-project sinks (log exports) is not currently supported. You may only create sinks within the same project set for the client. In other words, the parameter `uniqueWriterIdentity`_ is not yet available. + +.. _uniqueWriterIdentity: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create + Authentication -------------- diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 3f468e6cf2f0..71ff28bf1334 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -48,7 +48,7 @@ def __init__(self, name, filter_=None, destination=None, client=None): @property def client(self): - """Clent bound to the sink.""" + """Client bound to the sink.""" return self._client From 13a27d5d086d45030cded145ed691e10dd5e7faa Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 20 Dec 2017 10:36:51 -0800 Subject: [PATCH 144/855] Logging: Adding unique writer identity to `Sink`.
(#4595) --- .../google/cloud/logging/_gax.py | 15 +++++++++-- .../google/cloud/logging/_http.py | 16 +++++++++-- .../google/cloud/logging/sink.py | 13 +++++++-- .../tests/unit/test__gax.py | 26 +++++++++++++++--- .../tests/unit/test__http.py | 27 +++++++++++++++++++ .../tests/unit/test_sink.py | 23 +++++++++++++--- 6 files changed, 107 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 08ea448d7875..0d5c7d574a22 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -192,7 +192,8 @@ def list_sinks(self, project, page_size=0, page_token=None): return page_iterator._GAXIterator( self._client, page_iter, _item_to_sink) - def sink_create(self, project, sink_name, filter_, destination): + def sink_create(self, project, sink_name, filter_, destination, + unique_writer_identity=False): """API call: create a sink resource. See @@ -211,13 +212,23 @@ def sink_create(self, project, sink_name, filter_, destination): :type destination: str :param destination: destination URI for the entries exported by the sink. + + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. """ options = None parent = 'projects/%s' % (project,) sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) try: - self._gax_api.create_sink(parent, sink_pb, options=options) + self._gax_api.create_sink( + parent, + sink_pb, + unique_writer_identity=unique_writer_identity, + options=options, + ) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: path = 'projects/%s/sinks/%s' % (project, sink_name) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 810b3536f2ef..eacbe8c500d2 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -229,7 +229,8 @@ def list_sinks(self, project, page_size=None, page_token=None): page_token=page_token, extra_params=extra_params) - def sink_create(self, project, sink_name, filter_, destination): + def sink_create(self, project, sink_name, filter_, destination, + unique_writer_identity=False): """API call: create a sink resource. See @@ -248,6 +249,11 @@ def sink_create(self, project, sink_name, filter_, destination): :type destination: str :param destination: destination URI for the entries exported by the sink. + + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. """ target = '/projects/%s/sinks' % (project,) data = { @@ -255,7 +261,13 @@ def sink_create(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self.api_request(method='POST', path=target, data=data) + query_params = {'uniqueWriterIdentity': unique_writer_identity} + self.api_request( + method='POST', + path=target, + data=data, + query_params=query_params, + ) def sink_get(self, project, sink_name): """API call: retrieve a sink resource. 
diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 843066b4fe4d..ba3c9ca70afb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -39,12 +39,19 @@ class Sink(object): :type client: :class:`google.cloud.logging.client.Client` :param client: A client which holds credentials and project configuration for the sink (which requires a project). + + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. """ - def __init__(self, name, filter_=None, destination=None, client=None): + def __init__(self, name, filter_=None, destination=None, client=None, + unique_writer_identity=False): self.name = name self.filter_ = filter_ self.destination = destination self._client = client + self._unique_writer_identity = unique_writer_identity @property def client(self): @@ -116,7 +123,9 @@ def create(self, client=None): """ client = self._require_client(client) client.sinks_api.sink_create( - self.project, self.name, self.filter_, self.destination) + self.project, self.name, self.filter_, self.destination, + unique_writer_identity=self._unique_writer_identity, + ) def exists(self, client=None): """API call: test for the existence of the sink via a GET request diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index cb07af7d0dad..99e72188b742 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -737,7 +737,7 @@ def test_sink_create_ok(self): api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - parent, sink, options = ( + parent, sink, options, unique_writer_identity = ( gax_api._create_sink_called_with) self.assertEqual(parent, self.PROJECT_PATH) self.assertIsInstance(sink, LogSink) @@ -745,6 +745,26 @@ def test_sink_create_ok(self): self.assertEqual(sink.filter, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertIsNone(options) + self.assertFalse(unique_writer_identity) + + def test_sink_create_with_unique_writer_identity(self): + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink + + gax_api = _GAXSinksAPI() + api = self._make_one(gax_api, None) + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + unique_writer_identity=True, + ) + parent, sink, options, unique_writer_identity = ( + gax_api._create_sink_called_with) + self.assertEqual(parent, self.PROJECT_PATH) + self.assertIsInstance(sink, LogSink) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIsNone(options) + self.assertTrue(unique_writer_identity) def test_sink_get_error(self): from google.cloud.exceptions import NotFound @@ -1462,10 +1482,10 @@ def list_sinks(self, parent, page_size, options): self._list_sinks_called_with = parent, page_size, options return self._list_sinks_response - def create_sink(self, parent, sink, options): + def create_sink(self, parent, sink, options, unique_writer_identity=False): from google.gax.errors import GaxError - self._create_sink_called_with = parent, sink, options + self._create_sink_called_with = parent, sink, options, unique_writer_identity if self._random_gax_error: 
raise GaxError('error') if self._create_sink_conflict: raise diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 2a920fce7ee6..ea3a5e15269f 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -476,6 +476,33 @@ def test_sink_create_ok(self): self.assertEqual(conn._called_with['path'], path) self.assertEqual(conn._called_with['data'], SENT) + def test_sink_create_unique_writer_identity(self): + sent = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + + conn = _Connection({}) + client = _Client(conn) + api = self._make_one(client) + + api.sink_create( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + unique_writer_identity=True, + ) + path = '/projects/%s/sinks' % (self.PROJECT,) + expected = { + 'method': 'POST', + 'path': path, + 'data': sent, + 'query_params': {'uniqueWriterIdentity': True}, + } + self.assertEqual(conn._called_with, expected) + def test_sink_get_miss(self): from google.cloud.exceptions import NotFound diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index 8a31fa047e0d..a833d85bae0f 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -102,7 +102,14 @@ def test_create_w_bound_client(self): self.assertEqual( api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + ( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + False, + ), + ) def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) @@ -116,7 +123,14 @@ def test_create_w_alternate_client(self): self.assertEqual( api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + ( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + False, + ), + ) def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -255,9 +269,10 @@ def __init__(self, project): class _DummySinksAPI(object): - def sink_create(self, project, sink_name, filter_, destination): + def sink_create(self, project, sink_name, filter_, destination, + unique_writer_identity=False): self._sink_create_called_with = ( - project, sink_name, filter_, destination) + project, sink_name, filter_, destination, unique_writer_identity) def sink_get(self, project, sink_name): from google.cloud.exceptions import NotFound From 39a05c7ce8137c1f672c388aa3866216c590db1b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 5 Jan 2018 12:05:15 -0500 Subject: [PATCH 145/855] Accommodate back-end change making 'Sink.filter' optional. (#4699) Closes #4696. --- .../google/cloud/logging/sink.py | 9 ++++----- .../google-cloud-logging/tests/unit/test_sink.py | 15 ++++----------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index ba3c9ca70afb..ef7b3318fdbd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -27,9 +27,8 @@ class Sink(object): :param name: the name of the sink :type filter_: str - :param filter_: the advanced logs filter expression defining the entries - exported by the sink.
If not passed, the instance should - already exist, to be refreshed via :meth:`reload`. + :param filter_: (optional) the advanced logs filter expression defining + the entries exported by the sink. :type destination: str :param destination: destination URI for the entries exported by the sink. @@ -91,8 +90,8 @@ def from_api_repr(cls, resource, client): from the client. """ sink_name = resource['name'] - filter_ = resource['filter'] destination = resource['destination'] + filter_ = resource.get('filter') return cls(sink_name, filter_, destination, client=client) def _require_client(self, client): @@ -163,8 +162,8 @@ def reload(self, client=None): """ client = self._require_client(client) data = client.sinks_api.sink_get(self.project, self.name) - self.filter_ = data['filter'] self.destination = data['destination'] + self.filter_ = data.get('filter') def update(self, client=None): """API call: update sink configuration via a PUT request diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index a833d85bae0f..e3446c0302ed 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -62,13 +62,12 @@ def test_from_api_repr_minimal(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) RESOURCE = { 'name': self.SINK_NAME, - 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter_, self.FILTER) + self.assertIsNone(sink.filter_) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertIs(sink._client, client) self.assertEqual(sink.project, self.PROJECT) @@ -164,24 +163,20 @@ def test_exists_hit_w_alternate_client(self): (self.PROJECT, self.SINK_NAME)) def test_reload_w_bound_client(self): - NEW_FILTER = 'logName:syslog AND severity>=INFO' NEW_DESTINATION_URI = 'faux.googleapis.com/other' RESOURCE = { 'name': self.SINK_NAME, - 'filter': NEW_FILTER, 'destination': NEW_DESTINATION_URI, } client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one(self.SINK_NAME, client=client) sink.reload() - self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertIsNone(sink.filter_) self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) @@ -197,9 +192,7 @@ def test_reload_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client1) + sink = self._make_one(self.SINK_NAME, client=client1) sink.reload(client=client2) From 96b1a5ec38b11b5ea344170e39ed58c239924d93 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 5 Jan 2018 12:36:25 -0500 Subject: [PATCH 146/855] Harden test for 'retrieve_metadata_server' against transparent DNS proxies (#4698) --- .../tests/unit/test__helpers.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 6098eaefd877..5177fe267fc6 100644 --- 
a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -105,15 +105,23 @@ def test_metadata_does_not_exist(self): self.assertIsNone(metadata) def test_request_exception(self): + import requests + metadata_key = 'test_url_cannot_connect' metadata_url = 'http://metadata.invalid/' - patch = mock.patch( + requests_get_mock = mock.Mock(spec=['__call__']) + requests_get_mock.side_effect = requests.exceptions.RequestException + + requests_get_patch = mock.patch('requests.get', requests_get_mock) + + url_patch = mock.patch( 'google.cloud.logging._helpers.METADATA_URL', new=metadata_url) - with patch: - metadata = self._call_fut(metadata_key) + with requests_get_patch: + with url_patch: + metadata = self._call_fut(metadata_key) self.assertIsNone(metadata) From a70a4e1e55f58f4f406401d6a00ade7e2b0dc591 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 5 Jan 2018 13:08:41 -0500 Subject: [PATCH 147/855] Expose writerIdentity returned in sink resource. (#4704) Closes #4703. --- .../google/cloud/logging/sink.py | 17 +++++++++++++---- .../tests/unit/test_sink.py | 15 +++++++++++---- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index ef7b3318fdbd..c815451143a8 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -51,6 +51,7 @@ def __init__(self, name, filter_=None, destination=None, client=None, self.destination = destination self._client = client self._unique_writer_identity = unique_writer_identity + self._writer_identity = None @property def client(self): @@ -72,6 +73,11 @@ def path(self): """URL path for the sink's APIs""" return '/%s' % (self.full_name) + @property + def writer_identity(self): + """Identity used for exports via the sink""" + return self._writer_identity + @classmethod def from_api_repr(cls, resource, client): """Factory: construct a sink given its API representation @@ -92,7 +98,9 @@ def from_api_repr(cls, resource, client): sink_name = resource['name'] destination = resource['destination'] filter_ = resource.get('filter') - return cls(sink_name, filter_, destination, client=client) + instance = cls(sink_name, filter_, destination, client=client) + instance._writer_identity = resource.get('writerIdentity') + return instance def _require_client(self, client): """Check client or verify over-ride. @@ -161,9 +169,10 @@ def reload(self, client=None): ``client`` stored on the current sink. 
""" client = self._require_client(client) - data = client.sinks_api.sink_get(self.project, self.name) - self.destination = data['destination'] - self.filter_ = data.get('filter') + resource = client.sinks_api.sink_get(self.project, self.name) + self.destination = resource['destination'] + self.filter_ = resource.get('filter') + self._writer_identity = resource.get('writerIdentity') def update(self, client=None): """API call: update sink configuration via a PUT request diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index e3446c0302ed..15b787ae00a9 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -21,6 +21,7 @@ class TestSink(unittest.TestCase): SINK_NAME = 'sink-name' FILTER = 'logName:syslog AND severity>=INFO' DESTINATION_URI = 'faux.googleapis.com/destination' + WRITER_IDENTITY = 'serviceAccount:project-123@example.com' @staticmethod def _get_target_class(): @@ -67,25 +68,28 @@ def test_from_api_repr_minimal(self): klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) - self.assertIsNone(sink.filter_) self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertIsNone(sink.filter_) + self.assertIsNone(sink.writer_identity) self.assertIs(sink._client, client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) - def test_from_api_repr_w_description(self): + def test_from_api_repr_full(self): client = _Client(project=self.PROJECT) FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) RESOURCE = { 'name': self.SINK_NAME, - 'filter': self.FILTER, 'destination': self.DESTINATION_URI, + 'filter': self.FILTER, + 'writerIdentity': self.WRITER_IDENTITY, } klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertIs(sink._client, client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) @@ -177,6 +181,7 @@ def test_reload_w_bound_client(self): self.assertEqual(sink.destination, NEW_DESTINATION_URI) self.assertIsNone(sink.filter_) + self.assertIsNone(sink.writer_identity) self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) @@ -187,6 +192,7 @@ def test_reload_w_alternate_client(self): 'name': self.SINK_NAME, 'filter': NEW_FILTER, 'destination': NEW_DESTINATION_URI, + 'writerIdentity': self.WRITER_IDENTITY, } client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) @@ -196,8 +202,9 @@ def test_reload_w_alternate_client(self): sink.reload(client=client2) - self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) From 42a1a557a176a6d23f094d195536dda9952aff46 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 5 Jan 2018 16:26:20 -0500 Subject: [PATCH 148/855] Move 'unique_writer_identity' from LogSink ctor to 'create' (#4706) Preps support for passing it to 'update' as well. 
See: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4703#issuecomment-355622741 --- .../google/cloud/logging/sink.py | 18 ++++++++---------- .../tests/unit/test_sink.py | 4 ++-- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index c815451143a8..e4d672ad61e8 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -38,19 +38,12 @@ class Sink(object): :type client: :class:`google.cloud.logging.client.Client` :param client: A client which holds credentials and project configuration for the sink (which requires a project). - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. """ - def __init__(self, name, filter_=None, destination=None, client=None, - unique_writer_identity=False): + def __init__(self, name, filter_=None, destination=None, client=None): self.name = name self.filter_ = filter_ self.destination = destination self._client = client - self._unique_writer_identity = unique_writer_identity self._writer_identity = None @property @@ -117,7 +110,7 @@ def _require_client(self, client): client = self._client return client - def create(self, client=None): + def create(self, client=None, unique_writer_identity=False): """API call: create the sink via a PUT request See @@ -127,11 +120,16 @@ def create(self, client=None): ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. + + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. 
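Example (an illustrative sketch; assumes the sink has not been
        created yet)::

            if not sink.exists():
                sink.create(unique_writer_identity=True)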
""" client = self._require_client(client) client.sinks_api.sink_create( self.project, self.name, self.filter_, self.destination, - unique_writer_identity=self._unique_writer_identity, + unique_writer_identity=unique_writer_identity, ) def exists(self, client=None): diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index 15b787ae00a9..f304c1394f85 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -122,7 +122,7 @@ def test_create_w_alternate_client(self): client=client1) api = client2.sinks_api = _DummySinksAPI() - sink.create(client=client2) + sink.create(client=client2, unique_writer_identity=True) self.assertEqual( api._sink_create_called_with, @@ -131,7 +131,7 @@ def test_create_w_alternate_client(self): self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - False, + True, ), ) From d5c77174e88916a8ae50033710aa8328a1cd41b7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 5 Jan 2018 16:37:52 -0500 Subject: [PATCH 149/855] Capture server-generated 'writerIdentity' during 'LogSink.create' (#4707) --- .../google/cloud/logging/_gax.py | 7 ++- .../google/cloud/logging/_http.py | 5 +- .../google/cloud/logging/sink.py | 19 ++++---- .../tests/unit/test__gax.py | 47 ++++++++++++------- .../tests/unit/test__http.py | 40 ++++++---------- .../tests/unit/test_sink.py | 21 +++++++++ 6 files changed, 87 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index 0d5c7d574a22..b2945846349b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -217,13 +217,17 @@ def sink_create(self, project, sink_name, filter_, destination, :param unique_writer_identity: (Optional) determines the kind of IAM identity returned as writer_identity in the new sink. + + :rtype: dict + :returns: The sink resource returned from the API (converted from a + protobuf to a dictionary). """ options = None parent = 'projects/%s' % (project,) sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) try: - self._gax_api.create_sink( + created_pb = self._gax_api.create_sink( parent, sink_pb, unique_writer_identity=unique_writer_identity, @@ -234,6 +238,7 @@ def sink_create(self, project, sink_name, filter_, destination, path = 'projects/%s/sinks/%s' % (project, sink_name) raise Conflict(path) raise + return MessageToDict(created_pb) def sink_get(self, project, sink_name): """API call: retrieve a sink resource. diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index eacbe8c500d2..088fd569d81b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -254,6 +254,9 @@ def sink_create(self, project, sink_name, filter_, destination, :param unique_writer_identity: (Optional) determines the kind of IAM identity returned as writer_identity in the new sink. + + :rtype: dict + :returns: The returned (created) resource. 
""" target = '/projects/%s/sinks' % (project,) data = { @@ -262,7 +265,7 @@ def sink_create(self, project, sink_name, filter_, destination, 'destination': destination, } query_params = {'uniqueWriterIdentity': unique_writer_identity} - self.api_request( + return self.api_request( method='POST', path=target, data=data, diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index e4d672ad61e8..71cba294235c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -71,6 +71,12 @@ def writer_identity(self): """Identity used for exports via the sink""" return self._writer_identity + def _update_from_api_repr(self, resource): + """Helper for API methods returning sink resources.""" + self.destination = resource['destination'] + self.filter_ = resource.get('filter') + self._writer_identity = resource.get('writerIdentity') + @classmethod def from_api_repr(cls, resource, client): """Factory: construct a sink given its API representation @@ -89,10 +95,8 @@ def from_api_repr(cls, resource, client): from the client. """ sink_name = resource['name'] - destination = resource['destination'] - filter_ = resource.get('filter') - instance = cls(sink_name, filter_, destination, client=client) - instance._writer_identity = resource.get('writerIdentity') + instance = cls(sink_name, client=client) + instance._update_from_api_repr(resource) return instance def _require_client(self, client): @@ -127,10 +131,11 @@ def create(self, client=None, unique_writer_identity=False): writer_identity in the new sink. """ client = self._require_client(client) - client.sinks_api.sink_create( + resource = client.sinks_api.sink_create( self.project, self.name, self.filter_, self.destination, unique_writer_identity=unique_writer_identity, ) + self._update_from_api_repr(resource) def exists(self, client=None): """API call: test for the existence of the sink via a GET request @@ -168,9 +173,7 @@ def reload(self, client=None): """ client = self._require_client(client) resource = client.sinks_api.sink_get(self.project, self.name) - self.destination = resource['destination'] - self.filter_ = resource.get('filter') - self._writer_identity = resource.get('writerIdentity') + self._update_from_api_repr(resource) def update(self, client=None): """API call: update sink configuration via a PUT request diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index 99e72188b742..84b5c7dd1a7e 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -619,6 +619,7 @@ class Test_SinksAPI(_Base, unittest.TestCase): SINK_NAME = 'sink_name' SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) DESTINATION_URI = 'faux.googleapis.com/destination' + SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com' @staticmethod def _get_target_class(): @@ -719,6 +720,7 @@ def test_sink_create_error(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI(_create_sink_conflict=True) api = self._make_one(gax_api, None) @@ -728,16 +730,7 @@ def test_sink_create_conflict(self): self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - def test_sink_create_ok(self): - from google.cloud.proto.logging.v2.logging_config_pb2 
import LogSink - - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - - parent, sink, options, unique_writer_identity = ( + parent, sink, unique_writer_identity, options = ( gax_api._create_sink_called_with) self.assertEqual(parent, self.PROJECT_PATH) self.assertIsInstance(sink, LogSink) @@ -747,24 +740,36 @@ def test_sink_create_ok(self): self.assertIsNone(options) self.assertFalse(unique_writer_identity) - def test_sink_create_with_unique_writer_identity(self): + def test_sink_create_ok(self): from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + + returned = api.sink_create( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, unique_writer_identity=True, ) - parent, sink, options, unique_writer_identity = ( + + self.assertEqual(returned, { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + }) + + parent, sink, unique_writer_identity, options = ( gax_api._create_sink_called_with) self.assertEqual(parent, self.PROJECT_PATH) self.assertIsInstance(sink, LogSink) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIsNone(options) self.assertTrue(unique_writer_identity) + self.assertIsNone(options) def test_sink_get_error(self): from google.cloud.exceptions import NotFound @@ -1482,14 +1487,22 @@ def list_sinks(self, parent, page_size, options): self._list_sinks_called_with = parent, page_size, options return self._list_sinks_response - def create_sink(self, parent, sink, options, unique_writer_identity=False): + def create_sink(self, parent, sink, unique_writer_identity, options): from google.gax.errors import GaxError + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - self._create_sink_called_with = parent, sink, options, unique_writer_identity + self._create_sink_called_with = ( + parent, sink, unique_writer_identity, options) if self._random_gax_error: raise GaxError('error') if self._create_sink_conflict: raise GaxError('conflict', self._make_grpc_failed_precondition()) + return LogSink( + name=sink.name, + destination=sink.destination, + filter=sink.filter, + writer_identity=Test_SinksAPI.SINK_WRITER_IDENTITY, + ) def get_sink(self, sink_name, options): from google.gax.errors import GaxError diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index ea3a5e15269f..510bec30fec6 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -334,6 +334,7 @@ class Test_SinksAPI(unittest.TestCase): SINK_NAME = 'sink_name' SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) DESTINATION_URI = 'faux.googleapis.com/destination' + WRITER_IDENTITY = 'serviceAccount:project-123@example.com' @staticmethod def _get_target_class(): @@ -438,7 +439,7 @@ def test_list_sinks_w_paging(self): def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict - SENT = { + sent = { 'name': self.SINK_NAME, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, @@ -453,47 +454,36 @@ def test_sink_create_conflict(self): self.PROJECT, self.SINK_NAME, 
self.FILTER, self.DESTINATION_URI) - self.assertEqual(conn._called_with['method'], 'POST') path = '/projects/%s/sinks' % (self.PROJECT,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) - - def test_sink_create_ok(self): - SENT = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + expected = { + 'method': 'POST', + 'path': path, + 'data': sent, + 'query_params': {'uniqueWriterIdentity': False}, } - conn = _Connection({}) - client = _Client(conn) - api = self._make_one(client) - - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - - self.assertEqual(conn._called_with['method'], 'POST') - path = '/projects/%s/sinks' % (self.PROJECT,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with, expected) - def test_sink_create_unique_writer_identity(self): + def test_sink_create_ok(self): sent = { 'name': self.SINK_NAME, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - - conn = _Connection({}) + after_create = sent.copy() + after_create['writerIdentity'] = self.WRITER_IDENTITY + conn = _Connection(after_create) client = _Client(conn) api = self._make_one(client) - api.sink_create( + returned = api.sink_create( self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, unique_writer_identity=True, ) + + self.assertEqual(returned, after_create) path = '/projects/%s/sinks' % (self.PROJECT,) expected = { 'method': 'POST', diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index f304c1394f85..e7dea3b81c16 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -97,12 +97,22 @@ def test_from_api_repr_full(self): def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() + api._sink_create_response = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.WRITER_IDENTITY, + } sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.create() + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, ( @@ -121,9 +131,19 @@ def test_create_w_alternate_client(self): self.DESTINATION_URI, client=client1) api = client2.sinks_api = _DummySinksAPI() + api._sink_create_response = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.WRITER_IDENTITY, + } sink.create(client=client2, unique_writer_identity=True) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, ( @@ -273,6 +293,7 @@ def sink_create(self, project, sink_name, filter_, destination, unique_writer_identity=False): self._sink_create_called_with = ( project, sink_name, filter_, destination, unique_writer_identity) + return self._sink_create_response def sink_get(self, project, sink_name): from google.cloud.exceptions import NotFound From 
6233115358b435868f5ae529ecb14630c8a6e03d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 8 Jan 2018 12:09:36 -0500 Subject: [PATCH 150/855] Support passing 'uniqueWriterIdentity' to 'Sink.update'. (#4708) Also, capture server-set read-only properties returned from 'sinks.update', such as 'writerIdentity'. --- .../google/cloud/logging/_gax.py | 16 +++++- .../google/cloud/logging/_http.py | 12 +++- .../google/cloud/logging/sink.py | 14 ++++- .../tests/unit/test__gax.py | 56 ++++++++++++++----- .../tests/unit/test__http.py | 37 ++++++++---- .../tests/unit/test_sink.py | 44 +++++++++++++-- 6 files changed, 141 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gax.py index b2945846349b..3f3624b47cc4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gax.py @@ -265,7 +265,8 @@ def sink_get(self, project, sink_name): # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) - def sink_update(self, project, sink_name, filter_, destination): + def sink_update(self, project, sink_name, filter_, destination, + unique_writer_identity=False): """API call: update a sink resource. :type project: str @@ -282,15 +283,24 @@ def sink_update(self, project, sink_name, filter_, destination): :param destination: destination URI for the entries exported by the sink. + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. + :rtype: dict - :returns: The sink object returned from the API (converted from a + :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). """ options = None path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) try: - sink_pb = self._gax_api.update_sink(path, sink_pb, options=options) + sink_pb = self._gax_api.update_sink( + path, + sink_pb, + unique_writer_identity=unique_writer_identity, + options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 088fd569d81b..82207f173503 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -290,7 +290,8 @@ def sink_get(self, project, sink_name): target = '/projects/%s/sinks/%s' % (project, sink_name) return self.api_request(method='GET', path=target) - def sink_update(self, project, sink_name, filter_, destination): + def sink_update(self, project, sink_name, filter_, destination, + unique_writer_identity=False): """API call: update a sink resource. See @@ -310,6 +311,11 @@ def sink_update(self, project, sink_name, filter_, destination): :param destination: destination URI for the entries exported by the sink. + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. + :rtype: dict :returns: The returned (updated) resource. 
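Example (an illustrative sketch; ``api`` is an assumed ``_SinksAPI``
        instance, and all values are placeholders)::

            resource = api.sink_update(
                'my-project', 'my-sink', 'severity>=ERROR',
                'storage.googleapis.com/my-bucket',
                unique_writer_identity=True)
            resource.get('writerIdentity')  # echoed back by the server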
""" @@ -319,7 +325,9 @@ def sink_update(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - return self.api_request(method='PUT', path=target, data=data) + query_params = {'uniqueWriterIdentity': unique_writer_identity} + return self.api_request( + method='PUT', path=target, query_params=query_params, data=data) def sink_delete(self, project, sink_name): """API call: delete a sink resource. diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 71cba294235c..2e9b7ef62e13 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -175,7 +175,7 @@ def reload(self, client=None): resource = client.sinks_api.sink_get(self.project, self.name) self._update_from_api_repr(resource) - def update(self, client=None): + def update(self, client=None, unique_writer_identity=False): """API call: update sink configuration via a PUT request See @@ -185,10 +185,18 @@ def update(self, client=None): ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. + + :type unique_writer_identity: bool + :param unique_writer_identity: (Optional) determines the kind of + IAM identity returned as + writer_identity in the new sink. """ client = self._require_client(client) - client.sinks_api.sink_update( - self.project, self.name, self.filter_, self.destination) + resource = client.sinks_api.sink_update( + self.project, self.name, self.filter_, self.destination, + unique_writer_identity=unique_writer_identity, + ) + self._update_from_api_repr(resource) def delete(self, client=None): """API call: delete a sink via a DELETE request diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py index 84b5c7dd1a7e..c2c5f3199abf 100644 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ b/packages/google-cloud-logging/tests/unit/test__gax.py @@ -744,6 +744,12 @@ def test_sink_create_ok(self): from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() + gax_api._create_sink_response = LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=self.FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) api = self._make_one(gax_api, None) returned = api.sink_create( @@ -824,6 +830,7 @@ def test_sink_update_error(self): def test_sink_update_miss(self): from google.cloud.exceptions import NotFound + from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink gax_api = _GAXSinksAPI() api = self._make_one(gax_api, None) @@ -833,25 +840,50 @@ def test_sink_update_miss(self): self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + sink_name, sink, unique_writer_identity, options = ( + gax_api._update_sink_called_with) + self.assertEqual(sink_name, self.SINK_PATH) + self.assertIsInstance(sink, LogSink) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertFalse(unique_writer_identity) + self.assertIsNone(options) + def test_sink_update_hit(self): from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - response = LogSink(name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=self.FILTER) + response = LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=self.FILTER, + 
writer_identity=Test_SinksAPI.SINK_WRITER_IDENTITY, + ) gax_api = _GAXSinksAPI(_update_sink_response=response) api = self._make_one(gax_api, None) - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + returned = api.sink_update( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + unique_writer_identity=True) - sink_name, sink, options = ( + self.assertEqual(returned, { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + }) + + sink_name, sink, unique_writer_identity, options = ( gax_api._update_sink_called_with) self.assertEqual(sink_name, self.SINK_PATH) self.assertIsInstance(sink, LogSink) self.assertEqual(sink.name, self.SINK_PATH) self.assertEqual(sink.filter, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(unique_writer_identity) self.assertIsNone(options) def test_sink_delete_error(self): @@ -1497,12 +1529,7 @@ def create_sink(self, parent, sink, unique_writer_identity, options): raise GaxError('error') if self._create_sink_conflict: raise GaxError('conflict', self._make_grpc_failed_precondition()) - return LogSink( - name=sink.name, - destination=sink.destination, - filter=sink.filter, - writer_identity=Test_SinksAPI.SINK_WRITER_IDENTITY, - ) + return self._create_sink_response def get_sink(self, sink_name, options): from google.gax.errors import GaxError @@ -1515,10 +1542,11 @@ def get_sink(self, sink_name, options): except AttributeError: raise GaxError('notfound', self._make_grpc_not_found()) - def update_sink(self, sink_name, sink, options=None): + def update_sink(self, sink_name, sink, unique_writer_identity, options): from google.gax.errors import GaxError - self._update_sink_called_with = sink_name, sink, options + self._update_sink_called_with = ( + sink_name, sink, unique_writer_identity, options) if self._random_gax_error: raise GaxError('error') try: diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 510bec30fec6..aa4c54ac30d2 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -527,7 +527,7 @@ def test_sink_get_hit(self): def test_sink_update_miss(self): from google.cloud.exceptions import NotFound - SENT = { + sent = { 'name': self.SINK_NAME, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, @@ -541,28 +541,43 @@ def test_sink_update_miss(self): self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) - self.assertEqual(conn._called_with['method'], 'PUT') path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + expected = { + 'method': 'PUT', + 'path': path, + 'data': sent, + 'query_params': {'uniqueWriterIdentity': False}, + } + self.assertEqual(conn._called_with, expected) def test_sink_update_hit(self): - SENT = { + sent = { 'name': self.SINK_NAME, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - conn = _Connection({}) + after_update = sent.copy() + after_update['writerIdentity'] = self.WRITER_IDENTITY + conn = _Connection(after_update) client = _Client(conn) api = self._make_one(client) - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + returned = api.sink_update( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + 
unique_writer_identity=True) - self.assertEqual(conn._called_with['method'], 'PUT') + self.assertEqual(returned, after_update) path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + expected = { + 'method': 'PUT', + 'path': path, + 'data': sent, + 'query_params': {'uniqueWriterIdentity': True}, + } + self.assertEqual(conn._called_with, expected) def test_sink_delete_miss(self): from google.cloud.exceptions import NotFound diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index e7dea3b81c16..fdc8f80f1e5b 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -231,29 +231,61 @@ def test_reload_w_alternate_client(self): def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() + api._sink_update_response = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.WRITER_IDENTITY, + } sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.update() + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + ( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + False, + )) def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() + api._sink_update_response = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.WRITER_IDENTITY, + } sink = self._make_one(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) - sink.update(client=client2) + sink.update(client=client2, unique_writer_identity=True) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + ( + self.PROJECT, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + True, + )) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -304,9 +336,11 @@ def sink_get(self, project, sink_name): except AttributeError: raise NotFound('miss') - def sink_update(self, project, sink_name, filter_, destination): + def sink_update(self, project, sink_name, filter_, destination, + unique_writer_identity=False): self._sink_update_called_with = ( - project, sink_name, filter_, destination) + project, sink_name, filter_, destination, unique_writer_identity) + return self._sink_update_response def sink_delete(self, project, sink_name): self._sink_delete_called_with = (project, sink_name) From 0caaf785fb3facabc7a5ec95ecea643ffc1aabb0 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 19 Jan 2018 15:14:56 -0800 Subject: [PATCH 151/855] Generate gapic for logging, remove all references to gax (#4759) * Import logging_v2 generated by artman Command log: ``` set DEST 
~/workspace/google-cloud-python/logging set GOOGLEAPIS_REV fca7f0706769f8ece08121a87394b7c6e6d87687 cd /tmp git clone https://github.com/googleapis/googleapis.git cd googleapis git reset --hard $GOOGLEAPIS_REV artman --config google/logging/artman_logging.yaml generate python_gapic set SRC ./artman-genfiles/python/logging-v2/ set IMPORT_PKG logging_v2 cp -r $SRC/docs $DEST cp -r $SRC/google/cloud/$IMPORT_PKG $DEST/google/cloud/ mkdir -p $DEST/tests/unit/gapic cp -r $SRC/tests/unit/gapic $DEST/tests/unit mkdir -p $DEST/tests/system/gapic cp -r $SRC/tests/system/gapic $DEST/tests/system ``` * Removing all references to gax * Fix typo * Add missing encoding * Remove unused test helper --- packages/google-cloud-logging/docs/conf.py | 310 ++++ .../docs/gapic/v2/api.rst | 6 + .../docs/gapic/v2/types.rst | 5 + packages/google-cloud-logging/docs/index.rst | 99 + .../cloud/logging/{_gax.py => _gapic.py} | 223 +-- .../google/cloud/logging/client.py | 16 +- .../google/cloud/logging_v2/__init__.py | 45 + .../google/cloud/logging_v2/gapic/__init__.py | 0 .../gapic/config_service_v2_client.py | 912 ++++++++++ .../gapic/config_service_v2_client_config.py | 82 + .../google/cloud/logging_v2/gapic/enums.py | 159 ++ .../gapic/logging_service_v2_client.py | 615 +++++++ .../gapic/logging_service_v2_client_config.py | 62 + .../gapic/metrics_service_v2_client.py | 465 +++++ .../gapic/metrics_service_v2_client_config.py | 48 + .../google/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry_pb2.py | 508 ++++++ .../logging_v2/proto/log_entry_pb2_grpc.py | 3 + .../logging_v2/proto/logging_config_pb2.py | 1576 ++++++++++++++++ .../proto/logging_config_pb2_grpc.py | 211 +++ .../logging_v2/proto/logging_metrics_pb2.py | 895 +++++++++ .../proto/logging_metrics_pb2_grpc.py | 115 ++ .../cloud/logging_v2/proto/logging_pb2.py | 1146 ++++++++++++ .../logging_v2/proto/logging_pb2_grpc.py | 128 ++ .../google/cloud/logging_v2/types.py | 65 + packages/google-cloud-logging/nox.py | 3 +- packages/google-cloud-logging/setup.py | 5 +- .../v2/test_system_logging_service_v2_v2.py | 34 + .../{system.py => system/test_system.py} | 19 +- .../v2/test_config_service_v2_client_v2.py | 460 +++++ .../v2/test_logging_service_v2_client_v2.py | 238 +++ .../v2/test_metrics_service_v2_client_v2.py | 256 +++ .../tests/unit/test__gapic.py | 645 +++++++ .../tests/unit/test__gax.py | 1614 ----------------- .../tests/unit/test_client.py | 35 +- 35 files changed, 9190 insertions(+), 1813 deletions(-) create mode 100644 packages/google-cloud-logging/docs/conf.py create mode 100644 packages/google-cloud-logging/docs/gapic/v2/api.rst create mode 100644 packages/google-cloud-logging/docs/gapic/v2/types.rst create mode 100644 packages/google-cloud-logging/docs/index.rst rename packages/google-cloud-logging/google/cloud/logging/{_gax.py => _gapic.py} (74%) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py create mode 100644 
packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types.py create mode 100644 packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py rename packages/google-cloud-logging/tests/{system.py => system/test_system.py} (97%) create mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/test__gapic.py delete mode 100644 packages/google-cloud-logging/tests/unit/test__gax.py diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py new file mode 100644 index 000000000000..adaa0afcea45 --- /dev/null +++ b/packages/google-cloud-logging/docs/conf.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-logging documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.91.4' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-logging' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. 
+#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-logging-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-logging.tex', + u'google-cloud-logging Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. 
+#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-logging', + u'google-cloud-logging Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-logging', u'google-cloud-logging Documentation', + author, 'google-cloud-logging', + 'GAPIC library for the {metadata.shortName} v2 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-logging/docs/gapic/v2/api.rst b/packages/google-cloud-logging/docs/gapic/v2/api.rst new file mode 100644 index 000000000000..2dc6bf6fcc6b --- /dev/null +++ b/packages/google-cloud-logging/docs/gapic/v2/api.rst @@ -0,0 +1,6 @@ +Client for Stackdriver Logging API +================================== + +.. automodule:: google.cloud.logging_v2 + :members: + :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/gapic/v2/types.rst b/packages/google-cloud-logging/docs/gapic/v2/types.rst new file mode 100644 index 000000000000..5521d4f9bc12 --- /dev/null +++ b/packages/google-cloud-logging/docs/gapic/v2/types.rst @@ -0,0 +1,5 @@ +Types for Stackdriver Logging API Client +======================================== + +.. 
automodule:: google.cloud.logging_v2.types + :members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst new file mode 100644 index 000000000000..5bb60aaf57ee --- /dev/null +++ b/packages/google-cloud-logging/docs/index.rst @@ -0,0 +1,99 @@ +Python Client for Stackdriver Logging API (`Beta`_) +=================================================== + +`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver Logging configuration. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +.. _Stackdriver Logging API: https://cloud.google.com/logging +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html +.. _Product Documentation: https://cloud.google.com/logging + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Stackdriver Logging API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-logging + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-logging + +Preview +~~~~~~~ + +LoggingServiceV2Client +^^^^^^^^^^^^^^^^^^^^^^ + +.. code:: py + + from google.cloud import logging_v2 + + client = logging_v2.LoggingServiceV2Client() + + entries = [] + + response = client.write_log_entries(entries) + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Stackdriver Logging API + API to see other available methods on the client. +- Read the `Stackdriver Logging API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Stackdriver Logging API Product documentation: https://cloud.google.com/logging +.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst + +Api Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + gapic/v2/api + gapic/v2/types \ No newline at end of file diff --git a/packages/google-cloud-logging/google/cloud/logging/_gax.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py similarity index 74% rename from packages/google-cloud-logging/google/cloud/logging/_gax.py rename to packages/google-cloud-logging/google/cloud/logging/_gapic.py index 3f3624b47cc4..a292721111eb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gax.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gapic.py @@ -12,50 +12,46 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""GAX wrapper for Logging API requests.""" +"""Wrapper for adapting the autogenerated gapic client to the hand-written +client.""" import functools -from google.api_core import page_iterator -from google.cloud.gapic.logging.v2.config_service_v2_client import ( +from google.api_core.gapic_v1 import client_info +from google.cloud.logging_v2.gapic.config_service_v2_client import ( ConfigServiceV2Client) -from google.cloud.gapic.logging.v2.logging_service_v2_client import ( +from google.cloud.logging_v2.gapic.logging_service_v2_client import ( LoggingServiceV2Client) -from google.cloud.gapic.logging.v2.metrics_service_v2_client import ( +from google.cloud.logging_v2.gapic.metrics_service_v2_client import ( MetricsServiceV2Client) -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink -from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric -from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry +from google.cloud.logging_v2.proto.logging_config_pb2 import LogSink +from google.cloud.logging_v2.proto.logging_metrics_pb2 import LogMetric +from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict -from grpc import StatusCode -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric +_CLIENT_INFO = client_info.ClientInfo( + client_library_version=__version__) + + class _LoggingAPI(object): """Helper mapping logging-related APIs. - :type gax_api: + :type gapic_api: :class:`.logging_service_v2_client.LoggingServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_entries(self, projects, filter_='', order_by='', @@ -88,21 +84,19 @@ def list_entries(self, projects, filter_='', order_by='', :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. 
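Example (an illustrative sketch; ``api`` is an assumed, already
        constructed ``_LoggingAPI`` instance)::

            for entry in api.list_entries(
                    ['my-project'], filter_='severity>=ERROR'):
                ...  # each item is a log entry object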
""" - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - page_iter = self._gax_api.list_log_entries( + page_iter = self._gapic_api.list_log_entries( [], project_ids=projects, filter_=filter_, order_by=order_by, - page_size=page_size, options=options) + page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - item_to_value = functools.partial( + page_iter._item_to_value = functools.partial( _item_to_entry, loggers=loggers) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) + return page_iter def write_entries(self, entries, logger_name=None, resource=None, labels=None): @@ -123,12 +117,12 @@ def write_entries(self, entries, logger_name=None, resource=None, :param labels: default labels to associate with entries; individual entries may override. """ - options = None partial_success = False entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - self._gax_api.write_log_entries( + print(entry_pbs) + self._gapic_api.write_log_entries( entry_pbs, log_name=logger_name, resource=resource, labels=labels, - partial_success=partial_success, options=options) + partial_success=partial_success) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -139,28 +133,22 @@ def logger_delete(self, project, logger_name): :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ - options = None path = 'projects/%s/logs/%s' % (project, logger_name) - try: - self._gax_api.delete_log(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + self._gapic_api.delete_log(path) class _SinksAPI(object): """Helper mapping sink-related APIs. - :type gax_api: + :type gapic_api: :class:`.config_service_v2_client.ConfigServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_sinks(self, project, page_size=0, page_token=None): @@ -183,14 +171,13 @@ def list_sinks(self, project, page_size=0, page_token=None): if not None, indicates that more sinks can be retrieved with another call (pass that value as ``page_token``). """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_sinks(path, page_size=page_size, - options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_sink) + page_iter = self._gapic_api.list_sinks( + path, page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token + page_iter._item_to_value = _item_to_sink + return page_iter def sink_create(self, project, sink_name, filter_, destination, unique_writer_identity=False): @@ -222,22 +209,14 @@ def sink_create(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). 
""" - options = None parent = 'projects/%s' % (project,) - sink_pb = LogSink(name=sink_name, filter=filter_, - destination=destination) - try: - created_pb = self._gax_api.create_sink( - parent, - sink_pb, - unique_writer_identity=unique_writer_identity, - options=options, - ) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - path = 'projects/%s/sinks/%s' % (project, sink_name) - raise Conflict(path) - raise + sink_pb = LogSink( + name=sink_name, filter=filter_, destination=destination) + created_pb = self._gapic_api.create_sink( + parent, + sink_pb, + unique_writer_identity=unique_writer_identity + ) return MessageToDict(created_pb) def sink_get(self, project, sink_name): @@ -253,14 +232,8 @@ def sink_get(self, project, sink_name): :returns: The sink object returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) - try: - sink_pb = self._gax_api.get_sink(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + sink_pb = self._gapic_api.get_sink(path) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) @@ -292,19 +265,12 @@ def sink_update(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) - try: - sink_pb = self._gax_api.update_sink( - path, - sink_pb, - unique_writer_identity=unique_writer_identity, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + sink_pb = self._gapic_api.update_sink( + path, + sink_pb, + unique_writer_identity=unique_writer_identity) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) @@ -318,29 +284,23 @@ def sink_delete(self, project, sink_name): :type sink_name: str :param sink_name: the name of the sink """ - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) - try: - self._gax_api.delete_sink(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + self._gapic_api.delete_sink(path) class _MetricsAPI(object): """Helper mapping sink-related APIs. - :type gax_api: + :type gapic_api: :class:`.metrics_service_v2_client.MetricsServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_metrics(self, project, page_size=0, page_token=None): @@ -363,14 +323,13 @@ def list_metrics(self, project, page_size=0, page_token=None): :class:`~google.cloud.logging.metric.Metric` accessible to the current API. 
""" - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_log_metrics( - path, page_size=page_size, options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_metric) + page_iter = self._gapic_api.list_log_metrics( + path, page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token + page_iter._item_to_value = _item_to_metric + return page_iter def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. @@ -391,17 +350,10 @@ def metric_create(self, project, metric_name, filter_, description): :type description: str :param description: description of the metric. """ - options = None parent = 'projects/%s' % (project,) metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) - try: - self._gax_api.create_log_metric(parent, metric_pb, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - path = 'projects/%s/metrics/%s' % (project, metric_name) - raise Conflict(path) - raise + self._gapic_api.create_log_metric(parent, metric_pb) def metric_get(self, project, metric_name): """API call: retrieve a metric resource. @@ -416,14 +368,8 @@ def metric_get(self, project, metric_name): :returns: The metric object returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/metrics/%s' % (project, metric_name) - try: - metric_pb = self._gax_api.get_log_metric(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + metric_pb = self._gapic_api.get_log_metric(path) # NOTE: LogMetric message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(metric_pb) @@ -448,17 +394,11 @@ def metric_update(self, project, metric_name, filter_, description): :returns: The metric object returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/metrics/%s' % (project, metric_name) metric_pb = LogMetric(name=path, filter=filter_, description=description) - try: - metric_pb = self._gax_api.update_log_metric( - path, metric_pb, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + metric_pb = self._gapic_api.update_log_metric( + path, metric_pb) # NOTE: LogMetric message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(metric_pb) @@ -472,14 +412,8 @@ def metric_delete(self, project, metric_name): :type metric_name: str :param metric_name: the name of the metric """ - options = None path = 'projects/%s/metrics/%s' % (project, metric_name) - try: - self._gax_api.delete_log_metric(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + self._gapic_api.delete_log_metric(path) def _parse_log_entry(entry_pb): @@ -598,8 +532,8 @@ def _item_to_metric(iterator, log_metric_pb): return Metric.from_api_repr(resource, iterator.client) -def make_gax_logging_api(client): - """Create an instance of the GAX Logging API. +def make_logging_api(client): + """Create an instance of the Logging API adapter. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that holds configuration details. 
@@ -607,16 +540,13 @@ def make_gax_logging_api(client): :rtype: :class:`_LoggingAPI` :returns: A logging API instance with the proper credentials. """ - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, - LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client( - channel=channel, lib_name='gccl', lib_version=__version__) + credentials=client._credentials, client_info=_CLIENT_INFO) return _LoggingAPI(generated, client) -def make_gax_metrics_api(client): - """Create an instance of the GAX Metrics API. +def make_metrics_api(client): + """Create an instance of the Metrics API adapter. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that holds configuration details. @@ -624,16 +554,13 @@ def make_gax_metrics_api(client): :rtype: :class:`_MetricsAPI` :returns: A metrics API instance with the proper credentials. """ - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, - MetricsServiceV2Client.SERVICE_ADDRESS) generated = MetricsServiceV2Client( - channel=channel, lib_name='gccl', lib_version=__version__) + credentials=client._credentials, client_info=_CLIENT_INFO) return _MetricsAPI(generated, client) -def make_gax_sinks_api(client): - """Create an instance of the GAX Sinks API. +def make_sinks_api(client): + """Create an instance of the Sinks API adapter. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that holds configuration details. @@ -641,9 +568,6 @@ def make_gax_sinks_api(client): :rtype: :class:`_SinksAPI` :returns: A sinks API instance with the proper credentials. """ - channel = make_secure_channel( - client._credentials, DEFAULT_USER_AGENT, - ConfigServiceV2Client.SERVICE_ADDRESS) generated = ConfigServiceV2Client( - channel=channel, lib_name='gccl', lib_version=__version__) + credentials=client._credentials, client_info=_CLIENT_INFO) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 92506bdcc2f7..5be04dab6d37 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -18,14 +18,10 @@ import os try: - from google.cloud.logging._gax import make_gax_logging_api - from google.cloud.logging._gax import make_gax_metrics_api - from google.cloud.logging._gax import make_gax_sinks_api + from google.cloud.logging import _gapic except ImportError: # pragma: NO COVER _HAVE_GRPC = False - make_gax_logging_api = None - make_gax_metrics_api = None - make_gax_sinks_api = None + _gapic = None else: _HAVE_GRPC = True @@ -85,7 +81,7 @@ class Client(ClientWithProject): :type _use_grpc: bool :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, + to use the gRPC transport or HTTP.
If unset, falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable This parameter should be considered private, and could @@ -122,7 +118,7 @@ def logging_api(self): """ if self._logging_api is None: if self._use_grpc: - self._logging_api = make_gax_logging_api(self) + self._logging_api = _gapic.make_logging_api(self) else: self._logging_api = JSONLoggingAPI(self) return self._logging_api @@ -136,7 +132,7 @@ def sinks_api(self): """ if self._sinks_api is None: if self._use_grpc: - self._sinks_api = make_gax_sinks_api(self) + self._sinks_api = _gapic.make_sinks_api(self) else: self._sinks_api = JSONSinksAPI(self) return self._sinks_api @@ -150,7 +146,7 @@ def metrics_api(self): """ if self._metrics_api is None: if self._use_grpc: - self._metrics_api = make_gax_metrics_api(self) + self._metrics_api = _gapic.make_metrics_api(self) else: self._metrics_api = JSONMetricsAPI(self) return self._metrics_api diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py new file mode 100644 index 000000000000..536e5dd24330 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -0,0 +1,45 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.logging_v2 import types +from google.cloud.logging_v2.gapic import config_service_v2_client +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import logging_service_v2_client +from google.cloud.logging_v2.gapic import metrics_service_v2_client + + +class LoggingServiceV2Client(logging_service_v2_client.LoggingServiceV2Client): + __doc__ = logging_service_v2_client.LoggingServiceV2Client.__doc__ + enums = enums + + +class ConfigServiceV2Client(config_service_v2_client.ConfigServiceV2Client): + __doc__ = config_service_v2_client.ConfigServiceV2Client.__doc__ + enums = enums + + +class MetricsServiceV2Client(metrics_service_v2_client.MetricsServiceV2Client): + __doc__ = metrics_service_v2_client.MetricsServiceV2Client.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'LoggingServiceV2Client', + 'ConfigServiceV2Client', + 'MetricsServiceV2Client', +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py new file mode 100644 index 000000000000..13e3693d403b --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -0,0 +1,912 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Accesses the google.logging.v2 ConfigServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import config_service_v2_client_config +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class ConfigServiceV2Client(object): + """ + Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.logging.v2.ConfigServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def sink_path(cls, project, sink): + """Return a fully-qualified sink string.""" + return google.api_core.path_template.expand( + 'projects/{project}/sinks/{sink}', + project=project, + sink=sink, + ) + + @classmethod + def exclusion_path(cls, project, exclusion): + """Return a fully-qualified exclusion string.""" + return google.api_core.path_template.expand( + 'projects/{project}/exclusions/{exclusion}', + project=project, + exclusion=exclusion, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=config_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. 
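The ``project_path``, ``sink_path`` and ``exclusion_path`` helpers above are thin wrappers over ``google.api_core.path_template.expand``, which substitutes keyword arguments into a URI template:

    import google.api_core.path_template

    # Illustrative identifiers only.
    path = google.api_core.path_template.expand(
        'projects/{project}/sinks/{sink}',
        project='my-project', sink='my-sink')
    # 'projects/my-project/sinks/my-sink'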
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.config_service_v2_stub = ( + logging_config_pb2.ConfigServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. + self._list_sinks = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.ListSinks, + default_retry=method_configs['ListSinks'].retry, + default_timeout=method_configs['ListSinks'].timeout, + client_info=client_info, + ) + self._get_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.GetSink, + default_retry=method_configs['GetSink'].retry, + default_timeout=method_configs['GetSink'].timeout, + client_info=client_info, + ) + self._create_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.CreateSink, + default_retry=method_configs['CreateSink'].retry, + default_timeout=method_configs['CreateSink'].timeout, + client_info=client_info, + ) + self._update_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.UpdateSink, + default_retry=method_configs['UpdateSink'].retry, + default_timeout=method_configs['UpdateSink'].timeout, + client_info=client_info, + ) + self._delete_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.DeleteSink, + default_retry=method_configs['DeleteSink'].retry, + default_timeout=method_configs['DeleteSink'].timeout, + client_info=client_info, + ) + self._list_exclusions = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.ListExclusions, + default_retry=method_configs['ListExclusions'].retry, + default_timeout=method_configs['ListExclusions'].timeout, + client_info=client_info, + ) + self._get_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.GetExclusion, + default_retry=method_configs['GetExclusion'].retry, + default_timeout=method_configs['GetExclusion'].timeout, + client_info=client_info, + ) + self._create_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.CreateExclusion, + default_retry=method_configs['CreateExclusion'].retry, + 
default_timeout=method_configs['CreateExclusion'].timeout, + client_info=client_info, + ) + self._update_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.UpdateExclusion, + default_retry=method_configs['UpdateExclusion'].retry, + default_timeout=method_configs['UpdateExclusion'].timeout, + client_info=client_info, + ) + self._delete_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.DeleteExclusion, + default_retry=method_configs['DeleteExclusion'].retry, + default_timeout=method_configs['DeleteExclusion'].timeout, + client_info=client_info, + ) + + # Service calls + def list_sinks(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists sinks. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_sinks(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_sinks(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The parent resource whose sinks are to be listed: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. + You can also iterate over the pages of the response + using its ``pages`` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.ListSinksRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_sinks, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='sinks', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_sink(self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a sink.
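Each ``wrap_method`` call in the constructor above turns a bare gRPC stub method into one with the configured retry and timeout defaults baked in, while still honoring per-call overrides. A hedged sketch, where ``stub``, ``method_configs``, ``client_info`` and ``request`` stand in for the objects built in ``__init__`` and the service calls:

    import google.api_core.gapic_v1.method

    list_sinks = google.api_core.gapic_v1.method.wrap_method(
        stub.ListSinks,
        default_retry=method_configs['ListSinks'].retry,
        default_timeout=method_configs['ListSinks'].timeout,
        client_info=client_info)

    # Defaults apply when arguments are omitted; explicit ones win per call.
    response = list_sinks(request, timeout=10.0)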
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> + >>> response = client.get_sink(sink_name) + + Args: + sink_name (str): Required. The resource name of the sink: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name, ) + return self._get_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_sink(self, + parent, + sink, + unique_writer_identity=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + ``writer_identity`` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> sink = {} + >>> + >>> response = client.create_sink(parent, sink) + + Args: + parent (str): Required. The resource in which to create the sink: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier that + is not already in use. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogSink` + unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as ``writer_identity`` + in the new sink. If this value is omitted or set to false, and if the + sink's parent is a project, then the value returned as ``writer_identity`` is + the same group or service account used by Stackdriver Logging before the + addition of writer identities to this API. The sink's destination must be + in the same project as the sink itself. 
+ + If this field is set to true, or if the sink is owned by a non-project + resource such as an organization, then the value of ``writer_identity`` will + be a unique service account used only for exports from the new sink. For + more information, see ``writer_identity`` in ``LogSink``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.CreateSinkRequest( + parent=parent, + sink=sink, + unique_writer_identity=unique_writer_identity, + ) + return self._create_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_sink(self, + sink_name, + sink, + unique_writer_identity=None, + update_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: ``destination``, and ``filter``. + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> sink = {} + >>> + >>> response = client.update_sink(sink_name, sink) + + Args: + sink_name (str): Required. The full resource name of the sink to update, including the + parent resource and the sink identifier: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that appears + as part of ``sink_name``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogSink` + unique_writer_identity (bool): Optional. See + `sinks.create `_ + for a description of this field. When updating a sink, the effect of this + field on the value of ``writer_identity`` in the updated sink depends on both + the old and new values of this field: + + + If the old and new values of this field are both false or both true, + :: + + then there is no change to the sink's `writer_identity`. + + If the old value is false and the new value is true, then + :: + + `writer_identity` is changed to a unique service account. + + It is an error if the old value is true and the new value is + :: + + set to false or defaulted to false. + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. 
Field mask that specifies the fields in ``sink`` that need + an update. A sink field will be overwritten if, and only if, it is + in the update mask. ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the following mask + for backwards compatibility purposes: + destination,filter,includeChildren + At some point in the future, behavior will be removed and specifying an + empty updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + Example: ``updateMask=filter``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.UpdateSinkRequest( + sink_name=sink_name, + sink=sink, + unique_writer_identity=unique_writer_identity, + update_mask=update_mask, + ) + return self._update_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_sink(self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a sink. If the sink has a unique ``writer_identity``, then that + service account is also deleted. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> + >>> client.delete_sink(sink_name) + + Args: + sink_name (str): Required. The full resource name of the sink to delete, including the + parent resource and the sink identifier: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
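For the ``update_mask`` argument documented above, the mask is an ordinary ``FieldMask`` message (or an equivalent dict) naming the sink fields to overwrite. For example, to update only the filter, reusing ``client``, ``sink_name`` and ``sink`` from the docstring example:

    from google.protobuf import field_mask_pb2

    update_mask = field_mask_pb2.FieldMask(paths=['filter'])
    # or equivalently: update_mask = {'paths': ['filter']}
    response = client.update_sink(sink_name, sink, update_mask=update_mask)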
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name, ) + self._delete_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_exclusions(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists all the exclusions in a parent resource. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_exclusions(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_exclusions(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The parent resource whose exclusions are to be listed. + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.ListExclusionsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_exclusions, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='exclusions', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_exclusion(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets the description of an exclusion. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> + >>> response = client.get_exclusion(name) + + Args: + name (str): Required. 
The resource name of an existing exclusion: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.GetExclusionRequest(name=name, ) + return self._get_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_exclusion(self, + parent, + exclusion, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> exclusion = {} + >>> + >>> response = client.create_exclusion(parent, exclusion) + + Args: + parent (str): Required. The parent resource in which to create the exclusion: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion name + that is not already used in the parent resource. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogExclusion` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
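Continuing the ``create_exclusion`` example above, the ``exclusion`` argument may be a plain dict in the shape of ``LogExclusion``; the name and filter below are made up:

    exclusion = {
        'name': 'drop-lb-info',
        'description': 'Drop noisy load-balancer access logs',
        'filter': 'resource.type="http_load_balancer" AND severity<=INFO',
    }
    response = client.create_exclusion(parent, exclusion)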
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.CreateExclusionRequest( + parent=parent, + exclusion=exclusion, + ) + return self._create_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_exclusion(self, + name, + exclusion, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Changes one or more properties of an existing exclusion. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> exclusion = {} + >>> update_mask = {} + >>> + >>> response = client.update_exclusion(name, exclusion, update_mask) + + Args: + name (str): Required. The resource name of the exclusion to update: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields specified + in ``update_mask`` are relevant. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogExclusion` + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A nonempty list of fields to change in the existing exclusion. + New values for the fields are taken from the corresponding fields in the + ``LogExclusion`` included in this request. Fields not mentioned in + ``update_mask`` are not changed and are ignored in the request. + + For example, to change the filter and description of an exclusion, + specify an ``update_mask`` of ``\"filter,description\"``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.UpdateExclusionRequest( + name=name, + exclusion=exclusion, + update_mask=update_mask, + ) + return self._update_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_exclusion(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes an exclusion. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> + >>> client.delete_exclusion(name) + + Args: + name (str): Required. The resource name of an existing exclusion to delete: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.DeleteExclusionRequest(name=name, ) + self._delete_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py new file mode 100644 index 000000000000..bc8363c6f3f2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py @@ -0,0 +1,82 @@ +config = { + "interfaces": { + "google.logging.v2.ConfigServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 30000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + }, + "write_sink": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 30000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 120000 + } + }, + "methods": { + "ListSinks": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSink": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSink": { + "timeout_millis": 120000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateSink": { + "timeout_millis": 120000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteSink": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListExclusions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + 
"CreateExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py new file mode 100644 index 000000000000..47212e5bc29f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -0,0 +1,159 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class LogSeverity(object): + """ + The severity of the event described in a log entry, expressed as one of the + standard severity levels listed below. For your reference, the levels are + assigned the listed numeric values. The effect of using numeric values other + than those listed is undefined. + + You can filter for log entries by severity. For example, the following + filter expression will match log entries with severities ``INFO``, ``NOTICE``, + and ``WARNING``: + + :: + + severity > DEBUG AND severity <= WARNING + + If you are writing log entries, you should map other severity encodings to + one of these standard levels. For example, you might map all of Java's FINE, + FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can preserve the + original severity level in the log entry payload if you wish. + + Attributes: + DEFAULT (int): (0) The log entry has no assigned severity level. + DEBUG (int): (100) Debug or trace information. + INFO (int): (200) Routine information, such as ongoing status or performance. + NOTICE (int): (300) Normal but significant events, such as start up, shut down, or + a configuration change. + WARNING (int): (400) Warning events might cause problems. + ERROR (int): (500) Error events are likely to cause problems. + CRITICAL (int): (600) Critical events cause more severe problems or outages. + ALERT (int): (700) A person must take an action immediately. + EMERGENCY (int): (800) One or more systems are unusable. + """ + DEFAULT = 0 + DEBUG = 100 + INFO = 200 + NOTICE = 300 + WARNING = 400 + ERROR = 500 + CRITICAL = 600 + ALERT = 700 + EMERGENCY = 800 + + +class NullValue(object): + """ + ``NullValue`` is a singleton enumeration to represent the null value for the + ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 + + +class LabelDescriptor(object): + class ValueType(object): + """ + Value types that can be used as label values. + + Attributes: + STRING (int): A variable-length string. This is the default. + BOOL (int): Boolean; true or false. + INT64 (int): A 64-bit signed integer. 
+ """ + STRING = 0 + BOOL = 1 + INT64 = 2 + + +class LogSink(object): + class VersionFormat(object): + """ + Available log entry formats. Log entries can be written to Stackdriver + Logging in either format and can be exported in either format. + Version 2 is the preferred format. + + Attributes: + VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. + V2 (int): ``LogEntry`` version 2 format. + V1 (int): ``LogEntry`` version 1 format. + """ + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + +class MetricDescriptor(object): + class MetricKind(object): + """ + The kind of measurement. It describes how the data is reported. + + Attributes: + METRIC_KIND_UNSPECIFIED (int): Do not use this default value. + GAUGE (int): An instantaneous measurement of a value. + DELTA (int): The change in a value during a time interval. + CUMULATIVE (int): A value accumulated over a time interval. Cumulative + measurements in a time series should have the same start time + and increasing end times, until an event resets the cumulative + value to zero and sets a new start time for the following + points. + """ + METRIC_KIND_UNSPECIFIED = 0 + GAUGE = 1 + DELTA = 2 + CUMULATIVE = 3 + + class ValueType(object): + """ + The value type of a metric. + + Attributes: + VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. + BOOL (int): The value is a boolean. + This value type can be used only if the metric kind is ``GAUGE``. + INT64 (int): The value is a signed 64-bit integer. + DOUBLE (int): The value is a double precision floating point number. + STRING (int): The value is a text string. + This value type can be used only if the metric kind is ``GAUGE``. + DISTRIBUTION (int): The value is a ````Distribution````. + MONEY (int): The value is money. + """ + VALUE_TYPE_UNSPECIFIED = 0 + BOOL = 1 + INT64 = 2 + DOUBLE = 3 + STRING = 4 + DISTRIBUTION = 5 + MONEY = 6 + + +class LogMetric(object): + class ApiVersion(object): + """ + Stackdriver Logging API version. + + Attributes: + V2 (int): Stackdriver Logging API v2. + V1 (int): Stackdriver Logging API v1. + """ + V2 = 0 + V1 = 1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py new file mode 100644 index 000000000000..43d5de0d240f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -0,0 +1,615 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Accesses the google.logging.v2 LoggingServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import logging_service_v2_client_config +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class LoggingServiceV2Client(object): + """Service for ingesting and querying logs.""" + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.logging.v2.LoggingServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def log_path(cls, project, log): + """Return a fully-qualified log string.""" + return google.api_core.path_template.expand( + 'projects/{project}/logs/{log}', + project=project, + log=log, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=logging_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. 
+ self.logging_service_v2_stub = ( + logging_pb2.LoggingServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. + self._delete_log = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.DeleteLog, + default_retry=method_configs['DeleteLog'].retry, + default_timeout=method_configs['DeleteLog'].timeout, + client_info=client_info, + ) + self._write_log_entries = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.WriteLogEntries, + default_retry=method_configs['WriteLogEntries'].retry, + default_timeout=method_configs['WriteLogEntries'].timeout, + client_info=client_info, + ) + self._list_log_entries = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListLogEntries, + default_retry=method_configs['ListLogEntries'].retry, + default_timeout=method_configs['ListLogEntries'].timeout, + client_info=client_info, + ) + self._list_monitored_resource_descriptors = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListMonitoredResourceDescriptors, + default_retry=method_configs[ + 'ListMonitoredResourceDescriptors'].retry, + default_timeout=method_configs['ListMonitoredResourceDescriptors'] + .timeout, + client_info=client_info, + ) + self._list_logs = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListLogs, + default_retry=method_configs['ListLogs'].retry, + default_timeout=method_configs['ListLogs'].timeout, + client_info=client_info, + ) + + # Service calls + def delete_log(self, + log_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> log_name = client.log_path('[PROJECT]', '[LOG]') + >>> + >>> client.delete_log(log_name) + + Args: + log_name (str): Required. The resource name of the log to delete: + + :: + + \"projects/[PROJECT_ID]/logs/[LOG_ID]\" + \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" + \"folders/[FOLDER_ID]/logs/[LOG_ID]\" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``\"projects/my-project-id/logs/syslog\"``, + ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. + For more information about log names, see + ``LogEntry``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        request = logging_pb2.DeleteLogRequest(log_name=log_name, )
+        self._delete_log(
+            request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def write_log_entries(self,
+                          entries,
+                          log_name=None,
+                          resource=None,
+                          labels=None,
+                          partial_success=None,
+                          retry=google.api_core.gapic_v1.method.DEFAULT,
+                          timeout=google.api_core.gapic_v1.method.DEFAULT,
+                          metadata=None):
+        """
+        Writes log entries to Stackdriver Logging. This API method is the
+        only way to send log entries to Stackdriver Logging. This method
+        is used, directly or indirectly, by the Stackdriver Logging agent
+        (fluentd) and all logging libraries configured to use Stackdriver
+        Logging.
+
+        Example:
+            >>> from google.cloud import logging_v2
+            >>>
+            >>> client = logging_v2.LoggingServiceV2Client()
+            >>>
+            >>> entries = []
+            >>>
+            >>> response = client.write_log_entries(entries)
+
+        Args:
+            entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Stackdriver Logging. The order of log
+                entries in this list does not matter. Values supplied in this method's
+                ``log_name``, ``resource``, and ``labels`` fields are copied into those log
+                entries in this list that do not include values for their corresponding
+                fields. For more information, see the ``LogEntry`` type.
+
+                If the ``timestamp`` or ``insert_id`` fields are missing in log entries, then
+                this method supplies the current time or a unique identifier, respectively.
+                The supplied values are chosen so that, among the log entries that did not
+                supply their own values, the entries earlier in the list will sort before
+                the entries later in the list. See the ``entries.list`` method.
+
+                Log entries with timestamps that are more than the
+                `logs retention period <https://cloud.google.com/logging/quota-policy>`_ in the past or more than
+                24 hours in the future might be discarded. Discarding does not return
+                an error.
+
+                To improve throughput and to avoid exceeding the
+                `quota limit <https://cloud.google.com/logging/quota-policy>`_ for calls to ``entries.write``,
+                you should try to include several log entries in this list,
+                rather than calling this method for each individual log entry.
+                If a dict is provided, it must be of the same form as the protobuf
+                message :class:`~google.cloud.logging_v2.types.LogEntry`
+            log_name (str): Optional. A default log resource name that is assigned to all log entries
+                in ``entries`` that do not specify a value for ``log_name``:
+
+                ::
+
+                    \"projects/[PROJECT_ID]/logs/[LOG_ID]\"
+                    \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\"
+                    \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\"
+                    \"folders/[FOLDER_ID]/logs/[LOG_ID]\"
+
+                ``[LOG_ID]`` must be URL-encoded. For example,
+                ``\"projects/my-project-id/logs/syslog\"`` or
+                ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``.
+                For more information about log names, see
+                ``LogEntry``.
+            resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional.
A default monitored resource object that is assigned to all log + entries in ``entries`` that do not specify a value for ``resource``. Example: + + :: + + { \"type\": \"gce_instance\", + \"labels\": { + \"zone\": \"us-central1-a\", \"instance_id\": \"00000000000000000000\" }} + + See ``LogEntry``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.MonitoredResource` + labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all log + entries in ``entries``. If a log entry already has a label with the same key + as a label in this parameter, then the log entry's label is not changed. + See ``LogEntry``. + partial_success (bool): Optional. Whether valid entries should be written even if some other + entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any + entry is not written, then the response status is the error associated + with one of the failed entries and the response includes error details + keyed by the entries' zero-based index in the ``entries.write`` method. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.WriteLogEntriesResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.WriteLogEntriesRequest( + entries=entries, + log_name=log_name, + resource=resource, + labels=labels, + partial_success=partial_success, + ) + return self._write_log_entries( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_log_entries(self, + resource_names, + project_ids=None, + filter_=None, + order_by=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + `Exporting Logs `_. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> resource_names = [] + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_log_entries(resource_names): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_log_entries(resource_names, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + resource_names (list[str]): Required. Names of one or more parent resources from which to + retrieve log entries: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Projects listed in the ``project_ids`` field are added to this list. + project_ids (list[str]): Deprecated. Use ``resource_names`` instead. 
One or more project identifiers + or project numbers from which to retrieve log entries. Example: + ``\"my-project-1A\"``. If present, these project identifiers are converted to + resource name format and added to the list of resources in + ``resource_names``. + filter_ (str): Optional. A filter that chooses which log entries to return. See [Advanced + Logs Filters](/logging/docs/view/advanced_filters). Only log entries that + match the filter are returned. An empty filter matches all log entries in + the resources listed in ``resource_names``. Referencing a parent resource + that is not listed in ``resource_names`` will cause the filter to return no + results. + The maximum length of the filter is 20000 characters. + order_by (str): Optional. How the results should be sorted. Presently, the only permitted + values are ``\"timestamp asc\"`` (default) and ``\"timestamp desc\"``. The first + option returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second option returns entries + in order of decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` values. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListLogEntriesRequest( + resource_names=resource_names, + project_ids=project_ids, + filter=filter_, + order_by=order_by, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_log_entries, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='entries', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def list_monitored_resource_descriptors( + self, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the descriptors for monitored resource types used by Stackdriver + Logging. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_monitored_resource_descriptors(): + ... # process element + ... 
pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_monitored_resource_descriptors(options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListMonitoredResourceDescriptorsRequest( + page_size=page_size, ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_monitored_resource_descriptors, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='resource_descriptors', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def list_logs(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_logs(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_logs(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The resource name that owns the logs: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`str` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListLogsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_logs, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='log_names', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py new file mode 100644 index 000000000000..d70c2ef6a65a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py @@ -0,0 +1,62 @@ +config = { + "interfaces": { + "google.logging.v2.LoggingServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + }, + "list": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 2000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 10000, + "total_timeout_millis": 20000 + } + }, + "methods": { + "DeleteLog": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "WriteLogEntries": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + "bundling": { + "element_count_threshold": 1000, + "request_byte_threshold": 1048576, + "delay_threshold_millis": 50 + } + }, + "ListLogEntries": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "list" + }, + "ListMonitoredResourceDescriptors": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListLogs": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py new file mode 100644 index 000000000000..823588e27cfd --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -0,0 +1,465 @@ +# 
Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Accesses the google.logging.v2 MetricsServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import metrics_service_v2_client_config +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class MetricsServiceV2Client(object): + """Service for configuring logs-based metrics.""" + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.logging.v2.MetricsServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def metric_path(cls, project, metric): + """Return a fully-qualified metric string.""" + return google.api_core.path_template.expand( + 'projects/{project}/metrics/{metric}', + project=project, + metric=metric, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=metrics_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.metrics_service_v2_stub = ( + logging_metrics_pb2.MetricsServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. + self._list_log_metrics = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.ListLogMetrics, + default_retry=method_configs['ListLogMetrics'].retry, + default_timeout=method_configs['ListLogMetrics'].timeout, + client_info=client_info, + ) + self._get_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.GetLogMetric, + default_retry=method_configs['GetLogMetric'].retry, + default_timeout=method_configs['GetLogMetric'].timeout, + client_info=client_info, + ) + self._create_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.CreateLogMetric, + default_retry=method_configs['CreateLogMetric'].retry, + default_timeout=method_configs['CreateLogMetric'].timeout, + client_info=client_info, + ) + self._update_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.UpdateLogMetric, + default_retry=method_configs['UpdateLogMetric'].retry, + default_timeout=method_configs['UpdateLogMetric'].timeout, + client_info=client_info, + ) + self._delete_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.DeleteLogMetric, + default_retry=method_configs['DeleteLogMetric'].retry, + default_timeout=method_configs['DeleteLogMetric'].timeout, + client_info=client_info, + ) + + # Service calls + def list_log_metrics(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists logs-based metrics. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_log_metrics(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_log_metrics(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... 
pass + + Args: + parent (str): Required. The name of the project containing the metrics: + + :: + + \"projects/[PROJECT_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.ListLogMetricsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_log_metrics, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='metrics', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_log_metric(self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> + >>> response = client.get_log_metric(metric_name) + + Args: + metric_name (str): The resource name of the desired metric: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.GetLogMetricRequest( + metric_name=metric_name, ) + return self._get_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_log_metric(self, + parent, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> metric = {} + >>> + >>> response = client.create_log_metric(parent, metric) + + Args: + parent (str): The resource name of the project in which to create the metric: + + :: + + \"projects/[PROJECT_ID]\" + + The new metric must be provided in the request. + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The new logs-based metric, which must not have an identifier that + already exists. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogMetric` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.CreateLogMetricRequest( + parent=parent, + metric=metric, + ) + return self._create_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_log_metric(self, + metric_name, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates or updates a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> metric = {} + >>> + >>> response = client.update_log_metric(metric_name, metric) + + Args: + metric_name (str): The resource name of the metric to update: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + + The updated metric must be provided in the request and it's + ``name`` field must be the same as ``[METRIC_ID]`` If the metric + does not exist in ``[PROJECT_ID]``, then a new metric is created. + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The updated metric. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogMetric` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.UpdateLogMetricRequest( + metric_name=metric_name, + metric=metric, + ) + return self._update_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_log_metric(self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> + >>> client.delete_log_metric(metric_name) + + Args: + metric_name (str): The resource name of the metric to delete: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.DeleteLogMetricRequest( + metric_name=metric_name, ) + self._delete_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py new file mode 100644 index 000000000000..9ff717dd0213 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.logging.v2.MetricsServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + } + }, + "methods": { + "ListLogMetrics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py new file mode 100644 index 000000000000..88c8f6954fac --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -0,0 +1,508 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/log_entry.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 +from google.logging.type import http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2 +from google.logging.type import log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/log_entry.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x05\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload\"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08\"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + 
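+  # serialized_pb above holds the wire-format FileDescriptorProto that protoc
+  # emitted for log_entry.proto; the Descriptor objects declared later in this
+  # module mirror the same schema for the pure-Python runtime.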
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.logging.v2.LogEntry.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.LogEntry.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.LogEntry.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=948, + serialized_end=993, +) + +_LOGENTRY = _descriptor.Descriptor( + name='LogEntry', + full_name='google.logging.v2.LogEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.LogEntry.log_name', index=0, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource', full_name='google.logging.v2.LogEntry.resource', index=1, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proto_payload', full_name='google.logging.v2.LogEntry.proto_payload', index=2, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='text_payload', full_name='google.logging.v2.LogEntry.text_payload', index=3, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_payload', full_name='google.logging.v2.LogEntry.json_payload', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp', full_name='google.logging.v2.LogEntry.timestamp', index=5, + number=9, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='receive_timestamp', full_name='google.logging.v2.LogEntry.receive_timestamp', index=6, + number=24, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='severity', full_name='google.logging.v2.LogEntry.severity', index=7, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='insert_id', full_name='google.logging.v2.LogEntry.insert_id', index=8, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='http_request', full_name='google.logging.v2.LogEntry.http_request', index=9, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.logging.v2.LogEntry.labels', index=10, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='operation', full_name='google.logging.v2.LogEntry.operation', index=11, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='trace', full_name='google.logging.v2.LogEntry.trace', index=12, + number=22, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='span_id', full_name='google.logging.v2.LogEntry.span_id', index=13, + number=27, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_location', full_name='google.logging.v2.LogEntry.source_location', index=14, + number=23, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_LOGENTRY_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='payload', full_name='google.logging.v2.LogEntry.payload', + index=0, containing_type=None, fields=[]), + ], + serialized_start=306, + serialized_end=1004, +) + + +_LOGENTRYOPERATION = _descriptor.Descriptor( + 
name='LogEntryOperation', + full_name='google.logging.v2.LogEntryOperation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.logging.v2.LogEntryOperation.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='producer', full_name='google.logging.v2.LogEntryOperation.producer', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='first', full_name='google.logging.v2.LogEntryOperation.first', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last', full_name='google.logging.v2.LogEntryOperation.last', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1006, + serialized_end=1084, +) + + +_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( + name='LogEntrySourceLocation', + full_name='google.logging.v2.LogEntrySourceLocation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.logging.v2.LogEntrySourceLocation.file', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='line', full_name='google.logging.v2.LogEntrySourceLocation.line', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='function', full_name='google.logging.v2.LogEntrySourceLocation.function', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1086, + serialized_end=1156, +) + +_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY +_LOGENTRY.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_LOGENTRY.fields_by_name['proto_payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_LOGENTRY.fields_by_name['json_payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_LOGENTRY.fields_by_name['timestamp'].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name['receive_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name['severity'].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY +_LOGENTRY.fields_by_name['http_request'].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST +_LOGENTRY.fields_by_name['labels'].message_type = _LOGENTRY_LABELSENTRY +_LOGENTRY.fields_by_name['operation'].message_type = _LOGENTRYOPERATION +_LOGENTRY.fields_by_name['source_location'].message_type = _LOGENTRYSOURCELOCATION +_LOGENTRY.oneofs_by_name['payload'].fields.append( + _LOGENTRY.fields_by_name['proto_payload']) +_LOGENTRY.fields_by_name['proto_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +_LOGENTRY.oneofs_by_name['payload'].fields.append( + _LOGENTRY.fields_by_name['text_payload']) +_LOGENTRY.fields_by_name['text_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +_LOGENTRY.oneofs_by_name['payload'].fields.append( + _LOGENTRY.fields_by_name['json_payload']) +_LOGENTRY.fields_by_name['json_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +DESCRIPTOR.message_types_by_name['LogEntry'] = _LOGENTRY +DESCRIPTOR.message_types_by_name['LogEntryOperation'] = _LOGENTRYOPERATION +DESCRIPTOR.message_types_by_name['LogEntrySourceLocation'] = _LOGENTRYSOURCELOCATION + +LogEntry = _reflection.GeneratedProtocolMessageType('LogEntry', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRY_LABELSENTRY, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) + )) + , + DESCRIPTOR = _LOGENTRY, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """An individual entry in a log. + + + Attributes: + log_name: + Required. The resource name of the log to which this log entry + belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may + optionally be used in place of PROJECT\_ID. The project number + is translated to its corresponding PROJECT\_ID internally and + the ``log_name`` field will contain PROJECT\_ID in queries and + exports. ``[LOG_ID]`` must be URL-encoded within + ``log_name``. Example: ``"organizations/1234567890/logs/cloudr + esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must + be less than 512 characters long and can only include the + following characters: upper and lower case alphanumeric + characters, forward-slash, underscore, hyphen, and period. + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. Listing + the log entry will not show the leading slash and filtering + for a log name with a leading slash will never return any + results. + resource: + Required. The monitored resource associated with this log + entry. Example: a log entry that reports a database error + would be associated with the monitored resource designating + the particular database that reported the error. + payload: + Optional. The log entry payload, which can be one of multiple + types. + proto_payload: + The log entry payload, represented as a protocol buffer. 
Some + Google Cloud Platform services use this field for their log + entry payloads. + text_payload: + The log entry payload, represented as a Unicode string + (UTF-8). + json_payload: + The log entry payload, represented as a structure that is + expressed as a JSON object. + timestamp: + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age and + to enforce the logs retention period. If this field is omitted + in a new log entry, then Stackdriver Logging assigns it the + current time. Incoming log entries should have timestamps + that are no more than the `logs retention period + `__ in the past, and no more than 24 + hours in the future. See the ``entries.write`` API method for + more information. + receive_timestamp: + Output only. The time the log entry was received by + Stackdriver Logging. + severity: + Optional. The severity of the log entry. The default value is + ``LogSeverity.DEFAULT``. + insert_id: + Optional. A unique identifier for the log entry. If you + provide a value, then Stackdriver Logging considers other log + entries in the same project, with the same ``timestamp``, and + with the same ``insert_id`` to be duplicates which can be + removed. If omitted in new log entries, then Stackdriver + Logging assigns its own unique identifier. The ``insert_id`` + is also used to order log entries that have the same + ``timestamp`` value. + http_request: + Optional. Information about the HTTP request associated with + this log entry, if applicable. + labels: + Optional. A set of user-defined (key, value) data that + provides additional information about the log entry. + operation: + Optional. Information about an operation associated with the + log entry, if applicable. + trace: + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: ``projects/my- + projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id: + Optional. Id of the span within the trace associated with the + log entry. e.g. "0000000000000042" For Stackdriver trace + spans, this is the same format that the Stackdriver trace API + uses. The ID is a 16-character hexadecimal encoding of an + 8-byte array. + source_location: + Optional. Source code location information associated with the + log entry, if any. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) + )) +_sym_db.RegisterMessage(LogEntry) +_sym_db.RegisterMessage(LogEntry.LabelsEntry) + +LogEntryOperation = _reflection.GeneratedProtocolMessageType('LogEntryOperation', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRYOPERATION, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """Additional information about a potentially long-running operation with + which a log entry is associated. + + + Attributes: + id: + Optional. An arbitrary operation identifier. Log entries with + the same identifier are assumed to be part of the same + operation. + producer: + Optional. An arbitrary producer identifier. The combination of + ``id`` and ``producer`` must be globally unique. Examples for + ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first: + Optional. Set this to True if this is the first log entry in + the operation. + last: + Optional. Set this to True if this is the last log entry in + the operation. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) + )) +_sym_db.RegisterMessage(LogEntryOperation) + +LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType('LogEntrySourceLocation', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRYSOURCELOCATION, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """Additional information about the source code location that produced the + log entry. + + + Attributes: + file: + Optional. Source file name. Depending on the runtime + environment, this might be a simple name or a fully-qualified + name. + line: + Optional. Line within the source file. 1-based; 0 indicates no + line number available. + function: + Optional. Human-readable name of the function or method being + invoked, with optional context such as the class or package + name. This information may be used in contexts such as the + logs viewer, where a file and line number are less meaningful. + The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) + )) +_sym_db.RegisterMessage(LogEntrySourceLocation) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +_LOGENTRY_LABELSENTRY.has_options = True +_LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py new file mode 100644 index 000000000000..9fa39a546e85 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -0,0 +1,1576 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/logging_config.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging_config.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd7\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12G\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormat\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12.\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x80\n\n\x0f\x43onfigServiceV2\x12v\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v2/{parent=*/*}/sinks\x12m\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{sink_name=*/*/sinks/*}\x12t\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"$\x82\xd3\xe4\x93\x02\x1e\"\x16/v2/{parent=*/*}/sinks:\x04sink\x12y\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink\")\x82\xd3\xe4\x93\x02#\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sink\x12o\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{sink_name=*/*/sinks/*}\x12\x8a\x01\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{parent=*/*}/exclusions\x12|\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{name=*/*/exclusions/*}\x12\x8d\x01\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(\"\x1b/v2/{parent=*/*}/exclusions:\texclusion\x12\x8d\x01\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(2\x1b/v2/{name=*/*/exclusions/*}:\texclusion\x12y\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{name=*/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( + name='VersionFormat', + full_name='google.logging.v2.LogSink.VersionFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VERSION_FORMAT_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V2', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V1', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=480, + serialized_end=543, +) +_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) + + +_LOGSINK = _descriptor.Descriptor( + name='LogSink', + full_name='google.logging.v2.LogSink', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogSink.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='destination', 
full_name='google.logging.v2.LogSink.destination', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.LogSink.filter', index=2, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='output_version_format', full_name='google.logging.v2.LogSink.output_version_format', index=3, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='writer_identity', full_name='google.logging.v2.LogSink.writer_identity', index=4, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_children', full_name='google.logging.v2.LogSink.include_children', index=5, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.logging.v2.LogSink.start_time', index=6, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_time', full_name='google.logging.v2.LogSink.end_time', index=7, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _LOGSINK_VERSIONFORMAT, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=200, + serialized_end=543, +) + + +_LISTSINKSREQUEST = _descriptor.Descriptor( + name='ListSinksRequest', + full_name='google.logging.v2.ListSinksRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListSinksRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListSinksRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListSinksRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=545, + serialized_end=618, +) + + +_LISTSINKSRESPONSE = _descriptor.Descriptor( + name='ListSinksResponse', + full_name='google.logging.v2.ListSinksResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sinks', full_name='google.logging.v2.ListSinksResponse.sinks', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListSinksResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=620, + serialized_end=707, +) + + +_GETSINKREQUEST = _descriptor.Descriptor( + name='GetSinkRequest', + full_name='google.logging.v2.GetSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.GetSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=709, + serialized_end=744, +) + + +_CREATESINKREQUEST = _descriptor.Descriptor( + name='CreateSinkRequest', + full_name='google.logging.v2.CreateSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateSinkRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.logging.v2.CreateSinkRequest.sink', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unique_writer_identity', full_name='google.logging.v2.CreateSinkRequest.unique_writer_identity', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=746, + serialized_end=855, +) + + +_UPDATESINKREQUEST = _descriptor.Descriptor( + name='UpdateSinkRequest', + full_name='google.logging.v2.UpdateSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.UpdateSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.logging.v2.UpdateSinkRequest.sink', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unique_writer_identity', full_name='google.logging.v2.UpdateSinkRequest.unique_writer_identity', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.logging.v2.UpdateSinkRequest.update_mask', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=858, + serialized_end=1019, +) + + +_DELETESINKREQUEST = _descriptor.Descriptor( + name='DeleteSinkRequest', + full_name='google.logging.v2.DeleteSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.DeleteSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1021, + serialized_end=1059, +) + + +_LOGEXCLUSION = _descriptor.Descriptor( + name='LogExclusion', + full_name='google.logging.v2.LogExclusion', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogExclusion.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='description', full_name='google.logging.v2.LogExclusion.description', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', 
full_name='google.logging.v2.LogExclusion.filter', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='disabled', full_name='google.logging.v2.LogExclusion.disabled', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1061, + serialized_end=1144, +) + + +_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( + name='ListExclusionsRequest', + full_name='google.logging.v2.ListExclusionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListExclusionsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListExclusionsRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListExclusionsRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1146, + serialized_end=1224, +) + + +_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( + name='ListExclusionsResponse', + full_name='google.logging.v2.ListExclusionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='exclusions', full_name='google.logging.v2.ListExclusionsResponse.exclusions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListExclusionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1226, + serialized_end=1328, +) + + +_GETEXCLUSIONREQUEST = _descriptor.Descriptor( + name='GetExclusionRequest', + full_name='google.logging.v2.GetExclusionRequest', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.GetExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1330, + serialized_end=1365, +) + + +_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='CreateExclusionRequest', + full_name='google.logging.v2.CreateExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateExclusionRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclusion', full_name='google.logging.v2.CreateExclusionRequest.exclusion', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1367, + serialized_end=1459, +) + + +_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='UpdateExclusionRequest', + full_name='google.logging.v2.UpdateExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.UpdateExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclusion', full_name='google.logging.v2.UpdateExclusionRequest.exclusion', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.logging.v2.UpdateExclusionRequest.update_mask', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1462, + serialized_end=1601, +) + + +_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='DeleteExclusionRequest', + full_name='google.logging.v2.DeleteExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.DeleteExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1603, + serialized_end=1641, +) + +_LOGSINK.fields_by_name['output_version_format'].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK +_LISTSINKSRESPONSE.fields_by_name['sinks'].message_type = _LOGSINK +_CREATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTEXCLUSIONSRESPONSE.fields_by_name['exclusions'].message_type = _LOGEXCLUSION +_CREATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +DESCRIPTOR.message_types_by_name['LogSink'] = _LOGSINK +DESCRIPTOR.message_types_by_name['ListSinksRequest'] = _LISTSINKSREQUEST +DESCRIPTOR.message_types_by_name['ListSinksResponse'] = _LISTSINKSRESPONSE +DESCRIPTOR.message_types_by_name['GetSinkRequest'] = _GETSINKREQUEST +DESCRIPTOR.message_types_by_name['CreateSinkRequest'] = _CREATESINKREQUEST +DESCRIPTOR.message_types_by_name['UpdateSinkRequest'] = _UPDATESINKREQUEST +DESCRIPTOR.message_types_by_name['DeleteSinkRequest'] = _DELETESINKREQUEST +DESCRIPTOR.message_types_by_name['LogExclusion'] = _LOGEXCLUSION +DESCRIPTOR.message_types_by_name['ListExclusionsRequest'] = _LISTEXCLUSIONSREQUEST +DESCRIPTOR.message_types_by_name['ListExclusionsResponse'] = _LISTEXCLUSIONSRESPONSE +DESCRIPTOR.message_types_by_name['GetExclusionRequest'] = _GETEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['CreateExclusionRequest'] = _CREATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['UpdateExclusionRequest'] = _UPDATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['DeleteExclusionRequest'] = _DELETEEXCLUSIONREQUEST + +LogSink = _reflection.GeneratedProtocolMessageType('LogSink', (_message.Message,), dict( + DESCRIPTOR = _LOGSINK, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Describes a sink used to export log entries to one of the following + destinations in any project: a Cloud Storage bucket, a BigQuery dataset, + or a Cloud Pub/Sub topic. A logs filter controls which log entries are + exported. The sink must be created within a project, organization, + billing account, or folder. + + + Attributes: + name: + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include only + the following characters: upper and lower-case alphanumeric + characters, underscores, hyphens, and periods. + destination: + Required. 
The export destination: :: + "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis + .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo + gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The + sink's ``writer_identity``, set when the sink is created, must + have permission to write to the destination or else the log + entries are not exported. For more information, see `Exporting + Logs With Sinks `__. + filter: + Optional. An `advanced logs filter + `__. The only exported + log entries are those that are in the resource owning the sink + and that match the filter. For example: :: + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND + severity>=ERROR + output_version_format: + Deprecated. The log entry format to use for this sink's + exported log entries. The v2 format is used by default and + cannot be changed. + writer_identity: + Output only. An IAM identity—a service account or group—under + which Stackdriver Logging writes the exported log entries to + the sink's destination. This field is set by `sinks.create + `__ + and `sinks.update `__, based on the setting of + ``unique_writer_identity`` in those methods. Until you grant + this identity write-access to the destination, log entry + exports from this sink will fail. For more information, see + `Granting access for a resource `__. Consult the destination service's documentation to + determine the appropriate IAM roles to assign to the identity. + include_children: + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the default, + only the logs owned by the sink's parent resource are + available for export. If the field is true, then logs from all + the projects, folders, and billing accounts contained in the + sink's parent resource are also available for export. Whether + a particular log entry from the children is exported depends + on the sink's filter expression. For example, if this field is + true, then the filter ``resource.type=gce_instance`` would + export all Compute Engine VM instance log entries from all + projects in the sink's parent. To only export entries from + certain child projects, filter on the project part of the log + name: :: logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance + start_time: + Deprecated. This field is ignored when creating or updating + sinks. + end_time: + Deprecated. This field is ignored when creating or updating + sinks. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) + )) +_sym_db.RegisterMessage(LogSink) + +ListSinksRequest = _reflection.GeneratedProtocolMessageType('ListSinksRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSINKSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``ListSinks``. + + + Attributes: + parent: + Required. The parent resource whose sinks are to be listed: + :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. 
The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) + )) +_sym_db.RegisterMessage(ListSinksRequest) + +ListSinksResponse = _reflection.GeneratedProtocolMessageType('ListSinksResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSINKSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Result returned from ``ListSinks``. + + + Attributes: + sinks: + A list of sinks. + next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) + )) +_sym_db.RegisterMessage(ListSinksResponse) + +GetSinkRequest = _reflection.GeneratedProtocolMessageType('GetSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``GetSink``. + + + Attributes: + sink_name: + Required. The resource name of the sink: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) + )) +_sym_db.RegisterMessage(GetSinkRequest) + +CreateSinkRequest = _reflection.GeneratedProtocolMessageType('CreateSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``CreateSink``. + + + Attributes: + parent: + Required. The resource in which to create the sink: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- + project"``, ``"organizations/123456789"``. + sink: + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity: + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is omitted + or set to false, and if the sink's parent is a project, then + the value returned as ``writer_identity`` is the same group or + service account used by Stackdriver Logging before the + addition of writer identities to this API. The sink's + destination must be in the same project as the sink itself. + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, see + ``writer_identity`` in [LogSink][google.logging.v2.LogSink]. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) + )) +_sym_db.RegisterMessage(CreateSinkRequest) + +UpdateSinkRequest = _reflection.GeneratedProtocolMessageType('UpdateSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``UpdateSink``. + + + Attributes: + sink_name: + Required. 
The full resource name of the sink to update, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + sink: + Required. The updated sink, whose name is the same identifier + that appears as part of ``sink_name``. + unique_writer_identity: + Optional. See `sinks.create + `__ + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: - If the old and new values of this field are + both false or both true, then there is no change to the + sink's ``writer_identity``. - If the old value is false and + the new value is true, then ``writer_identity`` is changed + to a unique service account. - It is an error if the old + value is true and the new value is set to false or + defaulted to false. + update_mask: + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. An empty updateMask is temporarily + treated as using the following mask for backwards + compatibility purposes: destination,filter,includeChildren At + some point in the future, behavior will be removed and + specifying an empty updateMask will be an error. For a + detailed ``FieldMask`` definition, see + https://developers.google.com/protocol- + buffers/docs/reference/google.protobuf#fieldmask Example: + ``updateMask=filter``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) + )) +_sym_db.RegisterMessage(UpdateSinkRequest) + +DeleteSinkRequest = _reflection.GeneratedProtocolMessageType('DeleteSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``DeleteSink``. + + + Attributes: + sink_name: + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) + )) +_sym_db.RegisterMessage(DeleteSinkRequest) + +LogExclusion = _reflection.GeneratedProtocolMessageType('LogExclusion', (_message.Message,), dict( + DESCRIPTOR = _LOGEXCLUSION, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Specifies a set of log entries that are not to be stored in Stackdriver + Logging. If your project receives a large volume of logs, you might be + able to use exclusions to reduce your chargeable logs. Exclusions are + processed after log sinks, so you can export log entries before they are + excluded. Audit log entries and log entries from Amazon Web Services are + never excluded. + + + Attributes: + name: + Required. A client-assigned identifier, such as ``"load- + balancer-exclusion"``. Identifiers are limited to 100 + characters and can include only letters, digits, underscores, + hyphens, and periods. + description: + Optional. 
A description of this exclusion. + filter: + Required. An `advanced logs filter + `__ that matches the log + entries to be excluded. By using the `sample function + `__, you can + exclude less than 100% of the matching log entries. For + example, the following filter matches 99% of low-severity log + entries from load balancers: :: + "resource.type=http_load_balancer severity<ERROR + sample(insertId, 0.99)" + disabled: + Optional. If set to True, then this exclusion is disabled and + it does not exclude any log entries. You can use + `exclusions.patch `__ to change the value + of this field. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) + )) +_sym_db.RegisterMessage(LogExclusion) + +ListExclusionsRequest = _reflection.GeneratedProtocolMessageType('ListExclusionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTEXCLUSIONSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``ListExclusions``. + + + Attributes: + parent: + Required. The parent resource whose exclusions are to be + listed. :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsRequest) + )) +_sym_db.RegisterMessage(ListExclusionsRequest) + +ListExclusionsResponse = _reflection.GeneratedProtocolMessageType('ListExclusionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTEXCLUSIONSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Result returned from ``ListExclusions``. + + + Attributes: + exclusions: + A list of exclusions. + next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsResponse) + )) +_sym_db.RegisterMessage(ListExclusionsResponse) + +GetExclusionRequest = _reflection.GeneratedProtocolMessageType('GetExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``GetExclusion``. + + + Attributes: + name: + Required. The resource name of an existing exclusion: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetExclusionRequest) + )) +_sym_db.RegisterMessage(GetExclusionRequest) + +CreateExclusionRequest = _reflection.GeneratedProtocolMessageType('CreateExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``CreateExclusion``.
+ + + Attributes: + parent: + Required. The parent resource in which to create the + exclusion: :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- + project"``, ``"organizations/123456789"``. + exclusion: + Required. The new exclusion, whose ``name`` parameter is an + exclusion name that is not already used in the parent + resource. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateExclusionRequest) + )) +_sym_db.RegisterMessage(CreateExclusionRequest) + +UpdateExclusionRequest = _reflection.GeneratedProtocolMessageType('UpdateExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``UpdateExclusion``. + + + Attributes: + name: + Required. The resource name of the exclusion to update: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + exclusion: + Required. New values for the existing exclusion. Only the + fields specified in ``update_mask`` are relevant. + update_mask: + Required. A nonempty list of fields to change in the existing + exclusion. New values for the fields are taken from the + corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included in + this request. Fields not mentioned in ``update_mask`` are not + changed and are ignored in the request. For example, to + change the filter and description of an exclusion, specify an + ``update_mask`` of ``"filter,description"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateExclusionRequest) + )) +_sym_db.RegisterMessage(UpdateExclusionRequest) + +DeleteExclusionRequest = _reflection.GeneratedProtocolMessageType('DeleteExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``DeleteExclusion``. + + + Attributes: + name: + Required. The resource name of an existing exclusion to + delete: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteExclusionRequest) + )) +_sym_db.RegisterMessage(DeleteExclusionRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. 
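+  # Note (descriptive, hand-added): if the grpc runtime or its legacy
+  # ``grpc.beta`` surface is unavailable, the ImportError raised by the
+  # imports below is swallowed by the ``except ImportError`` guard that
+  # closes this block, and the service classes defined inside it are
+  # simply not created.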
+ import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class ConfigServiceV2Stub(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListSinks = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=ListSinksRequest.SerializeToString, + response_deserializer=ListSinksResponse.FromString, + ) + self.GetSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=GetSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.CreateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=CreateSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.UpdateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=UpdateSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.DeleteSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=DeleteSinkRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListExclusions = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=ListExclusionsRequest.SerializeToString, + response_deserializer=ListExclusionsResponse.FromString, + ) + self.GetExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=GetExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.CreateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=CreateExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.UpdateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=UpdateExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.DeleteExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + request_serializer=DeleteExclusionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class ConfigServiceV2Servicer(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def ListSinks(self, request, context): + """Lists sinks. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSink(self, request, context): + """Gets a sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteExclusion(self, request, context): + """Deletes an exclusion. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_ConfigServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListSinks': grpc.unary_unary_rpc_method_handler( + servicer.ListSinks, + request_deserializer=ListSinksRequest.FromString, + response_serializer=ListSinksResponse.SerializeToString, + ), + 'GetSink': grpc.unary_unary_rpc_method_handler( + servicer.GetSink, + request_deserializer=GetSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'CreateSink': grpc.unary_unary_rpc_method_handler( + servicer.CreateSink, + request_deserializer=CreateSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'UpdateSink': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSink, + request_deserializer=UpdateSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'DeleteSink': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSink, + request_deserializer=DeleteSinkRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListExclusions': grpc.unary_unary_rpc_method_handler( + servicer.ListExclusions, + request_deserializer=ListExclusionsRequest.FromString, + response_serializer=ListExclusionsResponse.SerializeToString, + ), + 'GetExclusion': grpc.unary_unary_rpc_method_handler( + servicer.GetExclusion, + request_deserializer=GetExclusionRequest.FromString, + response_serializer=LogExclusion.SerializeToString, + ), + 'CreateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.CreateExclusion, + request_deserializer=CreateExclusionRequest.FromString, + response_serializer=LogExclusion.SerializeToString, + ), + 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.UpdateExclusion, + request_deserializer=UpdateExclusionRequest.FromString, + response_serializer=LogExclusion.SerializeToString, + ), + 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( + servicer.DeleteExclusion, + request_deserializer=DeleteExclusionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.ConfigServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaConfigServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + def ListSinks(self, request, context): + """Lists sinks. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSink(self, request, context): + """Gets a sink. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSink(self, request, context): + """Updates a sink. 
This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteExclusion(self, request, context): + """Deletes an exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaConfigServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + def ListSinks(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists sinks. + """ + raise NotImplementedError() + ListSinks.future = None + def GetSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets a sink. + """ + raise NotImplementedError() + GetSink.future = None + def CreateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + raise NotImplementedError() + CreateSink.future = None + def UpdateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + raise NotImplementedError() + UpdateSink.future = None + def DeleteSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + raise NotImplementedError() + DeleteSink.future = None + def ListExclusions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists all the exclusions in a parent resource. 
+ """ + raise NotImplementedError() + ListExclusions.future = None + def GetExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the description of an exclusion. + """ + raise NotImplementedError() + GetExclusion.future = None + def CreateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + raise NotImplementedError() + CreateExclusion.future = None + def UpdateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Changes one or more properties of an existing exclusion. + """ + raise NotImplementedError() + UpdateExclusion.future = None + def DeleteExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an exclusion. + """ + raise NotImplementedError() + DeleteExclusion.future = None + + + def beta_create_ConfigServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.FromString, + } + response_serializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): LogSink.SerializeToString, + } + method_implementations = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): face_utilities.unary_unary_inline(servicer.CreateExclusion), + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): 
face_utilities.unary_unary_inline(servicer.CreateSink), + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): face_utilities.unary_unary_inline(servicer.DeleteExclusion), + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): face_utilities.unary_unary_inline(servicer.DeleteSink), + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): face_utilities.unary_unary_inline(servicer.GetExclusion), + ('google.logging.v2.ConfigServiceV2', 'GetSink'): face_utilities.unary_unary_inline(servicer.GetSink), + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): face_utilities.unary_unary_inline(servicer.ListExclusions), + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): face_utilities.unary_unary_inline(servicer.ListSinks), + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): face_utilities.unary_unary_inline(servicer.UpdateExclusion), + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): face_utilities.unary_unary_inline(servicer.UpdateSink), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_ConfigServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.FromString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.FromString, + ('google.logging.v2.ConfigServiceV2', 
'UpdateSink'): LogSink.FromString, + } + cardinalities = { + 'CreateExclusion': cardinality.Cardinality.UNARY_UNARY, + 'CreateSink': cardinality.Cardinality.UNARY_UNARY, + 'DeleteExclusion': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSink': cardinality.Cardinality.UNARY_UNARY, + 'GetExclusion': cardinality.Cardinality.UNARY_UNARY, + 'GetSink': cardinality.Cardinality.UNARY_UNARY, + 'ListExclusions': cardinality.Cardinality.UNARY_UNARY, + 'ListSinks': cardinality.Cardinality.UNARY_UNARY, + 'UpdateExclusion': cardinality.Cardinality.UNARY_UNARY, + 'UpdateSink': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.ConfigServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py new file mode 100644 index 000000000000..4f218e7d43b6 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -0,0 +1,211 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.logging_v2.proto.logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class ConfigServiceV2Stub(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
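+
+        A minimal usage sketch (hypothetical: the channel target and
+        project ID are assumptions, not part of this generated file):
+
+            channel = grpc.insecure_channel('localhost:8080')
+            stub = ConfigServiceV2Stub(channel)
+            request = google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest(
+                parent='projects/my-project')
+            sinks = stub.ListSinks(request)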
+ """ + self.ListSinks = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString, + ) + self.GetSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.CreateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.UpdateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.DeleteSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListExclusions = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString, + ) + self.GetExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.CreateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.UpdateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.DeleteExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class ConfigServiceV2Servicer(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. 
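+
+    A minimal server-side sketch (hypothetical; the executor, port, and
+    the overridden method body are assumptions, not part of this
+    generated file):
+
+        from concurrent import futures
+
+        class SinkConfig(ConfigServiceV2Servicer):
+            def ListSinks(self, request, context):
+                # A real implementation would look up sinks under
+                # request.parent instead of returning an empty page.
+                return google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse()
+
+        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+        add_ConfigServiceV2Servicer_to_server(SinkConfig(), server)
+        server.add_insecure_port('[::]:50051')
+        server.start()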
+ """ + + def ListSinks(self, request, context): + """Lists sinks. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSink(self, request, context): + """Gets a sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteExclusion(self, request, context): + """Deletes an exclusion. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ConfigServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListSinks': grpc.unary_unary_rpc_method_handler( + servicer.ListSinks, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString, + ), + 'GetSink': grpc.unary_unary_rpc_method_handler( + servicer.GetSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'CreateSink': grpc.unary_unary_rpc_method_handler( + servicer.CreateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'UpdateSink': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'DeleteSink': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListExclusions': grpc.unary_unary_rpc_method_handler( + servicer.ListExclusions, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString, + ), + 'GetExclusion': grpc.unary_unary_rpc_method_handler( + servicer.GetExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'CreateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.CreateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.UpdateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( + servicer.DeleteExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 
'google.logging.v2.ConfigServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py new file mode 100644 index 000000000000..76a68b0c83fe --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -0,0 +1,895 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/logging_v2/proto/logging_metrics.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 +from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging_metrics.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\"\xad\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12\x38\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersion\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01\"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"/\x82\xd3\xe4\x93\x02)\"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_distribution__pb2.DESCRIPTOR,google_dot_api_dot_metric__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( + name='ApiVersion', + full_name='google.logging.v2.LogMetric.ApiVersion', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='V2', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V1', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=625, + serialized_end=653, +) +_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) + + +_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( + name='LabelExtractorsEntry', + full_name='google.logging.v2.LogMetric.LabelExtractorsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=569, + serialized_end=623, +) + +_LOGMETRIC = _descriptor.Descriptor( + name='LogMetric', + full_name='google.logging.v2.LogMetric', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogMetric.name', index=0, + number=1, type=9, cpp_type=9, label=1, + 
has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='description', full_name='google.logging.v2.LogMetric.description', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.LogMetric.filter', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric_descriptor', full_name='google.logging.v2.LogMetric.metric_descriptor', index=3, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_extractor', full_name='google.logging.v2.LogMetric.value_extractor', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label_extractors', full_name='google.logging.v2.LogMetric.label_extractors', index=5, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bucket_options', full_name='google.logging.v2.LogMetric.bucket_options', index=6, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='google.logging.v2.LogMetric.version', index=7, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY, ], + enum_types=[ + _LOGMETRIC_APIVERSION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=224, + serialized_end=653, +) + + +_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( + name='ListLogMetricsRequest', + full_name='google.logging.v2.ListLogMetricsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListLogMetricsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogMetricsRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogMetricsRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=655, + serialized_end=733, +) + + +_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( + name='ListLogMetricsResponse', + full_name='google.logging.v2.ListLogMetricsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metrics', full_name='google.logging.v2.ListLogMetricsResponse.metrics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogMetricsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=735, + serialized_end=831, +) + + +_GETLOGMETRICREQUEST = _descriptor.Descriptor( + name='GetLogMetricRequest', + full_name='google.logging.v2.GetLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.GetLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=833, + serialized_end=875, +) + + +_CREATELOGMETRICREQUEST = _descriptor.Descriptor( + name='CreateLogMetricRequest', + full_name='google.logging.v2.CreateLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateLogMetricRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric', full_name='google.logging.v2.CreateLogMetricRequest.metric', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=877, + serialized_end=963, +) + + +_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( + name='UpdateLogMetricRequest', + full_name='google.logging.v2.UpdateLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.UpdateLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric', full_name='google.logging.v2.UpdateLogMetricRequest.metric', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=965, + serialized_end=1056, +) + + +_DELETELOGMETRICREQUEST = _descriptor.Descriptor( + name='DeleteLogMetricRequest', + full_name='google.logging.v2.DeleteLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.DeleteLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1058, + serialized_end=1103, +) + +_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC +_LOGMETRIC.fields_by_name['metric_descriptor'].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR +_LOGMETRIC.fields_by_name['label_extractors'].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY +_LOGMETRIC.fields_by_name['bucket_options'].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name['version'].enum_type = _LOGMETRIC_APIVERSION +_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC +_LISTLOGMETRICSRESPONSE.fields_by_name['metrics'].message_type = _LOGMETRIC +_CREATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC +_UPDATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC +DESCRIPTOR.message_types_by_name['LogMetric'] = _LOGMETRIC +DESCRIPTOR.message_types_by_name['ListLogMetricsRequest'] = _LISTLOGMETRICSREQUEST +DESCRIPTOR.message_types_by_name['ListLogMetricsResponse'] = _LISTLOGMETRICSRESPONSE +DESCRIPTOR.message_types_by_name['GetLogMetricRequest'] = _GETLOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['CreateLogMetricRequest'] = _CREATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['UpdateLogMetricRequest'] = _UPDATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['DeleteLogMetricRequest'] = _DELETELOGMETRICREQUEST + +LogMetric = _reflection.GeneratedProtocolMessageType('LogMetric', (_message.Message,), dict( + + LabelExtractorsEntry = _reflection.GeneratedProtocolMessageType('LabelExtractorsEntry', (_message.Message,), dict( + 
DESCRIPTOR = _LOGMETRIC_LABELEXTRACTORSENTRY,
+    __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+    # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry)
+    ))
+  ,
+  DESCRIPTOR = _LOGMETRIC,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """Describes a logs-based metric. The value of the metric is the number of
+  log entries that match a logs filter in a given time interval.
+
+  Logs-based metrics can also be used to extract values from logs and
+  create a distribution of the values. The distribution records the
+  statistics of the extracted values along with an optional histogram of
+  the values as specified by the bucket options.
+
+
+  Attributes:
+      name:
+          Required. The client-assigned metric identifier. Examples:
+          ``"error_count"``, ``"nginx/requests"``. Metric identifiers
+          are limited to 100 characters and can include only the
+          following characters: ``A-Z``, ``a-z``, ``0-9``, and the
+          special characters ``_-.,+!*',()%/``. The forward-slash
+          character (``/``) denotes a hierarchy of name pieces, and it
+          cannot be the first character of the name. The metric
+          identifier in this field must not be `URL-encoded `__.
+          However, when the metric identifier appears as the
+          ``[METRIC_ID]`` part of a ``metric_name`` API parameter, then
+          the metric identifier must be URL-encoded. Example:
+          ``"projects/my-project/metrics/nginx%2Frequests"``.
+      description:
+          Optional. A description of this metric, which is used in
+          documentation.
+      filter:
+          Required. An `advanced logs filter `__ which is used to
+          match log entries. Example: ::  "resource.type=gae_app
+          AND severity>=ERROR"  The maximum length of the filter is
+          20000 characters.
+      metric_descriptor:
+          Optional. The metric descriptor associated with the logs-based
+          metric. If unspecified, it uses a default metric descriptor
+          with a DELTA metric kind, INT64 value type, with no labels and
+          a unit of "1". Such a metric counts the number of log entries
+          matching the ``filter`` expression. The ``name``, ``type``,
+          and ``description`` fields in the ``metric_descriptor`` are
+          output only, and are constructed using the ``name`` and
+          ``description`` fields in the LogMetric. To create a
+          logs-based metric that records a distribution of log values, a
+          DELTA metric kind with a DISTRIBUTION value type must be used
+          along with a ``value_extractor`` expression in the LogMetric.
+          Each label in the metric descriptor must have a matching label
+          name as the key and an extractor expression as the value in
+          the ``label_extractors`` map. The ``metric_kind`` and
+          ``value_type`` fields in the ``metric_descriptor`` cannot be
+          updated once initially configured. New labels can be added in
+          the ``metric_descriptor``, but existing labels cannot be
+          modified except for their description.
+      value_extractor:
+          Optional. A ``value_extractor`` is required when using a
+          distribution logs-based metric to extract the values to record
+          from a log entry. Two functions are supported for value
+          extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field,
+          regex)``. The arguments are: 1. field: The name of the log
+          entry field from which the value is to be extracted. 2. regex:
+          A regular expression using the Google RE2 syntax
+          (https://github.com/google/re2/wiki/Syntax) with a single
+          capture group to extract data from the specified log entry
+          field. The value of the field is converted to a string before
+          applying the regex. It is an error to specify a regex that
+          does not include exactly one capture group. The result of the
+          extraction must be convertible to a double type, as the
+          distribution always records double values. If either the
+          extraction or the conversion to double fails, then those
+          values are not recorded in the distribution. Example:
+          ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")``
+      label_extractors:
+          Optional. A map from a label key string to an extractor
+          expression which is used to extract data from a log entry
+          field and assign as the label value. Each label key specified
+          in the LabelDescriptor must have an associated extractor
+          expression in this map. The syntax of the extractor expression
+          is the same as for the ``value_extractor`` field. The
+          extracted value is converted to the type defined in the label
+          descriptor. If either the extraction or the type conversion
+          fails, the label will have a default value. The default value
+          for a string label is an empty string, for an integer label it
+          is 0, and for a boolean label it is ``false``. Note that there
+          are upper bounds on the maximum number of labels and the
+          number of active time series that are allowed in a project.
+      bucket_options:
+          Optional. The ``bucket_options`` are required when the
+          logs-based metric is using a DISTRIBUTION value type and it
+          describes the bucket boundaries used to create a histogram of
+          the extracted values.
+      version:
+          Deprecated. The API version that created or updated this
+          metric. The v2 format is used by default and cannot be
+          changed.
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric)
+  ))
+_sym_db.RegisterMessage(LogMetric)
+_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry)
+
+ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType('ListLogMetricsRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LISTLOGMETRICSREQUEST,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """The parameters to ListLogMetrics.
+
+
+  Attributes:
+      parent:
+          Required. The name of the project containing the metrics: ::
+          "projects/[PROJECT_ID]"
+      page_token:
+          Optional. If present, then retrieve the next batch of results
+          from the preceding call to this method. ``pageToken`` must be
+          the value of ``nextPageToken`` from the previous response. The
+          values of other method parameters should be identical to those
+          in the previous call.
+      page_size:
+          Optional. The maximum number of results to return from this
+          request. Non-positive values are ignored. The presence of
+          ``nextPageToken`` in the response indicates that more results
+          might be available.
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest)
+  ))
+_sym_db.RegisterMessage(ListLogMetricsRequest)
+
+ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType('ListLogMetricsResponse', (_message.Message,), dict(
+  DESCRIPTOR = _LISTLOGMETRICSRESPONSE,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """Result returned from ListLogMetrics.
+
+
+  Attributes:
+      metrics:
+          A list of logs-based metrics.
+      next_page_token:
+          If there might be more results than appear in this response,
+          then ``nextPageToken`` is included. To get the next set of
+          results, call this method again using the value of
+          ``nextPageToken`` as ``pageToken``.
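+
+  A hypothetical pagination sketch (``stub`` is a MetricsServiceV2Stub
+  such as the one defined later in this file; the project ID is an
+  assumption):
+
+  ::
+
+      request = ListLogMetricsRequest(parent='projects/my-project')
+      while True:
+          response = stub.ListLogMetrics(request)
+          for metric in response.metrics:
+              print(metric.name)
+          if not response.next_page_token:
+              break
+          request.page_token = response.next_page_token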
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse)
+  ))
+_sym_db.RegisterMessage(ListLogMetricsResponse)
+
+GetLogMetricRequest = _reflection.GeneratedProtocolMessageType('GetLogMetricRequest', (_message.Message,), dict(
+  DESCRIPTOR = _GETLOGMETRICREQUEST,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """The parameters to GetLogMetric.
+
+
+  Attributes:
+      metric_name:
+          The resource name of the desired metric: ::
+          "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest)
+  ))
+_sym_db.RegisterMessage(GetLogMetricRequest)
+
+CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType('CreateLogMetricRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CREATELOGMETRICREQUEST,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """The parameters to CreateLogMetric.
+
+
+  Attributes:
+      parent:
+          The resource name of the project in which to create the
+          metric: ::  "projects/[PROJECT_ID]"  The new metric must
+          be provided in the request.
+      metric:
+          The new logs-based metric, which must not have an identifier
+          that already exists.
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest)
+  ))
+_sym_db.RegisterMessage(CreateLogMetricRequest)
+
+UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType('UpdateLogMetricRequest', (_message.Message,), dict(
+  DESCRIPTOR = _UPDATELOGMETRICREQUEST,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """The parameters to UpdateLogMetric.
+
+
+  Attributes:
+      metric_name:
+          The resource name of the metric to update: ::
+          "projects/[PROJECT_ID]/metrics/[METRIC_ID]"  The updated
+          metric must be provided in the request and its ``name`` field
+          must be the same as ``[METRIC_ID]``. If the metric does not
+          exist in ``[PROJECT_ID]``, then a new metric is created.
+      metric:
+          The updated metric.
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest)
+  ))
+_sym_db.RegisterMessage(UpdateLogMetricRequest)
+
+DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType('DeleteLogMetricRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETELOGMETRICREQUEST,
+  __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2'
+  ,
+  __doc__ = """The parameters to DeleteLogMetric.
+
+
+  Attributes:
+      metric_name:
+          The resource name of the metric to delete: ::
+          "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+  """,
+  # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest)
+  ))
+_sym_db.RegisterMessage(DeleteLogMetricRequest)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2'))
+_LOGMETRIC_LABELEXTRACTORSENTRY.has_options = True
+_LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+try:
+  # THESE ELEMENTS WILL BE DEPRECATED.
+  # Please use the generated *_pb2_grpc.py files instead. 
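+  # Note: the gRPC imports below may raise ImportError on installations
+  # without grpcio; the try/except ImportError enclosing this block (see
+  # the end of this file) swallows that, so the message classes defined
+  # above remain usable without gRPC.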
+ import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class MetricsServiceV2Stub(object): + """Service for configuring logs-based metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListLogMetrics = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=ListLogMetricsRequest.SerializeToString, + response_deserializer=ListLogMetricsResponse.FromString, + ) + self.GetLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=GetLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.CreateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=CreateLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.UpdateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=UpdateLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.DeleteLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class MetricsServiceV2Servicer(object): + """Service for configuring logs-based metrics. + """ + + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_MetricsServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=ListLogMetricsRequest.FromString, + response_serializer=ListLogMetricsResponse.SerializeToString, + ), + 'GetLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=GetLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=CreateLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=UpdateLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.MetricsServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaMetricsServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring logs-based metrics. + """ + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaMetricsServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring logs-based metrics. + """ + def ListLogMetrics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists logs-based metrics. + """ + raise NotImplementedError() + ListLogMetrics.future = None + def GetLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets a logs-based metric. + """ + raise NotImplementedError() + GetLogMetric.future = None + def CreateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a logs-based metric. 
+ """ + raise NotImplementedError() + CreateLogMetric.future = None + def UpdateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates or updates a logs-based metric. + """ + raise NotImplementedError() + UpdateLogMetric.future = None + def DeleteLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a logs-based metric. + """ + raise NotImplementedError() + DeleteLogMetric.future = None + + + def beta_create_MetricsServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.FromString, + } + response_serializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.SerializeToString, + } + method_implementations = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): face_utilities.unary_unary_inline(servicer.CreateLogMetric), + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): face_utilities.unary_unary_inline(servicer.DeleteLogMetric), + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): face_utilities.unary_unary_inline(servicer.GetLogMetric), + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): face_utilities.unary_unary_inline(servicer.ListLogMetrics), + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): face_utilities.unary_unary_inline(servicer.UpdateLogMetric), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_MetricsServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.FromString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.FromString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.FromString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.FromString, + } + cardinalities = { + 'CreateLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'DeleteLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'GetLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'ListLogMetrics': cardinality.Cardinality.UNARY_UNARY, + 'UpdateLogMetric': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.MetricsServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py new file mode 100644 index 000000000000..b464e0b4b3a4 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -0,0 +1,115 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.logging_v2.proto.logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class MetricsServiceV2Stub(object): + """Service for configuring logs-based metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
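+
+        A minimal usage sketch (hypothetical: the channel target and
+        project ID are assumptions, not part of this generated file):
+
+            channel = grpc.insecure_channel('localhost:8080')
+            stub = MetricsServiceV2Stub(channel)
+            request = google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest(
+                parent='projects/my-project')
+            metrics = stub.ListLogMetrics(request)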
+ """ + self.ListLogMetrics = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, + ) + self.GetLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.CreateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.UpdateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.DeleteLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class MetricsServiceV2Servicer(object): + """Service for configuring logs-based metrics. + """ + + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_MetricsServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, + ), + 'GetLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.MetricsServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py new file mode 100644 index 000000000000..9665bcdee9f2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -0,0 +1,1146 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/logging.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 +from google.cloud.logging_v2.proto import log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\"\x98\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x19\n\x17WriteLogEntriesResponse\"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01\"\x8d\x01\n\x15ListLogEntriesRequest\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xd5\x05\n\x10LoggingServiceV2\x12w\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v2beta1/{log_name=projects/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v2/monitoredResourceDescriptors\x12r\n\x08ListLogs\x12\".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v2/{parent=*/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_DELETELOGREQUEST = _descriptor.Descriptor( + name='DeleteLogRequest', + full_name='google.logging.v2.DeleteLogRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.DeleteLogRequest.log_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=299, + serialized_end=335, +) + + +_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=573, + 
serialized_end=618, +) + +_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( + name='WriteLogEntriesRequest', + full_name='google.logging.v2.WriteLogEntriesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.WriteLogEntriesRequest.log_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource', full_name='google.logging.v2.WriteLogEntriesRequest.resource', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.logging.v2.WriteLogEntriesRequest.labels', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entries', full_name='google.logging.v2.WriteLogEntriesRequest.entries', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='partial_success', full_name='google.logging.v2.WriteLogEntriesRequest.partial_success', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=338, + serialized_end=618, +) + + +_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( + name='WriteLogEntriesResponse', + full_name='google.logging.v2.WriteLogEntriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=620, + serialized_end=645, +) + + +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( + name='LogEntryErrorsEntry', + full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], 
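+  # LogEntryErrorsEntry is the synthetic map-entry message backing the
+  # log_entry_errors map<int32, google.rpc.Status>; the '8\001' bytes in
+  # the `options` argument below decode to map_entry=True.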
+ extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=775, + serialized_end=848, +) + +_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( + name='WriteLogEntriesPartialErrors', + full_name='google.logging.v2.WriteLogEntriesPartialErrors', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_entry_errors', full_name='google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=648, + serialized_end=848, +) + + +_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( + name='ListLogEntriesRequest', + full_name='google.logging.v2.ListLogEntriesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_ids', full_name='google.logging.v2.ListLogEntriesRequest.project_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource_names', full_name='google.logging.v2.ListLogEntriesRequest.resource_names', index=1, + number=8, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.ListLogEntriesRequest.filter', index=2, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='order_by', full_name='google.logging.v2.ListLogEntriesRequest.order_by', index=3, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogEntriesRequest.page_size', index=4, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogEntriesRequest.page_token', index=5, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=851, 
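+  # serialized_start/serialized_end are byte offsets of this message's
+  # definition within the serialized_pb blob at the top of this file.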
+ serialized_end=992, +) + + +_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( + name='ListLogEntriesResponse', + full_name='google.logging.v2.ListLogEntriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entries', full_name='google.logging.v2.ListLogEntriesResponse.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogEntriesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=994, + serialized_end=1089, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( + name='ListMonitoredResourceDescriptorsRequest', + full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1091, + serialized_end=1171, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( + name='ListMonitoredResourceDescriptorsResponse', + full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='resource_descriptors', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1174, + 
serialized_end=1312, +) + + +_LISTLOGSREQUEST = _descriptor.Descriptor( + name='ListLogsRequest', + full_name='google.logging.v2.ListLogsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListLogsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1314, + serialized_end=1386, +) + + +_LISTLOGSRESPONSE = _descriptor.Descriptor( + name='ListLogsResponse', + full_name='google.logging.v2.ListLogsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_names', full_name='google.logging.v2.ListLogsResponse.log_names', index=0, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1388, + serialized_end=1450, +) + +_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST +_WRITELOGENTRIESREQUEST.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_WRITELOGENTRIESREQUEST.fields_by_name['labels'].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY +_WRITELOGENTRIESREQUEST.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name['value'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = _WRITELOGENTRIESPARTIALERRORS +_WRITELOGENTRIESPARTIALERRORS.fields_by_name['log_entry_errors'].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY +_LISTLOGENTRIESRESPONSE.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY 
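+# The assignments in this block resolve message-typed fields to their
+# descriptors only after every descriptor object above exists; protoc
+# emits them separately because the Descriptor constructors cannot
+# reference one another while they are still being built.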
+_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name['resource_descriptors'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR +DESCRIPTOR.message_types_by_name['DeleteLogRequest'] = _DELETELOGREQUEST +DESCRIPTOR.message_types_by_name['WriteLogEntriesRequest'] = _WRITELOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name['WriteLogEntriesResponse'] = _WRITELOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name['WriteLogEntriesPartialErrors'] = _WRITELOGENTRIESPARTIALERRORS +DESCRIPTOR.message_types_by_name['ListLogEntriesRequest'] = _LISTLOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name['ListLogEntriesResponse'] = _LISTLOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsRequest'] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST +DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsResponse'] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE +DESCRIPTOR.message_types_by_name['ListLogsRequest'] = _LISTLOGSREQUEST +DESCRIPTOR.message_types_by_name['ListLogsResponse'] = _LISTLOGSRESPONSE + +DeleteLogRequest = _reflection.GeneratedProtocolMessageType('DeleteLogRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETELOGREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to DeleteLog. + + + Attributes: + log_name: + Required. The resource name of the log to delete: :: + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example, ``"projects/my-project- + id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. For more information + about log names, see [LogEntry][google.logging.v2.LogEntry]. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) + )) +_sym_db.RegisterMessage(DeleteLogRequest) + +WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType('WriteLogEntriesRequest', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESREQUEST_LABELSENTRY, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) + )) + , + DESCRIPTOR = _WRITELOGENTRIESREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to WriteLogEntries. + + + Attributes: + log_name: + Optional. A default log resource name that is assigned to all + log entries in ``entries`` that do not specify a value for + ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example, ``"projects/my-project-id/logs/syslog"`` + or ``"organizations/1234567890/logs/cloudresourcemanager.googl + eapis.com%2Factivity"``. For more information about log names, + see [LogEntry][google.logging.v2.LogEntry]. + resource: + Optional. A default monitored resource object that is assigned + to all log entries in ``entries`` that do not specify a value + for ``resource``. Example: :: { "type": "gce_instance", + "labels": { "zone": "us-central1-a", "instance_id": + "00000000000000000000" }} See + [LogEntry][google.logging.v2.LogEntry]. + labels: + Optional. 
Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries: + Required. The log entries to send to Stackdriver Logging. The + order of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, and + ``labels`` fields are copied into those log entries in this + list that do not include values for their corresponding + fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. If the + ``timestamp`` or ``insert_id`` fields are missing in log + entries, then this method supplies the current time or a + unique identifier, respectively. The supplied values are + chosen so that, among the log entries that did not supply + their own values, the entries earlier in the list will sort + before the entries later in the list. See the ``entries.list`` + method. Log entries with timestamps that are more than the + `logs retention period `__ in the past + or more than 24 hours in the future might be discarded. + Discarding does not return an error. To improve throughput + and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to + include several log entries in this list, rather than calling + this method for each individual log entry. + partial_success: + Optional. Whether valid entries should be written even if some + other entries fail due to INVALID\_ARGUMENT or + PERMISSION\_DENIED errors. If any entry is not written, then + the response status is the error associated with one of the + failed entries and the response includes error details keyed + by the entries' zero-based index in the ``entries.write`` + method. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) + )) +_sym_db.RegisterMessage(WriteLogEntriesRequest) +_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) + +WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType('WriteLogEntriesResponse', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from WriteLogEntries. empty + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) + )) +_sym_db.RegisterMessage(WriteLogEntriesResponse) + +WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType('WriteLogEntriesPartialErrors', (_message.Message,), dict( + + LogEntryErrorsEntry = _reflection.GeneratedProtocolMessageType('LogEntryErrorsEntry', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + )) + , + DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Error details for WriteLogEntries with partial success. + + + Attributes: + log_entry_errors: + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written due + to a permanent error, keyed by the entry's zero-based index in + ``WriteLogEntriesRequest.entries``. Failed requests for which + no entries are written will not include per-entry errors. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) + )) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + +ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType('ListLogEntriesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGENTRIESREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ``ListLogEntries``. + + + Attributes: + project_ids: + Deprecated. Use ``resource_names`` instead. One or more + project identifiers or project numbers from which to retrieve + log entries. Example: ``"my-project-1A"``. If present, these + project identifiers are converted to resource name format and + added to the list of resources in ``resource_names``. + resource_names: + Required. Names of one or more parent resources from which to + retrieve log entries: :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` + field are added to this list. + filter: + Optional. A filter that chooses which log entries to return. + See `Advanced Logs Filters + `__. Only log entries + that match the filter are returned. An empty filter matches + all log entries in the resources listed in ``resource_names``. + Referencing a parent resource that is not listed in + ``resource_names`` will cause the filter to return no results. + The maximum length of the filter is 20000 characters. + order_by: + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``page_token`` must be + the value of ``next_page_token`` from the previous response. + The values of other method parameters should be identical to + those in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) + )) +_sym_db.RegisterMessage(ListLogEntriesRequest) + +ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType('ListLogEntriesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGENTRIESRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ``ListLogEntries``. + + + Attributes: + entries: + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that more + entries may exist. See ``nextPageToken`` for more information. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. 
If a value for + ``next_page_token`` appears and the ``entries`` field is + empty, it means that the search found no log entries so far + but it did not have time to search all the possible log + entries. Retry the method with this value for ``page_token`` + to continue the search. Alternatively, consider speeding up + the search by changing your filter to specify a single log + name or resource type, or to narrow the time range of the + search. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) + )) +_sym_db.RegisterMessage(ListLogEntriesResponse) + +ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ListMonitoredResourceDescriptors + + + Attributes: + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) + )) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) + +ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ListMonitoredResourceDescriptors. + + + Attributes: + resource_descriptors: + A list of resource descriptors. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) + )) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) + +ListLogsRequest = _reflection.GeneratedProtocolMessageType('ListLogsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ListLogs. + + + Attributes: + parent: + Required. The resource name that owns the logs: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) + )) +_sym_db.RegisterMessage(ListLogsRequest) + +ListLogsResponse = _reflection.GeneratedProtocolMessageType('ListLogsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ListLogs. + + + Attributes: + log_names: + A list of log names. For example, ``"projects/my- + project/syslog"`` or ``"organizations/123/cloudresourcemanager + .googleapis.com%2Factivity"``. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) + )) +_sym_db.RegisterMessage(ListLogsResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +_WRITELOGENTRIESREQUEST_LABELSENTRY.has_options = True +_WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.has_options = True +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LoggingServiceV2Stub(object): + """Service for ingesting and querying logs. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.DeleteLog = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.WriteLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=WriteLogEntriesRequest.SerializeToString, + response_deserializer=WriteLogEntriesResponse.FromString, + ) + self.ListLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=ListLogEntriesRequest.SerializeToString, + response_deserializer=ListLogEntriesResponse.FromString, + ) + self.ListMonitoredResourceDescriptors = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=ListMonitoredResourceDescriptorsResponse.FromString, + ) + self.ListLogs = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=ListLogsRequest.SerializeToString, + response_deserializer=ListLogsResponse.FromString, + ) + + + class LoggingServiceV2Servicer(object): + """Service for ingesting and querying logs. + """ + + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. 
+ The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LoggingServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'DeleteLog': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=WriteLogEntriesRequest.FromString, + response_serializer=WriteLogEntriesResponse.SerializeToString, + ), + 'ListLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=ListLogEntriesRequest.FromString, + response_serializer=ListLogEntriesResponse.SerializeToString, + ), + 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=ListMonitoredResourceDescriptorsRequest.FromString, + response_serializer=ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + 'ListLogs': grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=ListLogsRequest.FromString, + response_serializer=ListLogsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.LoggingServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLoggingServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for ingesting and querying logs. + """ + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLoggingServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for ingesting and querying logs. + """ + def DeleteLog(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + raise NotImplementedError() + DeleteLog.future = None + def WriteLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + raise NotImplementedError() + WriteLogEntries.future = None + def ListLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + raise NotImplementedError() + ListLogEntries.future = None + def ListMonitoredResourceDescriptors(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + raise NotImplementedError() + ListMonitoredResourceDescriptors.future = None + def ListLogs(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the logs in projects, organizations, folders, or billing accounts. 
+ Only logs that have entries are listed. + """ + raise NotImplementedError() + ListLogs.future = None + + + def beta_create_LoggingServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.FromString, + } + response_serializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.SerializeToString, + } + method_implementations = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): face_utilities.unary_unary_inline(servicer.DeleteLog), + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): face_utilities.unary_unary_inline(servicer.ListLogEntries), + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): face_utilities.unary_unary_inline(servicer.ListLogs), + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): face_utilities.unary_unary_inline(servicer.ListMonitoredResourceDescriptors), + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): face_utilities.unary_unary_inline(servicer.WriteLogEntries), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LoggingServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.FromString, + } + cardinalities = { + 'DeleteLog': cardinality.Cardinality.UNARY_UNARY, + 'ListLogEntries': cardinality.Cardinality.UNARY_UNARY, + 'ListLogs': cardinality.Cardinality.UNARY_UNARY, + 'ListMonitoredResourceDescriptors': cardinality.Cardinality.UNARY_UNARY, + 'WriteLogEntries': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.LoggingServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py new file mode 100644 index 000000000000..fc16e12e140f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -0,0 +1,128 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.logging_v2.proto.logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class LoggingServiceV2Stub(object): + """Service for ingesting and querying logs. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.DeleteLog = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.WriteLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, + ) + self.ListLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, + ) + self.ListMonitoredResourceDescriptors = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, + ) + self.ListLogs = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, + ) + + +class LoggingServiceV2Servicer(object): + """Service for ingesting and querying logs. + """ + + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. 
+ Only logs that have entries are listed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LoggingServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'DeleteLog': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, + ), + 'ListLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, + ), + 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + 'ListLogs': grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.LoggingServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types.py b/packages/google-cloud-logging/google/cloud/logging_v2/types.py new file mode 100644 index 000000000000..d440d8f58f07 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types.py @@ -0,0 +1,65 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
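+#
+# Note: this module re-exports every protobuf message class defined in the
+# generated ``*_pb2`` modules imported below under the
+# ``google.cloud.logging_v2.types`` namespace. A minimal usage sketch,
+# assuming the package is installed::
+#
+#     from google.cloud.logging_v2 import types
+#     entry = types.LogEntry(log_name='projects/my-project/logs/my-log')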
+ +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.api import distribution_pb2 +from google.api import http_pb2 +from google.api import label_pb2 +from google.api import metric_pb2 +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.logging.type import http_request_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + +names = [] +for module in ( + distribution_pb2, + http_pb2, + label_pb2, + metric_pb2, + monitored_resource_pb2, + log_entry_pb2, + logging_config_pb2, + logging_metrics_pb2, + logging_pb2, + http_request_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + struct_pb2, + timestamp_pb2, + status_pb2, +): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.logging_v2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + +__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index c25cc45e4b2d..fdf48fedafa4 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -112,8 +112,9 @@ def system(session, py): 'py.test', '-vvv', '-s', - 'tests/system.py', + 'tests/system', *session.posargs, + # Currently allowed to fail due to very high flakiness. success_codes=range(0, 100) ) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5ecaed3aff74..6eb782259fed 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -51,9 +51,8 @@ REQUIREMENTS = [ - 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', - 'google-api-core >= 0.1.1, < 0.2.0dev', - 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', + 'google-cloud-core >= 0.28.0, < 0.29dev', + 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', ] setup( diff --git a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py new file mode 100644 index 000000000000..ad99b1081cc5 --- /dev/null +++ b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py @@ -0,0 +1,34 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
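+#
+# Smoke test: expects a PROJECT_ID environment variable and ambient
+# Google Cloud credentials, and writes an (empty) batch of entries to a
+# uniquely named log via the generated LoggingServiceV2Client.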
+ +import os +import time + +from google.api import monitored_resource_pb2 +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + + +class TestSystemLoggingServiceV2(object): + def test_write_log_entries(self): + project_id = os.environ['PROJECT_ID'] + + client = logging_v2.LoggingServiceV2Client() + log_name = client.log_path(project_id, 'test-{0}'.format(time.time())) + resource = {} + labels = {} + entries = [] + response = client.write_log_entries( + entries, log_name=log_name, resource=resource, labels=labels) diff --git a/packages/google-cloud-logging/tests/system.py b/packages/google-cloud-logging/tests/system/test_system.py similarity index 97% rename from packages/google-cloud-logging/tests/system.py rename to packages/google-cloud-logging/tests/system/test_system.py index 3449438600cc..ffa4615612d9 100644 --- a/packages/google-cloud-logging/tests/system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -16,14 +16,11 @@ import logging import unittest -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode - from google.cloud._helpers import UTC from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.exceptions import TooManyRequests +from google.cloud.exceptions import ServiceUnavailable import google.cloud.logging import google.cloud.logging.handlers.handlers from google.cloud.logging.handlers.handlers import CloudLoggingHandler @@ -41,18 +38,6 @@ retry_429 = RetryErrors(TooManyRequests) -def _retry_on_unavailable(exc): - """Retry only errors whose status code is 'UNAVAILABLE'. - - :type exc: :class:`~google.gax.errors.GaxError` - :param exc: The exception that was caught. - - :rtype: bool - :returns: Boolean indicating if the exception was UNAVAILABLE. - """ - return exc_to_code(exc) == StatusCode.UNAVAILABLE - - def _consume_entries(logger): """Consume all log entries from logger iterator. @@ -78,7 +63,7 @@ def _list_entries(logger): :returns: List of all entries consumed. """ inner = RetryResult(_has_entries)(_consume_entries) - outer = RetryErrors(GaxError, _retry_on_unavailable)(inner) + outer = RetryErrors(ServiceUnavailable)(inner) return outer(logger) diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py new file mode 100644 index 000000000000..a6893944cff4 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -0,0 +1,460 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
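+#
+# These tests drive the generated client over an in-memory channel stub:
+# each expected response proto (or exception) is queued on the stub, and
+# every request the client sends is captured so the tests can assert on
+# the exact request proto afterwards.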
+"""Unit tests.""" + +import pytest + +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestConfigServiceV2Client(object): + def test_list_sinks(self): + # Setup Expected Response + next_page_token = '' + sinks_element = {} + sinks = [sinks_element] + expected_response = { + 'next_page_token': next_page_token, + 'sinks': sinks + } + expected_response = logging_config_pb2.ListSinksResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_sinks(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.sinks[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.ListSinksRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_sinks_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_sinks(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_sink(self): + # Setup Expected Response + name = 'name3373707' + destination = 'destination-1429847026' + filter_ = 'filter-1274492040' + writer_identity = 'writerIdentity775638794' + include_children = True + expected_response = { + 'name': name, + 'destination': destination, + 'filter': filter_, + 'writer_identity': writer_identity, + 'include_children': include_children + } + expected_response = logging_config_pb2.LogSink(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + sink_name = client.sink_path('[PROJECT]', '[SINK]') + + response = client.get_sink(sink_name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.GetSinkRequest( + sink_name=sink_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_sink_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + sink_name = 
client.sink_path('[PROJECT]', '[SINK]') + + with pytest.raises(CustomException): + client.get_sink(sink_name) + + def test_create_sink(self): + # Setup Expected Response + name = 'name3373707' + destination = 'destination-1429847026' + filter_ = 'filter-1274492040' + writer_identity = 'writerIdentity775638794' + include_children = True + expected_response = { + 'name': name, + 'destination': destination, + 'filter': filter_, + 'writer_identity': writer_identity, + 'include_children': include_children + } + expected_response = logging_config_pb2.LogSink(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + sink = {} + + response = client.create_sink(parent, sink) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.CreateSinkRequest( + parent=parent, sink=sink) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_sink_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + sink = {} + + with pytest.raises(CustomException): + client.create_sink(parent, sink) + + def test_update_sink(self): + # Setup Expected Response + name = 'name3373707' + destination = 'destination-1429847026' + filter_ = 'filter-1274492040' + writer_identity = 'writerIdentity775638794' + include_children = True + expected_response = { + 'name': name, + 'destination': destination, + 'filter': filter_, + 'writer_identity': writer_identity, + 'include_children': include_children + } + expected_response = logging_config_pb2.LogSink(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + sink_name = client.sink_path('[PROJECT]', '[SINK]') + sink = {} + + response = client.update_sink(sink_name, sink) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.UpdateSinkRequest( + sink_name=sink_name, sink=sink) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_sink_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + sink_name = client.sink_path('[PROJECT]', '[SINK]') + sink = {} + + with pytest.raises(CustomException): + client.update_sink(sink_name, sink) + + def test_delete_sink(self): + channel = ChannelStub() + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + sink_name = client.sink_path('[PROJECT]', '[SINK]') + + client.delete_sink(sink_name) + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.DeleteSinkRequest( + sink_name=sink_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_sink_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + sink_name = client.sink_path('[PROJECT]', '[SINK]') + + with pytest.raises(CustomException): + client.delete_sink(sink_name) 
+ + def test_list_exclusions(self): + # Setup Expected Response + next_page_token = '' + exclusions_element = {} + exclusions = [exclusions_element] + expected_response = { + 'next_page_token': next_page_token, + 'exclusions': exclusions + } + expected_response = logging_config_pb2.ListExclusionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_exclusions(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.exclusions[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.ListExclusionsRequest( + parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_exclusions_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_exclusions(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_exclusion(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + disabled = True + expected_response = { + 'name': name_2, + 'description': description, + 'filter': filter_, + 'disabled': disabled + } + expected_response = logging_config_pb2.LogExclusion( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + + response = client.get_exclusion(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.GetExclusionRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_exclusion_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + + with pytest.raises(CustomException): + client.get_exclusion(name) + + def test_create_exclusion(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + disabled = True + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'disabled': disabled + } + expected_response = logging_config_pb2.LogExclusion( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + exclusion = {} + + response = client.create_exclusion(parent, exclusion) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.CreateExclusionRequest( + parent=parent, exclusion=exclusion) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_exclusion_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = 
logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + exclusion = {} + + with pytest.raises(CustomException): + client.create_exclusion(parent, exclusion) + + def test_update_exclusion(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + disabled = True + expected_response = { + 'name': name_2, + 'description': description, + 'filter': filter_, + 'disabled': disabled + } + expected_response = logging_config_pb2.LogExclusion( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + exclusion = {} + update_mask = {} + + response = client.update_exclusion(name, exclusion, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.UpdateExclusionRequest( + name=name, exclusion=exclusion, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_exclusion_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + exclusion = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_exclusion(name, exclusion, update_mask) + + def test_delete_exclusion(self): + channel = ChannelStub() + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup Request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + + client.delete_exclusion(name) + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.DeleteExclusionRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_exclusion_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.ConfigServiceV2Client(channel=channel) + + # Setup request + name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + + with pytest.raises(CustomException): + client.delete_exclusion(name) diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py new file mode 100644 index 000000000000..7d942ec9e67f --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py @@ -0,0 +1,238 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
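+#
+# Covers the five LoggingServiceV2 RPCs (DeleteLog, WriteLogEntries,
+# ListLogEntries, ListMonitoredResourceDescriptors, ListLogs) using the
+# same in-memory channel-stub pattern as the config-service tests.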
+"""Unit tests.""" + +import pytest + +from google.api import monitored_resource_pb2 +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import empty_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestLoggingServiceV2Client(object): + def test_delete_log(self): + channel = ChannelStub() + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + log_name = client.log_path('[PROJECT]', '[LOG]') + + client.delete_log(log_name) + + assert len(channel.requests) == 1 + expected_request = logging_pb2.DeleteLogRequest(log_name=log_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_log_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + log_name = client.log_path('[PROJECT]', '[LOG]') + + with pytest.raises(CustomException): + client.delete_log(log_name) + + def test_write_log_entries(self): + # Setup Expected Response + expected_response = {} + expected_response = logging_pb2.WriteLogEntriesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + entries = [] + + response = client.write_log_entries(entries) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_pb2.WriteLogEntriesRequest(entries=entries) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_write_log_entries_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + entries = [] + + with pytest.raises(CustomException): + client.write_log_entries(entries) + + def test_list_log_entries(self): + # Setup Expected Response + next_page_token = '' + entries_element = {} + entries = [entries_element] + expected_response = { + 'next_page_token': next_page_token, + 'entries': entries + } + expected_response = logging_pb2.ListLogEntriesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + resource_names = [] + + paged_list_response = client.list_log_entries(resource_names) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.entries[0] 
== resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListLogEntriesRequest( + resource_names=resource_names) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_log_entries_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + resource_names = [] + + paged_list_response = client.list_log_entries(resource_names) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_list_monitored_resource_descriptors(self): + # Setup Expected Response + next_page_token = '' + resource_descriptors_element = {} + resource_descriptors = [resource_descriptors_element] + expected_response = { + 'next_page_token': next_page_token, + 'resource_descriptors': resource_descriptors + } + expected_response = logging_pb2.ListMonitoredResourceDescriptorsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + paged_list_response = client.list_monitored_resource_descriptors() + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.resource_descriptors[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest( + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_monitored_resource_descriptors_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + paged_list_response = client.list_monitored_resource_descriptors() + with pytest.raises(CustomException): + list(paged_list_response) + + def test_list_logs(self): + # Setup Expected Response + next_page_token = '' + log_names_element = 'logNamesElement-1079688374' + log_names = [log_names_element] + expected_response = { + 'next_page_token': next_page_token, + 'log_names': log_names + } + expected_response = logging_pb2.ListLogsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_logs(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.log_names[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListLogsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_logs_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_logs(parent) + with pytest.raises(CustomException): + list(paged_list_response) diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py new file mode 100644 index 000000000000..2efc90c24a1d --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -0,0 +1,256 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Unit tests.""" + +import pytest + +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.protobuf import empty_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestMetricsServiceV2Client(object): + def test_list_log_metrics(self): + # Setup Expected Response + next_page_token = '' + metrics_element = {} + metrics = [metrics_element] + expected_response = { + 'next_page_token': next_page_token, + 'metrics': metrics + } + expected_response = logging_metrics_pb2.ListLogMetricsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_log_metrics(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.metrics[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.ListLogMetricsRequest( + parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_log_metrics_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_log_metrics(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + response = client.get_log_metric(metric_name) 
+ assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.GetLogMetricRequest( + metric_name=metric_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + with pytest.raises(CustomException): + client.get_log_metric(metric_name) + + def test_create_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + metric = {} + + response = client.create_log_metric(parent, metric) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.CreateLogMetricRequest( + parent=parent, metric=metric) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + metric = {} + + with pytest.raises(CustomException): + client.create_log_metric(parent, metric) + + def test_update_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + metric = {} + + response = client.update_log_metric(metric_name, metric) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.UpdateLogMetricRequest( + metric_name=metric_name, metric=metric) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + metric = {} + + with pytest.raises(CustomException): + client.update_log_metric(metric_name, metric) + + def test_delete_log_metric(self): + channel = ChannelStub() + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + client.delete_log_metric(metric_name) + + assert len(channel.requests) == 1 + 
expected_request = logging_metrics_pb2.DeleteLogMetricRequest( + metric_name=metric_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + with pytest.raises(CustomException): + client.delete_log_metric(metric_name) diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py new file mode 100644 index 000000000000..c578d6d852bb --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -0,0 +1,645 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +from google.api_core import grpc_helpers +import google.auth.credentials +from google.protobuf import empty_pb2 +import mock + +import google.cloud.logging +from google.cloud.logging import _gapic +from google.cloud.logging_v2.gapic import config_service_v2_client +from google.cloud.logging_v2.gapic import logging_service_v2_client +from google.cloud.logging_v2.gapic import metrics_service_v2_client +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 + + +PROJECT = 'PROJECT' +PROJECT_PATH = 'projects/%s' % (PROJECT,) +FILTER = 'logName:syslog AND severity>=ERROR' + + +class Test_LoggingAPI(object): + LOG_NAME = 'log_name' + LOG_PATH = 'projects/%s/logs/%s' % (PROJECT, LOG_NAME) + + @staticmethod + def make_logging_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = logging_service_v2_client.LoggingServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._LoggingAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = logging_service_v2_client.LoggingServiceV2Client( + channel=channel) + api = _gapic._LoggingAPI(gapic_client, mock.sentinel.client) + assert api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_entries(self): + channel, api = self.make_logging_api() + + log_entry_msg = log_entry_pb2.LogEntry( + log_name=self.LOG_PATH, + text_payload='text') + channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( + entries=[log_entry_msg]) + result = api.list_entries( + [PROJECT], FILTER, google.cloud.logging.DESCENDING) + + entries = list(result) + + # Check the response + assert len(entries) == 1 + entry = entries[0] + assert isinstance(entry, google.cloud.logging.entries.TextEntry) + assert entry.payload == 'text' + + # Check the request + assert len(channel.ListLogEntries.requests) == 1 + request = channel.ListLogEntries.requests[0] + assert 
request.project_ids == [PROJECT] + assert request.filter == FILTER + assert request.order_by == google.cloud.logging.DESCENDING + + def test_list_entries_with_options(self): + channel, api = self.make_logging_api() + + channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( + entries=[]) + + result = api.list_entries( + [PROJECT], FILTER, google.cloud.logging.ASCENDING, page_size=42, + page_token='token') + + list(result) + + # Check the request + assert len(channel.ListLogEntries.requests) == 1 + request = channel.ListLogEntries.requests[0] + assert request.project_ids == [PROJECT] + assert request.filter == FILTER + assert request.order_by == google.cloud.logging.ASCENDING + assert request.page_size == 42 + assert request.page_token == 'token' + + def test_write_entries_single(self): + channel, api = self.make_logging_api() + + channel.WriteLogEntries.response = empty_pb2.Empty() + + entry = { + 'logName': self.LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': 'text', + } + + api.write_entries([entry]) + + # Check the request + assert len(channel.WriteLogEntries.requests) == 1 + request = channel.WriteLogEntries.requests[0] + assert request.partial_success is False + assert len(request.entries) == 1 + assert request.entries[0].log_name == entry['logName'] + assert request.entries[0].resource.type == entry['resource']['type'] + assert request.entries[0].text_payload == 'text' + + def test_logger_delete(self): + channel, api = self.make_logging_api() + + channel.DeleteLog.response = empty_pb2.Empty() + + api.logger_delete(PROJECT, self.LOG_NAME) + + assert len(channel.DeleteLog.requests) == 1 + request = channel.DeleteLog.requests[0] + assert request.log_name == self.LOG_PATH + + +class Test_SinksAPI(object): + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com' + + @staticmethod + def make_sinks_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = config_service_v2_client.ConfigServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._SinksAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = config_service_v2_client.ConfigServiceV2Client( + channel=channel) + api = _gapic._SinksAPI(gapic_client, mock.sentinel.client) + assert api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_sinks(self): + channel, api = self.make_sinks_api() + + sink_msg = logging_config_pb2.LogSink( + name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=FILTER) + channel.ListSinks.response = logging_config_pb2.ListSinksResponse( + sinks=[sink_msg]) + + result = api.list_sinks(PROJECT) + sinks = list(result) + + # Check the response + assert len(sinks) == 1 + sink = sinks[0] + assert isinstance(sink, google.cloud.logging.sink.Sink) + assert sink.name == self.SINK_PATH + assert sink.destination == self.DESTINATION_URI + assert sink.filter_ == FILTER + + # Check the request + assert len(channel.ListSinks.requests) == 1 + request = channel.ListSinks.requests[0] + assert request.parent == PROJECT_PATH + + def test_list_sinks_with_options(self): + channel, api = self.make_sinks_api() + + channel.ListSinks.response = logging_config_pb2.ListSinksResponse( + sinks=[]) + + result = api.list_sinks(PROJECT, page_size=42, page_token='token') + list(result) + + # Check the 
request + assert len(channel.ListSinks.requests) == 1 + request = channel.ListSinks.requests[0] + assert request.parent == 'projects/%s' % PROJECT + assert request.page_size == 42 + assert request.page_token == 'token' + + def test_sink_create(self): + channel, api = self.make_sinks_api() + + channel.CreateSink.response = logging_config_pb2.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = api.sink_create( + PROJECT, + self.SINK_NAME, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True, + ) + + # Check response + assert result == { + 'name': self.SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + } + + # Check request + assert len(channel.CreateSink.requests) == 1 + request = channel.CreateSink.requests[0] + assert request.parent == PROJECT_PATH + assert request.unique_writer_identity is True + assert request.sink.name == self.SINK_NAME + assert request.sink.filter == FILTER + assert request.sink.destination == self.DESTINATION_URI + + def test_sink_get(self): + channel, api = self.make_sinks_api() + + channel.GetSink.response = logging_config_pb2.LogSink( + name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=FILTER) + + response = api.sink_get(PROJECT, self.SINK_NAME) + + # Check response + assert response == { + 'name': self.SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI + } + + # Check request + assert len(channel.GetSink.requests) == 1 + request = channel.GetSink.requests[0] + assert request.sink_name == self.SINK_PATH + + def test_sink_update(self): + channel, api = self.make_sinks_api() + + channel.UpdateSink.response = logging_config_pb2.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = api.sink_update( + PROJECT, + self.SINK_NAME, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True) + + # Check response + assert result == { + 'name': self.SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + } + + # Check request + assert len(channel.UpdateSink.requests) == 1 + request = channel.UpdateSink.requests[0] + assert request.sink_name == self.SINK_PATH + assert request.unique_writer_identity is True + assert request.sink.name == self.SINK_PATH + assert request.sink.filter == FILTER + assert request.sink.destination == self.DESTINATION_URI + + def test_sink_delete(self): + channel, api = self.make_sinks_api() + + channel.DeleteSink.response = empty_pb2.Empty() + + api.sink_delete(PROJECT, self.SINK_NAME) + + assert len(channel.DeleteSink.requests) == 1 + request = channel.DeleteSink.requests[0] + assert request.sink_name == self.SINK_PATH + + +class Test_MetricsAPI(object): + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + DESCRIPTION = 'Description' + + @staticmethod + def make_metrics_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = metrics_service_v2_client.MetricsServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._MetricsAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = metrics_service_v2_client.MetricsServiceV2Client( + channel=channel) + api = _gapic._MetricsAPI(gapic_client, mock.sentinel.client) + assert 
api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_metrics(self): + channel, api = self.make_metrics_api() + + sink_msg = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + channel.ListLogMetrics.response = ( + logging_metrics_pb2.ListLogMetricsResponse( + metrics=[sink_msg])) + + result = api.list_metrics(PROJECT) + metrics = list(result) + + # Check the response + assert len(metrics) == 1 + metric = metrics[0] + assert isinstance(metric, google.cloud.logging.metric.Metric) + assert metric.name == self.METRIC_PATH + assert metric.description == self.DESCRIPTION + assert metric.filter_ == FILTER + + # Check the request + assert len(channel.ListLogMetrics.requests) == 1 + request = channel.ListLogMetrics.requests[0] + assert request.parent == PROJECT_PATH + + def test_list_metrics_options(self): + channel, api = self.make_metrics_api() + + channel.ListLogMetrics.response = ( + logging_metrics_pb2.ListLogMetricsResponse( + metrics=[])) + + result = api.list_metrics(PROJECT, page_size=42, page_token='token') + list(result) + + # Check the request + assert len(channel.ListLogMetrics.requests) == 1 + request = channel.ListLogMetrics.requests[0] + assert request.parent == PROJECT_PATH + assert request.page_size == 42 + assert request.page_token == 'token' + + def test_metric_create(self): + channel, api = self.make_metrics_api() + + channel.CreateLogMetric.response = empty_pb2.Empty() + + api.metric_create( + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + + # Check the request + assert len(channel.CreateLogMetric.requests) == 1 + request = channel.CreateLogMetric.requests[0] + assert request.parent == PROJECT_PATH + assert request.metric.name == self.METRIC_NAME + assert request.metric.filter == FILTER + assert request.metric.description == self.DESCRIPTION + + def test_metric_get(self): + channel, api = self.make_metrics_api() + + channel.GetLogMetric.response = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + + response = api.metric_get(PROJECT, self.METRIC_NAME) + + # Check the response + assert response == { + 'name': self.METRIC_PATH, + 'filter': FILTER, + 'description': self.DESCRIPTION, + } + + # Check the request + assert len(channel.GetLogMetric.requests) == 1 + request = channel.GetLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH + + def test_metric_update(self): + channel, api = self.make_metrics_api() + + channel.UpdateLogMetric.response = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + + response = api.metric_update( + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + + # Check the response + assert response == { + 'name': self.METRIC_PATH, + 'filter': FILTER, + 'description': self.DESCRIPTION, + } + + # Check the request + assert len(channel.UpdateLogMetric.requests) == 1 + request = channel.UpdateLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH + assert request.metric.name == self.METRIC_PATH + assert request.metric.filter == FILTER + assert request.metric.description == self.DESCRIPTION + + def test_metric_delete(self): + channel, api = self.make_metrics_api() + + channel.DeleteLogMetric.response = empty_pb2.Empty() + + api.metric_delete(PROJECT, self.METRIC_NAME) + + assert len(channel.DeleteLogMetric.requests) == 1 + request = channel.DeleteLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH 
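+
+
+# The remaining tests exercise the proto/JSON-mapping helpers in
+# google.cloud.logging._gapic (``_parse_log_entry`` and
+# ``_log_entry_mapping_to_pb``) directly, rather than the API wrapper
+# classes tested above.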
+ + +class Test__parse_log_entry(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gapic import _parse_log_entry + + return _parse_log_entry(*args, **kwargs) + + def test_simple(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + + entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') + result = self._call_fut(entry_pb) + expected = { + 'logName': entry_pb.log_name, + 'textPayload': entry_pb.text_payload, + } + self.assertEqual(result, expected) + + @mock.patch('google.cloud.logging._gapic.MessageToDict', + side_effect=TypeError) + def test_non_registry_failure(self, msg_to_dict_mock): + entry_pb = mock.Mock(spec=['HasField']) + entry_pb.HasField.return_value = False + with self.assertRaises(TypeError): + self._call_fut(entry_pb) + + entry_pb.HasField.assert_called_once_with('proto_payload') + msg_to_dict_mock.assert_called_once_with(entry_pb) + + def test_unregistered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.timestamp_pb2 import Timestamp + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + metadata_bytes = ( + b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') + any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) + timestamp = Timestamp(seconds=61, nanos=1234000) + + entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) + result = self._call_fut(entry_pb) + self.assertEqual(len(result), 2) + self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') + # NOTE: This "hack" is needed on Windows, where the equality check + # for an ``Any`` instance fails on unregistered types. + self.assertEqual(result['protoPayload'].type_url, type_url) + self.assertEqual(result['protoPayload'].value, metadata_bytes) + + def test_registered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. 
+ descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + struct_pb = Struct( + fields={field_name: Value(string_value=field_value)}) + any_pb = any_pb2.Any( + type_url=type_url, + value=struct_pb.SerializeToString(), + ) + + entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') + result = self._call_fut(entry_pb) + expected_proto = { + 'logName': entry_pb.log_name, + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + self.assertEqual(result, expected_proto) + + +class Test__log_entry_mapping_to_pb(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gapic import _log_entry_mapping_to_pb + + return _log_entry_mapping_to_pb(*args, **kwargs) + + def test_simple(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + + result = self._call_fut({}) + self.assertEqual(result, LogEntry()) + + def test_unregistered_type(self): + from google.protobuf import descriptor_pool + from google.protobuf.json_format import ParseError + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + json_mapping = { + 'protoPayload': { + '@type': type_url, + 'originalRequest': { + 'name': 'foo', + 'location': 'bar', + }, + 'requestTime': { + 'seconds': 1491000125, + }, + }, + } + with self.assertRaises(ParseError): + self._call_fut(json_mapping) + + def test_registered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + json_mapping = { + 'logName': u'hi-everybody', + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + # Convert to a valid LogEntry. 
+ result = self._call_fut(json_mapping) + entry_pb = LogEntry( + log_name=json_mapping['logName'], + proto_payload=any_pb2.Any( + type_url=type_url, + value=b'\n\014\n\003foo\022\005\032\003Bar', + ), + ) + self.assertEqual(result, entry_pb) + + +@mock.patch( + 'google.cloud.logging._gapic.LoggingServiceV2Client', autospec=True) +def test_make_logging_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_logging_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + + +@mock.patch( + 'google.cloud.logging._gapic.MetricsServiceV2Client', autospec=True) +def test_make_metrics_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_metrics_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + + +@mock.patch( + 'google.cloud.logging._gapic.ConfigServiceV2Client', autospec=True) +def test_make_sinks_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_sinks_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) diff --git a/packages/google-cloud-logging/tests/unit/test__gax.py b/packages/google-cloud-logging/tests/unit/test__gax.py deleted file mode 100644 index c2c5f3199abf..000000000000 --- a/packages/google-cloud-logging/tests/unit/test__gax.py +++ /dev/null @@ -1,1614 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
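Before the body of the removed _gax test module below, note what the three pytest-style test_make_*_api functions above actually pin down: each generated client is constructed from the handwritten client's credentials plus the module-level _CLIENT_INFO, and the result is an adapter exposing _client and _gapic_api. A minimal sketch consistent with those assertions; the adapter internals are an assumption, and only the patched names (LoggingServiceV2Client, _CLIENT_INFO) and the asserted attributes come from the tests:

    class _LoggingAPI(object):
        # Assumed adapter shape; the tests only pin these two attributes.
        def __init__(self, gapic_api, client):
            self._gapic_api = gapic_api
            self._client = client

    def make_logging_api(client):
        # LoggingServiceV2Client and _CLIENT_INFO are the module-level
        # names the tests patch; their definitions are not shown here.
        generated = LoggingServiceV2Client(
            credentials=client._credentials,
            client_info=_CLIENT_INFO,
        )
        return _LoggingAPI(generated, client)

make_metrics_api and make_sinks_api presumably follow the same pattern with MetricsServiceV2Client and ConfigServiceV2Client, which is exactly what their twin tests assert.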
- -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.logging._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - FILTER = 'logName:syslog AND severity>=ERROR' - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_LoggingAPI(_Base, unittest.TestCase): - LOG_NAME = 'log_name' - LOG_PATH = 'projects/%s/logs/%s' % (_Base.PROJECT, LOG_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _LoggingAPI - - return _LoggingAPI - - def test_ctor(self): - gax_api = _GAXLoggingAPI() - client = object() - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_entries_no_paging(self): - import datetime - - from google.api.monitored_resource_pb2 import MonitoredResource - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import UTC - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.logger import Logger - - TOKEN = 'TOKEN' - TEXT = 'TEXT' - resource_pb = MonitoredResource(type='global') - timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) - timestamp_pb = _datetime_to_pb_timestamp(timestamp) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - timestamp=timestamp_pb, - text_payload=TEXT) - response = _GAXPageIterator([entry_pb], page_token=TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], self.FILTER, DESCENDING) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, TOKEN) - # Then check the entries returned. 
- self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, TextEntry) - self.assertEqual(entry.payload, TEXT) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertIsNone(entry.insert_id) - self.assertEqual(entry.timestamp, timestamp) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, self.FILTER) - self.assertEqual(order_by, DESCENDING) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def _list_entries_with_paging_helper(self, payload, struct_pb): - import datetime - - from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import UTC - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.client import Client - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.logger import Logger - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - resource_pb = MonitoredResource(type='global') - timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) - timestamp_pb = _datetime_to_pb_timestamp(timestamp) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - timestamp=timestamp_pb, - json_payload=struct_pb) - response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], page_size=SIZE, page_token=TOKEN) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, StructEntry) - self.assertEqual(entry.payload, payload) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertIsNone(entry.insert_id) - self.assertEqual(entry.timestamp, timestamp) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, '') - self.assertEqual(order_by, '') - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_list_entries_with_paging(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - payload = {'message': 'MESSAGE', 'weather': 'sunny'} - struct_pb = Struct(fields={ - key: Value(string_value=value) for key, value in payload.items() - }) - self._list_entries_with_paging_helper(payload, struct_pb) - - def test_list_entries_with_paging_nested_payload(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - payload = {} - struct_fields = {} - # Add a simple key. 
- key = 'message' - payload[key] = 'MESSAGE' - struct_fields[key] = Value(string_value=payload[key]) - # Add a nested key. - key = 'weather' - sub_value = {} - sub_fields = {} - sub_key = 'temperature' - sub_value[sub_key] = 75 - sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) - sub_key = 'precipitation' - sub_value[sub_key] = False - sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) - # Update the parent payload. - payload[key] = sub_value - struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) - # Make the struct_pb for our dict. - struct_pb = Struct(fields=struct_fields) - self._list_entries_with_paging_helper(payload, struct_pb) - - def _make_log_entry_with_extras(self, labels, iid, type_url, now): - from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud.proto.logging.v2.log_entry_pb2 import ( - LogEntryOperation) - from google.logging.type.http_request_pb2 import HttpRequest - from google.logging.type.log_severity_pb2 import WARNING - from google.protobuf.any_pb2 import Any - - from google.cloud._helpers import _datetime_to_pb_timestamp - - resource_pb = MonitoredResource( - type='global', labels=labels) - proto_payload = Any(type_url=type_url) - timestamp_pb = _datetime_to_pb_timestamp(now) - request_pb = HttpRequest( - request_url='http://example.com/requested', - request_method='GET', - status=200, - referer='http://example.com/referer', - user_agent='AGENT', - cache_hit=True, - request_size=256, - response_size=1024, - remote_ip='1.2.3.4', - ) - operation_pb = LogEntryOperation( - producer='PRODUCER', - first=True, - last=True, - id='OPID', - ) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - proto_payload=proto_payload, - timestamp=timestamp_pb, - severity=WARNING, - insert_id=iid, - http_request=request_pb, - labels=labels, - operation=operation_pb) - return entry_pb - - def test_list_entries_with_extra_properties(self): - import datetime - - # Import the wrappers to register the type URL for BoolValue - # pylint: disable=unused-variable - from google.protobuf import wrappers_pb2 - # pylint: enable=unused-variable - - from google.cloud._helpers import UTC - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.client import Client - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.logger import Logger - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - SEVERITY = 'WARNING' - LABELS = { - 'foo': 'bar', - } - IID = 'IID' - bool_type_url = 'type.googleapis.com/google.protobuf.BoolValue' - entry_pb = self._make_log_entry_with_extras( - LABELS, IID, bool_type_url, NOW) - - response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], page_size=SIZE, page_token=TOKEN) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, NEW_TOKEN) - # Then check the entries returned. 
- self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, ProtobufEntry) - self.assertEqual(entry.payload, { - '@type': bool_type_url, - 'value': False, - }) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertEqual(entry.labels, {'foo': 'bar'}) - self.assertEqual(entry.severity, SEVERITY) - self.assertEqual(entry.http_request, { - 'requestMethod': entry_pb.http_request.request_method, - 'requestUrl': entry_pb.http_request.request_url, - 'status': entry_pb.http_request.status, - 'requestSize': str(entry_pb.http_request.request_size), - 'responseSize': str(entry_pb.http_request.response_size), - 'referer': entry_pb.http_request.referer, - 'userAgent': entry_pb.http_request.user_agent, - 'remoteIp': entry_pb.http_request.remote_ip, - 'cacheHit': entry_pb.http_request.cache_hit, - }) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, '') - self.assertEqual(order_by, '') - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_write_entries_single(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - TEXT = 'TEXT' - ENTRY = { - 'logName': self.LOG_PATH, - 'resource': {'type': 'global'}, - 'textPayload': TEXT, - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries([ENTRY]) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), 1) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, self.LOG_PATH) - self.assertEqual(entry.resource.type, 'global') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.text_payload, TEXT) - - self.assertIsNone(log_name) - self.assertIsNone(resource) - self.assertIsNone(labels) - self.assertEqual(partial_success, False) - self.assertIsNone(options) - - def test_write_entries_w_extra_properties(self): - # pylint: disable=too-many-statements - from datetime import datetime - from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import UTC, _pb_timestamp_to_datetime - - NOW = datetime.utcnow().replace(tzinfo=UTC) - TEXT = 'TEXT' - SEVERITY = 'WARNING' - LABELS = { - 'foo': 'bar', - } - IID = 'IID' - REQUEST_METHOD = 'GET' - REQUEST_URL = 'http://example.com/requested' - STATUS = 200 - REQUEST_SIZE = 256 - RESPONSE_SIZE = 1024 - REFERRER_URL = 'http://example.com/referer' - USER_AGENT = 'Agent/1.0' - REMOTE_IP = '1.2.3.4' - REQUEST = { - 'requestMethod': REQUEST_METHOD, - 'requestUrl': REQUEST_URL, - 'status': STATUS, - 'requestSize': REQUEST_SIZE, - 'responseSize': RESPONSE_SIZE, - 'referer': REFERRER_URL, - 'userAgent': USER_AGENT, - 'remoteIp': REMOTE_IP, - 'cacheHit': False, - } - PRODUCER = 'PRODUCER' - OPID = 'OPID' - OPERATION = { - 'producer': PRODUCER, - 'id': OPID, - 'first': False, - 'last': True, - } - ENTRY = { - 'logName': self.LOG_PATH, - 'resource': {'type': 'global'}, - 'textPayload': TEXT, - 'severity': SEVERITY, - 'labels': LABELS, - 'insertId': IID, - 'timestamp': _datetime_to_rfc3339(NOW), - 'httpRequest': REQUEST, - 
'operation': OPERATION, - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries([ENTRY]) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), 1) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, self.LOG_PATH) - self.assertEqual(entry.resource.type, 'global') - self.assertEqual(entry.text_payload, TEXT) - self.assertEqual(entry.severity, WARNING) - self.assertEqual(entry.labels, LABELS) - self.assertEqual(entry.insert_id, IID) - stamp = _pb_timestamp_to_datetime(entry.timestamp) - self.assertEqual(stamp, NOW) - - request = entry.http_request - self.assertEqual(request.request_method, REQUEST_METHOD) - self.assertEqual(request.request_url, REQUEST_URL) - self.assertEqual(request.status, STATUS) - self.assertEqual(request.request_size, REQUEST_SIZE) - self.assertEqual(request.response_size, RESPONSE_SIZE) - self.assertEqual(request.referer, REFERRER_URL) - self.assertEqual(request.user_agent, USER_AGENT) - self.assertEqual(request.remote_ip, REMOTE_IP) - self.assertEqual(request.cache_hit, False) - - operation = entry.operation - self.assertEqual(operation.producer, PRODUCER) - self.assertEqual(operation.id, OPID) - self.assertFalse(operation.first) - self.assertTrue(operation.last) - - self.assertIsNone(log_name) - self.assertIsNone(resource) - self.assertIsNone(labels) - self.assertEqual(partial_success, False) - self.assertIsNone(options) - # pylint: enable=too-many-statements - - def _write_entries_multiple_helper(self, json_payload, json_struct_pb): - # pylint: disable=too-many-statements - import datetime - from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import UTC - - TEXT = 'TEXT' - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' - PROTO = { - '@type': TIMESTAMP_TYPE_URL, - 'value': _datetime_to_rfc3339(NOW), - } - PRODUCER = 'PRODUCER' - OPID = 'OPID' - URL = 'http://example.com/' - ENTRIES = [ - {'textPayload': TEXT, - 'severity': WARNING}, - {'jsonPayload': json_payload, - 'operation': {'producer': PRODUCER, 'id': OPID}}, - {'protoPayload': PROTO, - 'httpRequest': {'requestUrl': URL}}, - ] - RESOURCE = { - 'type': 'global', - } - LABELS = { - 'foo': 'bar', - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries(ENTRIES, self.LOG_PATH, RESOURCE, LABELS) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), len(ENTRIES)) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.text_payload, TEXT) - self.assertEqual(entry.severity, WARNING) - - entry = entries[1] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.json_payload, json_struct_pb) - operation = entry.operation - self.assertEqual(operation.producer, PRODUCER) - self.assertEqual(operation.id, OPID) - - entry = entries[2] - self.assertIsInstance(entry, LogEntry) - 
self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - proto = entry.proto_payload - self.assertIsInstance(proto, Any) - self.assertEqual(proto.type_url, TIMESTAMP_TYPE_URL) - request = entry.http_request - self.assertEqual(request.request_url, URL) - - self.assertEqual(log_name, self.LOG_PATH) - self.assertEqual(resource, RESOURCE) - self.assertEqual(labels, LABELS) - self.assertEqual(partial_success, False) - self.assertIsNone(options) - # pylint: enable=too-many-statements - - def test_write_entries_multiple(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - json_payload = {'payload': 'PAYLOAD', 'type': 'json'} - json_struct_pb = Struct(fields={ - key: Value(string_value=value) - for key, value in json_payload.items() - }) - self._write_entries_multiple_helper(json_payload, json_struct_pb) - - def test_write_entries_multiple_nested_payload(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - json_payload = {} - struct_fields = {} - # Add a simple key. - key = 'hello' - json_payload[key] = 'me you looking for' - struct_fields[key] = Value(string_value=json_payload[key]) - # Add a nested key. - key = 'everything' - sub_value = {} - sub_fields = {} - sub_key = 'answer' - sub_value[sub_key] = 42 - sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) - sub_key = 'really?' - sub_value[sub_key] = False - sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) - # Update the parent payload. - json_payload[key] = sub_value - struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) - # Make the struct_pb for our dict. - json_struct_pb = Struct(fields=struct_fields) - self._write_entries_multiple_helper(json_payload, json_struct_pb) - - def test_logger_delete(self): - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - def test_logger_delete_not_found(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXLoggingAPI(_delete_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - def test_logger_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXLoggingAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SinksAPI(_Base, unittest.TestCase): - SINK_NAME = 'sink_name' - SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) - DESTINATION_URI = 'faux.googleapis.com/destination' - SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com' - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _SinksAPI - - return _SinksAPI - - def test_ctor(self): - gax_api = _GAXSinksAPI() - client = object() - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def 
test_list_sinks_no_paging(self): - import six - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.sink import Sink - - TOKEN = 'TOKEN' - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - response = _GAXPageIterator([sink_pb], page_token=TOKEN) - gax_api = _GAXSinksAPI(_list_sinks_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_sinks(self.PROJECT) - page = six.next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. - self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - project, page_size, options = gax_api._list_sinks_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertEqual(options.page_token, INITIAL_PAGE) - - def test_list_sinks_w_paging(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.sink import Sink - - TOKEN = 'TOKEN' - PAGE_SIZE = 42 - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - response = _GAXPageIterator([sink_pb]) - gax_api = _GAXSinksAPI(_list_sinks_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_sinks( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. 
- self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - project, page_size, options = gax_api._list_sinks_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, PAGE_SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_sink_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - def test_sink_create_conflict(self): - from google.cloud.exceptions import Conflict - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI(_create_sink_conflict=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(Conflict): - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - parent, sink, unique_writer_identity, options = ( - gax_api._create_sink_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIsNone(options) - self.assertFalse(unique_writer_identity) - - def test_sink_create_ok(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI() - gax_api._create_sink_response = LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=self.FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - api = self._make_one(gax_api, None) - - returned = api.sink_create( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) - - self.assertEqual(returned, { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.SINK_WRITER_IDENTITY, - }) - - parent, sink, unique_writer_identity, options = ( - gax_api._create_sink_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_get_error(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_get(self.PROJECT, self.SINK_NAME) - - def test_sink_get_miss(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_get(self.PROJECT, self.SINK_NAME) - - def test_sink_get_hit(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - RESPONSE = { - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) - api = self._make_one(gax_api, None) - - response = api.sink_get(self.PROJECT, self.SINK_NAME) - - 
self.assertEqual(response, RESPONSE) - - sink_name, options = gax_api._get_sink_called_with - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsNone(options) - - def test_sink_update_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - def test_sink_update_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - sink_name, sink, unique_writer_identity, options = ( - gax_api._update_sink_called_with) - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertFalse(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_update_hit(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - response = LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=self.FILTER, - writer_identity=Test_SinksAPI.SINK_WRITER_IDENTITY, - ) - gax_api = _GAXSinksAPI(_update_sink_response=response) - api = self._make_one(gax_api, None) - - returned = api.sink_update( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - unique_writer_identity=True) - - self.assertEqual(returned, { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.SINK_WRITER_IDENTITY, - }) - - sink_name, sink, unique_writer_identity, options = ( - gax_api._update_sink_called_with) - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_delete(self.PROJECT, self.SINK_NAME) - - def test_sink_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSinksAPI(_sink_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_delete(self.PROJECT, self.SINK_NAME) - - def test_sink_delete_hit(self): - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - api.sink_delete(self.PROJECT, self.SINK_NAME) - - sink_name, options = gax_api._delete_sink_called_with - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_MetricsAPI(_Base, unittest.TestCase): - METRIC_NAME = 'metric_name' - METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) - DESCRIPTION = 'Description' - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _MetricsAPI - - return _MetricsAPI - - def test_ctor(self): - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - 
self.assertIs(api._gax_api, gax_api) - - def test_list_metrics_no_paging(self): - import six - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.metric import Metric - - TOKEN = 'TOKEN' - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - response = _GAXPageIterator([metric_pb], page_token=TOKEN) - gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_metrics(self.PROJECT) - page = six.next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. - self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - project, page_size, options = gax_api._list_log_metrics_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertEqual(options.page_token, INITIAL_PAGE) - - def test_list_metrics_w_paging(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.metric import Metric - - TOKEN = 'TOKEN' - PAGE_SIZE = 42 - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - response = _GAXPageIterator([metric_pb]) - gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_metrics( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. 
- self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - project, page_size, options = gax_api._list_log_metrics_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, PAGE_SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_metric_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_create_conflict(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(Conflict): - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_create_ok(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - parent, metric, options = ( - gax_api._create_log_metric_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(metric, LogMetric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIsNone(options) - - def test_metric_get_error(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_get(self.PROJECT, self.METRIC_NAME) - - def test_metric_get_miss(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_get(self.PROJECT, self.METRIC_NAME) - - def test_metric_get_hit(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - RESPONSE = { - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - } - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) - api = self._make_one(gax_api, None) - - response = api.metric_get(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(response, RESPONSE) - - metric_name, options = gax_api._get_log_metric_called_with - self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsNone(options) - - def test_metric_update_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_update_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_update_hit(self): - from 
google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - response = LogMetric(name=self.METRIC_NAME, - description=self.DESCRIPTION, - filter=self.FILTER) - gax_api = _GAXMetricsAPI(_update_log_metric_response=response) - api = self._make_one(gax_api, None) - - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - metric_name, metric, options = ( - gax_api._update_log_metric_called_with) - self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsInstance(metric, LogMetric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIsNone(options) - - def test_metric_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - def test_metric_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI(_log_metric_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - def test_metric_delete_hit(self): - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - metric_name, options = gax_api._delete_log_metric_called_with - self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test__parse_log_entry(unittest.TestCase): - - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gax import _parse_log_entry - - return _parse_log_entry(*args, **kwargs) - - def test_simple(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') - result = self._call_fut(entry_pb) - expected = { - 'logName': entry_pb.log_name, - 'textPayload': entry_pb.text_payload, - } - self.assertEqual(result, expected) - - @mock.patch('google.cloud.logging._gax.MessageToDict', - side_effect=TypeError) - def test_non_registry_failure(self, msg_to_dict_mock): - entry_pb = mock.Mock(spec=['HasField']) - entry_pb.HasField.return_value = False - with self.assertRaises(TypeError): - self._call_fut(entry_pb) - - entry_pb.HasField.assert_called_once_with('proto_payload') - msg_to_dict_mock.assert_called_once_with(entry_pb) - - def test_unregistered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.timestamp_pb2 import Timestamp - - pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' - # Make sure the descriptor is not known in the registry. 
- with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = 'type.googleapis.com/' + type_name - metadata_bytes = ( - b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') - any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) - timestamp = Timestamp(seconds=61, nanos=1234000) - - entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) - result = self._call_fut(entry_pb) - self.assertEqual(len(result), 2) - self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') - # NOTE: This "hack" is needed on Windows, where the equality check - # for an ``Any`` instance fails on unregistered types. - self.assertEqual(result['protoPayload'].type_url, type_url) - self.assertEqual(result['protoPayload'].value, metadata_bytes) - - def test_registered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' - # Make sure the descriptor is known in the registry. - descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') - - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' - struct_pb = Struct( - fields={field_name: Value(string_value=field_value)}) - any_pb = any_pb2.Any( - type_url=type_url, - value=struct_pb.SerializeToString(), - ) - - entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') - result = self._call_fut(entry_pb) - expected_proto = { - 'logName': entry_pb.log_name, - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, - } - self.assertEqual(result, expected_proto) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test__log_entry_mapping_to_pb(unittest.TestCase): - - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gax import _log_entry_mapping_to_pb - - return _log_entry_mapping_to_pb(*args, **kwargs) - - def test_simple(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - result = self._call_fut({}) - self.assertEqual(result, LogEntry()) - - def test_unregistered_type(self): - from google.protobuf import descriptor_pool - from google.protobuf.json_format import ParseError - - pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = 'type.googleapis.com/' + type_name - json_mapping = { - 'protoPayload': { - '@type': type_url, - 'originalRequest': { - 'name': 'foo', - 'location': 'bar', - }, - 'requestTime': { - 'seconds': 1491000125, - }, - }, - } - with self.assertRaises(ParseError): - self._call_fut(json_mapping) - - def test_registered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - - pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' - # Make sure the descriptor is known in the registry. 
- descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') - - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' - json_mapping = { - 'logName': u'hi-everybody', - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, - } - # Convert to a valid LogEntry. - result = self._call_fut(json_mapping) - entry_pb = LogEntry( - log_name=json_mapping['logName'], - proto_payload=any_pb2.Any( - type_url=type_url, - value=b'\n\014\n\003foo\022\005\032\003Bar', - ), - ) - self.assertEqual(result, entry_pb) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_logging_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_logging_api - - return make_gax_logging_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _LoggingAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - LoggingServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - logging_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - self.assertIsInstance(logging_api, _LoggingAPI) - self.assertIs(logging_api._gax_api, generated) - self.assertIs(logging_api._client, client) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_metrics_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_metrics_api - - return make_gax_metrics_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _MetricsAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - MetricsServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - metrics_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - 
self.assertIsInstance(metrics_api, _MetricsAPI) - self.assertIs(metrics_api._gax_api, generated) - self.assertIs(metrics_api._client, client) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_sinks_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_sinks_api - - return make_gax_sinks_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _SinksAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - ConfigServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - sinks_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - self.assertIsInstance(sinks_api, _SinksAPI) - self.assertIs(sinks_api._gax_api, generated) - self.assertIs(sinks_api._client, client) - - -class _GAXLoggingAPI(_GAXBaseAPI): - - _delete_not_found = False - - def list_log_entries( - self, resource_names, project_ids, filter_, - order_by, page_size, options): - self._list_log_entries_called_with = ( - resource_names, project_ids, filter_, - order_by, page_size, options) - return self._list_log_entries_response - - def write_log_entries(self, entries, log_name, resource, labels, - partial_success, options): - self._write_log_entries_called_with = ( - entries, log_name, resource, labels, partial_success, options) - - def delete_log(self, log_name, options): - from google.gax.errors import GaxError - - self._delete_log_called_with = log_name, options - if self._random_gax_error: - raise GaxError('error') - if self._delete_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) - - -class _GAXSinksAPI(_GAXBaseAPI): - - _create_sink_conflict = False - _sink_not_found = False - - def list_sinks(self, parent, page_size, options): - self._list_sinks_called_with = parent, page_size, options - return self._list_sinks_response - - def create_sink(self, parent, sink, unique_writer_identity, options): - from google.gax.errors import GaxError - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - self._create_sink_called_with = ( - parent, sink, unique_writer_identity, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_sink_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - return self._create_sink_response - - def get_sink(self, sink_name, options): - from google.gax.errors import GaxError - - self._get_sink_called_with = sink_name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_sink_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def update_sink(self, sink_name, sink, 
unique_writer_identity, options): - from google.gax.errors import GaxError - - self._update_sink_called_with = ( - sink_name, sink, unique_writer_identity, options) - if self._random_gax_error: - raise GaxError('error') - try: - return self._update_sink_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def delete_sink(self, sink_name, options=None): - from google.gax.errors import GaxError - - self._delete_sink_called_with = sink_name, options - if self._random_gax_error: - raise GaxError('error') - if self._sink_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) - - -class _GAXMetricsAPI(_GAXBaseAPI): - - _create_log_metric_conflict = False - _log_metric_not_found = False - - def list_log_metrics(self, parent, page_size, options): - self._list_log_metrics_called_with = parent, page_size, options - return self._list_log_metrics_response - - def create_log_metric(self, parent, metric, options): - from google.gax.errors import GaxError - - self._create_log_metric_called_with = parent, metric, options - if self._random_gax_error: - raise GaxError('error') - if self._create_log_metric_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - - def get_log_metric(self, metric_name, options): - from google.gax.errors import GaxError - - self._get_log_metric_called_with = metric_name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_log_metric_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def update_log_metric(self, metric_name, metric, options=None): - from google.gax.errors import GaxError - - self._update_log_metric_called_with = metric_name, metric, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._update_log_metric_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def delete_log_metric(self, metric_name, options=None): - from google.gax.errors import GaxError - - self._delete_log_metric_called_with = metric_name, options - if self._random_gax_error: - raise GaxError('error') - if self._log_metric_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 312f933cad6f..9636e8ff6954 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -48,7 +48,7 @@ def test_ctor(self): client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) - def test_logging_api_wo_gax(self): + def test_logging_api_wo_gapic(self): from google.cloud.logging._http import _LoggingAPI client = self._make_one(self.PROJECT, @@ -64,7 +64,7 @@ def test_logging_api_wo_gax(self): again = client.logging_api self.assertIs(again, api) - def test_logging_api_w_gax(self): + def test_logging_api_w_gapic(self): clients = [] api_obj = object() @@ -76,10 +76,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_logging_api', - new=make_api) - with patch: + patch = mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_logging_api.side_effect = make_api api = client.logging_api self.assertIs(api, api_obj) @@ -88,7 +87,7 @@ def make_api(client_obj): again = 
client.logging_api self.assertIs(again, api) - def test_no_gax_ctor(self): + def test_no_gapic_ctor(self): from google.cloud.logging._http import _LoggingAPI creds = _make_credentials() @@ -102,7 +101,7 @@ def test_no_gax_ctor(self): api = client.logging_api self.assertIsInstance(api, _LoggingAPI) - def test_sinks_api_wo_gax(self): + def test_sinks_api_wo_gapic(self): from google.cloud.logging._http import _SinksAPI client = self._make_one( @@ -118,7 +117,7 @@ def test_sinks_api_wo_gax(self): again = client.sinks_api self.assertIs(again, api) - def test_sinks_api_w_gax(self): + def test_sinks_api_w_gapic(self): clients = [] api_obj = object() @@ -130,10 +129,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_sinks_api', - new=make_api) - with patch: + patch = mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_sinks_api.side_effect = make_api api = client.sinks_api self.assertIs(api, api_obj) @@ -142,7 +140,7 @@ def make_api(client_obj): again = client.sinks_api self.assertIs(again, api) - def test_metrics_api_wo_gax(self): + def test_metrics_api_wo_gapic(self): from google.cloud.logging._http import _MetricsAPI client = self._make_one( @@ -158,7 +156,7 @@ def test_metrics_api_wo_gax(self): again = client.metrics_api self.assertIs(again, api) - def test_metrics_api_w_gax(self): + def test_metrics_api_w_gapic(self): clients = [] api_obj = object() @@ -170,10 +168,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_metrics_api', - new=make_api) - with patch: + patch = mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_metrics_api.side_effect = make_api api = client.metrics_api self.assertIs(api, api_obj) From 8cb879b7e5a24ac31f17ca443e108f77ad517c27 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Sun, 21 Jan 2018 23:50:16 -0800 Subject: [PATCH 152/855] Logging: Fix logging system tests (#4768) --- packages/google-cloud-logging/nox.py | 5 +-- .../v2/test_system_logging_service_v2_v2.py | 3 +- .../tests/system/test_system.py | 33 ++++++++++--------- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index fdf48fedafa4..d4dd19ce74b0 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -113,10 +113,7 @@ def system(session, py): '-vvv', '-s', 'tests/system', - *session.posargs, - # Currently allowed to fail due to very high flakiness. 
- success_codes=range(0, 100) - ) + *session.posargs) @nox.session diff --git a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py index ad99b1081cc5..bf3ce70c0d92 100644 --- a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py +++ b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py @@ -15,6 +15,7 @@ import os import time +import google.auth from google.api import monitored_resource_pb2 from google.cloud import logging_v2 from google.cloud.logging_v2.proto import log_entry_pb2 @@ -23,7 +24,7 @@ class TestSystemLoggingServiceV2(object): def test_write_log_entries(self): - project_id = os.environ['PROJECT_ID'] + _, project_id = google.auth.default() client = logging_v2.LoggingServiceV2Client() log_name = client.log_path(project_id, 'test-{0}'.format(time.time())) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index ffa4615612d9..b211fd860781 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -103,7 +103,11 @@ def setUp(self): def tearDown(self): retry = RetryErrors(NotFound, max_tries=9) for doomed in self.to_delete: - retry(doomed.delete)() + try: + retry(doomed.delete)() + except AttributeError: + client, dataset = doomed + retry(client.delete_dataset)(dataset) logging.getLogger().handlers = self._handlers_cache[:] @staticmethod @@ -427,25 +431,24 @@ def test_create_sink_pubsub_topic(self): def _init_bigquery_dataset(self): from google.cloud import bigquery - from google.cloud.bigquery.dataset import AccessGrant - DATASET_NAME = ( + from google.cloud.bigquery.dataset import AccessEntry + dataset_name = ( 'system_testing_dataset' + _RESOURCE_ID).replace('-', '_') - DATASET_URI = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( - Config.CLIENT.project, DATASET_NAME,) + dataset_uri = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( + Config.CLIENT.project, dataset_name,) # Create the destination dataset, and set up the ACL to allow # Stackdriver Logging to write into it. 
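        # A note on the cleanup contract with the reworked ``tearDown``
        # above: this helper appends a ``(client, dataset)`` tuple to
        # ``self.to_delete``. A tuple has no ``delete`` attribute, so
        # ``retry(doomed.delete)`` raises ``AttributeError``, and the
        # fallback branch unpacks the pair and calls
        # ``client.delete_dataset(dataset)`` instead.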
bigquery_client = bigquery.Client() - dataset = bigquery_client.dataset(DATASET_NAME) - dataset.create() - self.to_delete.append(dataset) - dataset.reload() - grants = dataset.access_grants - grants.append(AccessGrant( - 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) - dataset.access_grants = grants - dataset.update() - return DATASET_URI + dataset_ref = bigquery_client.dataset(dataset_name) + dataset = bigquery_client.create_dataset(bigquery.Dataset(dataset_ref)) + self.to_delete.append((bigquery_client, dataset)) + bigquery_client.get_dataset(dataset) + access = AccessEntry( + 'WRITER', 'groupByEmail', 'cloud-logs@google.com') + dataset.access_entries.append(access) + bigquery_client.update_dataset(dataset, ['access_entries']) + return dataset_uri def test_create_sink_bigquery_dataset(self): SINK_NAME = 'test-create-sink-dataset%s' % (_RESOURCE_ID,) From a812b5b1a092967b2a11f0eb60656b1dffd2c0ae Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 5 Feb 2018 11:19:33 -0800 Subject: [PATCH 153/855] Remove debug print statement (#4838) --- packages/google-cloud-logging/google/cloud/logging/_gapic.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gapic.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py index a292721111eb..af2a498eeb62 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gapic.py @@ -119,7 +119,6 @@ def write_entries(self, entries, logger_name=None, resource=None, """ partial_success = False entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - print(entry_pbs) self._gapic_api.write_log_entries( entry_pbs, log_name=logger_name, resource=resource, labels=labels, partial_success=partial_success) From d44b9f74da9e6e6eb03e39b9ced93dbb1f234b4d Mon Sep 17 00:00:00 2001 From: Angela Li Date: Mon, 5 Feb 2018 11:20:07 -0800 Subject: [PATCH 154/855] Move the code path of get_gae_labels() to emit() (#4824) --- .../cloud/logging/handlers/app_engine.py | 33 ++++++++++++++----- .../google/cloud/logging/handlers/handlers.py | 2 ++ .../tests/unit/handlers/test_app_engine.py | 4 +-- 3 files changed, 28 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 4bace1f1e20e..1b0101cbf63d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -18,10 +18,10 @@ and labels for App Engine logs. """ +import logging import os from google.cloud.logging.handlers._helpers import get_trace_id -from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.transports import BackgroundThreadTransport from google.cloud.logging.resource import Resource @@ -34,7 +34,7 @@ _TRACE_ID_LABEL = 'appengine.googleapis.com/trace_id' -class AppEngineHandler(CloudLoggingHandler): +class AppEngineHandler(logging.StreamHandler): """A logging handler that sends App Engine-formatted logs to Stackdriver. 
:type client: :class:`~google.cloud.logging.client.Client` @@ -48,13 +48,13 @@ class AppEngineHandler(logging.StreamHandler): """ def __init__(self, client, + name=_DEFAULT_GAE_LOGGER_NAME, transport=BackgroundThreadTransport): - super(AppEngineHandler, self).__init__( - client, - name=_DEFAULT_GAE_LOGGER_NAME, - transport=transport, - resource=self.get_gae_resource(), - labels=self.get_gae_labels()) + super(AppEngineHandler, self).__init__() + self.name = name + self.client = client + self.transport = transport(client, name) + self.resource = self.get_gae_resource() def get_gae_resource(self): """Return the GAE resource using the environment variables. @@ -88,3 +88,20 @@ def get_gae_labels(self): gae_labels[_TRACE_ID_LABEL] = trace_id return gae_labels + + def emit(self, record): + """Actually log the specified logging record. + + Overrides the default emit behavior of ``StreamHandler``. + + See https://docs.python.org/2/library/logging.html#handler-objects + + :type record: :class:`logging.LogRecord` + :param record: The record to be logged. + """ + message = super(AppEngineHandler, self).format(record) + self.transport.send( + record, + message, + resource=self.resource, + labels=self.get_gae_labels()) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index c56e0393833b..5973006e403e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -35,6 +35,8 @@ class CloudLoggingHandler(logging.StreamHandler): route Python standard logging messages directly to the Stackdriver Logging API. + + This handler is used when not in a GAE or GKE environment. + This handler supports both an asynchronous and synchronous transport. :type client: :class:`google.cloud.logging.client` diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 07ac4eaa168a..8dd9bf108d08 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -46,7 +46,6 @@ def test_constructor(self): self.assertEqual(handler.resource.labels['project_id'], 'test_project') self.assertEqual(handler.resource.labels['module_id'], 'test_service') self.assertEqual(handler.resource.labels['version_id'], 'test_version') - self.assertEqual(handler.labels, {}) def test_emit(self): client = mock.Mock(project=self.PROJECT, spec=['project']) @@ -74,11 +73,10 @@ def _get_gae_labels_helper(self, trace_id): # The handler actually calls ``get_gae_labels()``.
with get_trace_patch as mock_get_trace: handler = self._make_one(client, transport=_Transport) - mock_get_trace.assert_called_once_with() gae_labels = handler.get_gae_labels() self.assertEqual(mock_get_trace.mock_calls, - [mock.call(), mock.call()]) + [mock.call()]) return gae_labels From f9f420207c446e23f4e228563a31e6c339d4b274 Mon Sep 17 00:00:00 2001 From: Thomas Date: Tue, 13 Feb 2018 05:49:38 +0000 Subject: [PATCH 155/855] Add `max_latency` to `BackgroundThreadTransport` (#4762) --- .../handlers/transports/background_thread.py | 42 ++++++++-- .../transports/test_background_thread.py | 84 ++++++++++++++++++- 2 files changed, 117 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index d5f40d855cb3..df22118cfc00 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -22,6 +22,7 @@ import atexit import logging import threading +import time from six.moves import range from six.moves import queue @@ -30,12 +31,13 @@ _DEFAULT_GRACE_PERIOD = 5.0 # Seconds _DEFAULT_MAX_BATCH_SIZE = 10 +_DEFAULT_MAX_LATENCY = 0 # Seconds _WORKER_THREAD_NAME = 'google.cloud.logging.Worker' _WORKER_TERMINATOR = object() _LOGGER = logging.getLogger(__name__) -def _get_many(queue_, max_items=None): +def _get_many(queue_, max_items=None, max_latency=0): """Get multiple items from a Queue. Gets at least one (blocking) and at most ``max_items`` items @@ -48,14 +50,22 @@ def _get_many(queue_, max_items=None): :param max_items: The maximum number of items to get. If ``None``, then all available items in the queue are returned. + :type max_latency: float + :param max_latency: The maximum number of seconds to wait for more than one + item from a queue. This number includes the time required to retrieve + the first item. + :rtype: Sequence :returns: A sequence of items retrieved from the queue. """ + start = time.time() # Always return at least one item. items = [queue_.get()] while max_items is None or len(items) < max_items: try: - items.append(queue_.get_nowait()) + elapsed = time.time() - start + timeout = max(0, max_latency - elapsed) + items.append(queue_.get(timeout=timeout)) except queue.Empty: break return items @@ -74,13 +84,22 @@ class _Worker(object): :type max_batch_size: int :param max_batch_size: The maximum number of items to send at a time in the background thread. + + :type max_latency: float + :param max_latency: The amount of time to wait for new logs before + sending a new batch. It is strongly recommended to keep this smaller + than the grace_period. This means this is effectively the longest + amount of time the background thread will hold onto log entries + before sending them to the server. 
""" def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD, - max_batch_size=_DEFAULT_MAX_BATCH_SIZE): + max_batch_size=_DEFAULT_MAX_BATCH_SIZE, + max_latency=_DEFAULT_MAX_LATENCY): self._cloud_logger = cloud_logger self._grace_period = grace_period self._max_batch_size = max_batch_size + self._max_latency = max_latency self._queue = queue.Queue(0) self._operational_lock = threading.Lock() self._thread = None @@ -112,7 +131,9 @@ def _thread_main(self): quit_ = False while True: batch = self._cloud_logger.batch() - items = _get_many(self._queue, max_items=self._max_batch_size) + items = _get_many( + self._queue, max_items=self._max_batch_size, + max_latency=self._max_latency) for item in items: if item is _WORKER_TERMINATOR: @@ -249,15 +270,24 @@ class BackgroundThreadTransport(Transport): :type batch_size: int :param batch_size: The maximum number of items to send at a time in the background thread. + + :type max_latency: float + :param max_latency: The amount of time to wait for new logs before + sending a new batch. It is strongly recommended to keep this smaller + than the grace_period. This means this is effectively the longest + amount of time the background thread will hold onto log entries + before sending them to the server. """ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, - batch_size=_DEFAULT_MAX_BATCH_SIZE): + batch_size=_DEFAULT_MAX_BATCH_SIZE, + max_latency=_DEFAULT_MAX_LATENCY): self.client = client logger = self.client.logger(name) self.worker = _Worker(logger, grace_period=grace_period, - max_batch_size=batch_size) + max_batch_size=batch_size, + max_latency=max_latency) self.worker.start() def send(self, record, message, resource=None, labels=None): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 2be6198a69c5..604462bdc2e6 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -78,19 +78,24 @@ def test_flush(self): def test_worker(self): client = _Client(self.PROJECT) - name = 'python_logger' + name = 'python_logger' batch_size = 30 grace_period = 20. + max_latency = 0.1 transport, worker = self._make_one(client, name, grace_period=grace_period, - batch_size=batch_size) + batch_size=batch_size, + max_latency=max_latency) worker_grace_period = worker.call_args[1]['grace_period'] # **kwargs. 
worker_batch_size = worker.call_args[1]['max_batch_size'] + worker_max_latency = worker.call_args[1]['max_latency'] self.assertEqual(worker_grace_period, grace_period) self.assertEqual(worker_batch_size, batch_size) + self.assertEqual(worker_max_latency, + max_latency) class Test_Worker(unittest.TestCase): @@ -115,13 +120,16 @@ def test_constructor(self): logger = _Logger(self.NAME) grace_period = 50 max_batch_size = 50 + max_latency = 0.1 worker = self._make_one( + logger, grace_period=grace_period, max_batch_size=max_batch_size, + max_latency=max_latency) - logger, grace_period=grace_period, max_batch_size=max_batch_size) self.assertEqual(worker._cloud_logger, logger) self.assertEqual(worker._grace_period, grace_period) self.assertEqual(worker._max_batch_size, max_batch_size) + self.assertEqual(worker._max_latency, max_latency) self.assertFalse(worker.is_alive) self.assertIsNone(worker._thread) @@ -264,6 +272,74 @@ def test__thread_main_batches(self): self.assertFalse(worker._cloud_logger._batch.commit_called) self.assertEqual(worker._queue.qsize(), 0) + @mock.patch('time.time', autospec=True, return_value=1) + def test__thread_main_max_latency(self, time): + # Note: this test is a bit brittle as it assumes the operation of + # _get_many invokes queue.get() followed by queue._get(). It fails + # the "change detector" test in that way. However, this is still a + # useful test to verify the queue timeout is appropriately calculated. + from six.moves import queue + from google.cloud.logging.handlers.transports import background_thread + + # Use monotonically increasing time. + time.side_effect = range(1, 6) + + worker = self._make_one( + _Logger(self.NAME), max_latency=2, max_batch_size=10) + worker._queue = mock.create_autospec(queue.Queue, instance=True) + + worker._queue.get.side_effect = [ + {'info': {'message': '1'}}, # Single record. + queue.Empty(), # Emulate a queue.get() timeout. + {'info': {'message': '1'}}, # Second record. + background_thread._WORKER_TERMINATOR, # Stop the thread. + queue.Empty(), # Emulate a queue.get() timeout. + ] + + worker._thread_main() + + self.assertEqual(worker._cloud_logger._num_batches, 2) + self.assertTrue(worker._cloud_logger._batch.commit_called) + self.assertEqual(worker._cloud_logger._batch.commit_count, 1) + + # Time should have been called five times. + # + # For the first batch, it should have been called: + # * Once to get the start time. (1) + # * Once to get the elapsed time while grabbing the second item. + # (2) + # + # For the second batch, it should have been called: + # * Once to get the start time. (3) + # * Once to get the elapsed time while grabbing the second item, + # the terminator. (4) + # * Once to get the elapsed time on the final attempt, which + # times out against the empty queue. (5) + # + self.assertEqual(time.call_count, 5) + + # Queue.get should've been called 5 times as well, but with different + # timeouts due to the monotonically increasing time. + # + # For the first batch, it will be called once without a timeout + # (for the first item) and then with timeout=1, as start will be + # 1 and now will be 2. + # + # For the second batch, it will be called once without a timeout + # (for the first item) and then with timeout=1, as start will be + # 3 and now will be 4, and finally with timeout=0 as start will be 3 + # and now will be 5.
+ # + worker._queue.get.assert_has_calls([ + mock.call(), + mock.call(timeout=1), + mock.call(), + mock.call(timeout=1), + mock.call(timeout=0) + ]) + def test_flush(self): worker = self._make_one(_Logger(self.NAME)) worker._queue = mock.Mock(spec=queue.Queue) @@ -331,9 +407,11 @@ def __init__(self, name): self.name = name self._batch_cls = _Batch self._batch = None + self._num_batches = 0 def batch(self): self._batch = self._batch_cls() + self._num_batches += 1 return self._batch From 028af62ad6a9bd5d2d81722150c71cea0607e688 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Feb 2018 10:37:42 -0800 Subject: [PATCH 156/855] Release 1.5.0 (#4901) --- packages/google-cloud-logging/CHANGELOG.md | 30 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 878346719f68..e3993ef04776 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,36 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.5.0 + +### New features + +- Added `max_latency` to `BackgroundThreadTransport`. (#4762) +- Added support for unique writer identity in `Sink`. (#4595, #4708, #4704, #4706) + +### Implementation changes + +- The underlying auto-generated client library was re-generated to pick up new features and bugfixes. (#4759) +- Moved the code path of `get_gae_labels()` to `emit()`. (#4824) +- Removed a debug print statement. (#4838) +- `LogSink.create` captures the server-generated `writerIdentity`. (#4707) +- Accommodated a back-end change making `Sink.filter` optional. (#4699) + +### Testing + +- Fixed system tests. (#4768) +- Hardened test for `retrieve_metadata_server` against transparent DNS proxies. (#4698) +- Added cleanup for Pub/Sub topic in logging system test. (#4532) +- Added another check for Python 2.7 in Logging `nox -s default`. (#4523) +- Pinned `django` test dependency to `< 2.0` in Python 2.7. (#4519) +- Made a `nox -s default` session for all packages. (#4324) +- Shortened test names. (#4321) + +### Documentation + +- Added doc to highlight missing `uniqueWriterIdentity` field. (#4579) +- Fixed "Fore" -> "For" typo in README docs. (#4317) + ## 1.4.0 ### Implementation Changes diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6eb782259fed..3d49660bfb5f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -57,7 +57,7 @@ setup( name='google-cloud-logging', - version='1.4.1.dev1', + version='1.5.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ From 257649e5f0fd7f41e4a6fecee4c77bd26eb4623d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Feb 2018 10:28:50 -0800 Subject: [PATCH 157/855] Normalize all setup.py files (#4909) --- packages/google-cloud-logging/setup.py | 99 +++++++++++++----------- 1 file changed, 59 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 3d49660bfb5f..c6a380578c56 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
@@ -12,59 +12,78 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io import os -from setuptools import find_packages -from setuptools import setup +import setuptools -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +# Package metadata. + +name = 'google-cloud-logging' +description = 'Stackdriver Logging API client library' +version = '1.5.0' +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Stable' +release_status = 'Development Status :: 5 - Production/Stable' +dependencies = [ + 'google-cloud-core<0.29dev,>=0.28.0', + 'google-api-core[grpc]<0.2.0dev,>=0.1.1', +] +extras = { +} + + +# Setup boilerplate below this line. + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +packages = [ + package for package in setuptools.find_packages() + if package.startswith('google')] + +# Determine which namespaces are needed. +namespaces = ['google'] +if 'google.cloud' in packages: + namespaces.append('google.cloud') -with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: - README = file_obj.read() -# NOTE: This is duplicated throughout and we should try to -# consolidate. -SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 5 - Production/Stable', +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author='Google LLC', + author_email='googleapis-packages@google.com', + license='Apache 2.0', + url='https://github.com/GoogleCloudPlatform/google-cloud-python', + classifiers=[ + release_status, 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', + 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Operating System :: OS Independent', 'Topic :: Internet', ], -} - - -REQUIREMENTS = [ - 'google-cloud-core >= 0.28.0, < 0.29dev', - 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', -] - -setup( - name='google-cloud-logging', - version='1.5.0', - description='Python Client for Stackdriver Logging', - long_description=README, - namespace_packages=[ - 'google', - 'google.cloud', - ], - packages=find_packages(exclude=('tests*',)), - install_requires=REQUIREMENTS, - **SETUP_BASE + platforms='Posix; MacOS X; Windows', + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, ) From a129e917e73d5e191751c38a00bf13ecfa7801f1 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Feb 2018 16:34:37 -0800 Subject: [PATCH 158/855] Re-enable lint for tests, remove usage of pylint (#4921) --- packages/google-cloud-logging/.flake8 | 6 ++++++ 
packages/google-cloud-logging/nox.py | 11 ++--------- .../tests/unit/handlers/test__helpers.py | 1 - .../tests/unit/handlers/test_app_engine.py | 1 - .../tests/unit/handlers/test_handlers.py | 3 ++- .../handlers/transports/test_background_thread.py | 3 ++- .../google-cloud-logging/tests/unit/test_logger.py | 9 ++++----- 7 files changed, 16 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 25168dc87605..1f44a90f8195 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,5 +1,11 @@ [flake8] exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. __pycache__, .git, *.pyc, diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index d4dd19ce74b0..d6ec42d041c4 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -124,16 +124,9 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) + session.install('flake8') session.install('.') - session.run('flake8', 'google/cloud/logging') - session.run( - 'gcp-devrel-py-tools', 'run-pylint', - '--config', 'pylint.config.py', - '--library-filesets', 'google', - '--test-filesets', 'tests', - # Temporarily allow this to fail. - success_codes=range(0, 100)) + session.run('flake8', 'google', 'tests') @nox.session diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index a448f339a046..b53098633d4e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -80,7 +80,6 @@ def get(self): self.response.out.write(json.dumps(trace_id)) - @unittest.skipIf(six.PY3, 'webapp2 is Python 2 only') class Test_get_trace_id_from_webapp2(unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 8dd9bf108d08..df933bd19c01 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -33,7 +33,6 @@ def test_constructor(self): from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV - from google.cloud.logging.handlers.app_engine import _TRACE_ID_LABEL client = mock.Mock(project=self.PROJECT, spec=['project']) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index c5a6e4434c43..a23a0296dced 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -38,7 +38,8 @@ def test_emit(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE client = _Client(self.PROJECT) - handler = self._make_one(client, transport=_Transport, resource=_GLOBAL_RESOURCE) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE) logname = 'loggername' message = 'hello world' record = logging.LogRecord(logname, logging, None, None, message, diff --git 
a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 604462bdc2e6..1f49763b8ce4 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -380,7 +380,8 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct(self, info, severity=logging.INFO, resource=None, labels=None): + def log_struct( + self, info, severity=logging.INFO, resource=None, labels=None): from google.cloud.logging.logger import _GLOBAL_RESOURCE assert resource is None diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 7e9893f46cc9..30b07466c110 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -573,8 +573,9 @@ def test_log_text_explicit(self): type='gae_app', labels={ 'module_id': 'default', - 'version_id': 'test', - }) + 'version_id': 'test' + } + ) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -632,7 +633,7 @@ def test_log_struct_explicit(self): timestamp=TIMESTAMP, resource=RESOURCE) self.assertEqual( batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP , + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, RESOURCE)]) def test_log_proto_defaults(self): @@ -732,7 +733,6 @@ def test_commit_w_bound_client(self): from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.logging.logger import _GLOBAL_RESOURCE - TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) @@ -825,7 +825,6 @@ def test_context_mgr_success(self): from google.cloud.logging.logger import Logger from google.cloud.logging.logger import _GLOBAL_RESOURCE - TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) From 24801920bd82db44871368b6781927121b993e50 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 26 Feb 2018 14:24:04 -0800 Subject: [PATCH 159/855] Install local dependencies when running lint (#4936) --- packages/google-cloud-logging/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index d6ec42d041c4..a824c4ec866d 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -124,7 +124,7 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google', 'tests') From 5179a40d8439ff881c60e31502a70ed64849d89f Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 11:17:14 -0800 Subject: [PATCH 160/855] Update dependency range for api-core to include v1.0.0 releases (#4944) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c6a380578c56..a1b2fed2af05 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<0.2.0dev,>=0.1.1', + 'google-api-core[grpc]<2.0.0dev,>=0.1.1', ] extras = { } From ad56642d2b9390cac8c9fc74930b9d194cbf36ce Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 13:21:52 -0800 Subject: [PATCH 161/855] Use public item_to_value property in Trace and Logging (#4947) --- .../google-cloud-logging/google/cloud/logging/_gapic.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gapic.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py index af2a498eeb62..2ff0d30cf3af 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gapic.py @@ -94,7 +94,7 @@ def list_entries(self, projects, filter_='', order_by='', # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. 
loggers = {} - page_iter._item_to_value = functools.partial( + page_iter.item_to_value = functools.partial( _item_to_entry, loggers=loggers) return page_iter @@ -175,7 +175,7 @@ def list_sinks(self, project, page_size=0, page_token=None): path, page_size=page_size) page_iter.client = self._client page_iter.next_page_token = page_token - page_iter._item_to_value = _item_to_sink + page_iter.item_to_value = _item_to_sink return page_iter def sink_create(self, project, sink_name, filter_, destination, @@ -327,7 +327,7 @@ def list_metrics(self, project, page_size=0, page_token=None): path, page_size=page_size) page_iter.client = self._client page_iter.next_page_token = page_token - page_iter._item_to_value = _item_to_metric + page_iter.item_to_value = _item_to_metric return page_iter def metric_create(self, project, metric_name, filter_, description): From de4a7486166d10008fff423a0d43f54aea048b40 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 13:45:05 -0800 Subject: [PATCH 162/855] Update minimum api-core version to 1.0.0 for Datastore, BigQuery, Trace, Logging, and Spanner (#4946) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index a1b2fed2af05..5cb0e3c40a41 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<2.0.0dev,>=0.1.1', + 'google-api-core[grpc]<2.0.0dev,>=1.0.0', ] extras = { } From ce3787de4e4e1e951f5ec069123414a6a77ced59 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 28 Feb 2018 09:01:32 -0800 Subject: [PATCH 163/855] Release logging 1.6.0 (#4961) --- packages/google-cloud-logging/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index e3993ef04776..548414bef7c8 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.6.0 + +### Dependencies + +- The minimum version for `google-api-core` has been updated to version 1.0.0. This may cause some incompatibility with older google-cloud libraries; you will need to update those libraries if you have a dependency conflict.
(#4944, #4946) + +### Testing and internal changes + +- Install local dependencies when running lint (#4936) +- Re-enable lint for tests, remove usage of pylint (#4921) +- Normalize all setup.py files (#4909) + ## 1.5.0 # New features diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5cb0e3c40a41..8ec56c86fbde 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.5.0' +version = '1.6.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 6eeaf99db22c0116fbd91fb2076babded38c0b6d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 15 Mar 2018 08:52:22 -0700 Subject: [PATCH 164/855] Fix bad trove classifier --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8ec56c86fbde..536c918a5df9 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -26,7 +26,7 @@ # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Stable' +# 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-cloud-core<0.29dev,>=0.28.0', From b057d295ef6fd7cc952d30b639c93363e1db612e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 27 Apr 2018 12:15:13 -0400 Subject: [PATCH 165/855] Plug leaky sink in systests. (#5247) Fixes #5242. --- packages/google-cloud-logging/tests/system/test_system.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index b211fd860781..00c94cbbf4c2 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -427,6 +427,7 @@ def test_create_sink_pubsub_topic(self): sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) self.assertFalse(sink.exists()) sink.create() + self.to_delete.append(sink) self.assertTrue(sink.exists()) def _init_bigquery_dataset(self): From 437a139822f80d68725c5f14d0e964aabb490f66 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 4 May 2018 09:01:24 -0700 Subject: [PATCH 166/855] Add Test runs for Python 3.7 and remove 3.4 (#5295) * remove 3.4 from unit test runs * add 3.7 to most packages. 
PubSub, Monitoring, BigQuery not enabled * Fix #5292 by draining queue in a way compatible with SimpleQueue and Queue --- packages/google-cloud-logging/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index a824c4ec866d..c02870ba753b 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -72,7 +72,7 @@ def default(session): @nox.session -@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) def unit(session, py): """Run the unit test suite.""" From 40d6539c6995c39c6241dcbdcfd896ecaadbc93e Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 10:14:30 -0700 Subject: [PATCH 167/855] Modify system tests to use prerelease versions of grpcio (#5304) --- packages/google-cloud-logging/nox.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index c02870ba753b..e977fe299748 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -100,6 +100,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) From 5573135c5bafbdc699ca7b4693f79a94e97a87b6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 May 2018 16:03:07 -0400 Subject: [PATCH 168/855] Avoid overwriting '__module__' of messages from shared modules. (#5364) Note that we *are* still overwriting it for messages from modules defined within the current package. See #4715. 
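As a minimal sketch of why the old blanket rewrite was harmful (assuming only the `message.__module__ = ...` pattern visible in the diff below; the module names are illustrative):

    # The generated message class is a single object shared by every
    # library in the process, so rewriting its '__module__' leaks out
    # to all other consumers of that class.
    from google.protobuf import timestamp_pb2

    timestamp_pb2.Timestamp.__module__ = 'google.cloud.logging_v2.types'

    # Any unrelated code that inspects Timestamp (help(), repr(), etc.)
    # now sees the rewritten location, even if it never imported
    # google.cloud.logging_v2.
    print(timestamp_pb2.Timestamp.__module__)

Restricting the rewrite to `_local_modules` keeps the effect confined to messages this package owns.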
--- .../google/cloud/logging_v2/types.py | 64 +++++++++++-------- 1 file changed, 38 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types.py b/packages/google-cloud-logging/google/cloud/logging_v2/types.py index d440d8f58f07..25787f31017f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types.py @@ -15,17 +15,11 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import distribution_pb2 from google.api import http_pb2 from google.api import label_pb2 from google.api import metric_pb2 from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_pb2 from google.logging.type import http_request_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -36,27 +30,45 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + + +_shared_modules = [ + distribution_pb2, + http_pb2, + label_pb2, + metric_pb2, + monitored_resource_pb2, + http_request_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + struct_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + log_entry_pb2, + logging_config_pb2, + logging_metrics_pb2, + logging_pb2, +] + names = [] -for module in ( - distribution_pb2, - http_pb2, - label_pb2, - metric_pb2, - monitored_resource_pb2, - log_entry_pb2, - logging_config_pb2, - logging_metrics_pb2, - logging_pb2, - http_request_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.logging_v2.types' setattr(sys.modules[__name__], name, message) From cf3e80c3944a041e5d5bc55e9286093b59750158 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 14 Jun 2018 12:50:07 -0400 Subject: [PATCH 169/855] Harden system tests against 'ResourceExhausted' quota errors. (#5486) Closes #5303. 
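For context, the retry composition being hardened reads, in condensed form, as below (a sketch assuming the `RetryResult` and `RetryErrors` decorator factories from this repository's `test_utils` helper package):

    from google.api_core.exceptions import ResourceExhausted, ServiceUnavailable
    from test_utils.retry import RetryErrors, RetryResult

    def _has_entries(result):
        # Predicate: succeed once at least one entry is visible.
        return len(result) > 0

    def _consume_entries(logger):
        # One polling attempt against the back-end.
        return list(logger.list_entries())

    # RetryResult re-invokes the callable until the *result* satisfies the
    # predicate; RetryErrors re-invokes it while any of the listed
    # exceptions is raised, so quota errors no longer abort the test.
    inner = RetryResult(_has_entries)(_consume_entries)
    outer = RetryErrors((ServiceUnavailable, ResourceExhausted))(inner)
    entries = outer(logger)  # 'logger' is an existing Logger instance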
--- .../google-cloud-logging/tests/system/test_system.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 00c94cbbf4c2..5dc8fd670494 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -16,11 +16,12 @@ import logging import unittest +from google.api_core.exceptions import Conflict +from google.api_core.exceptions import NotFound +from google.api_core.exceptions import TooManyRequests +from google.api_core.exceptions import ResourceExhausted +from google.api_core.exceptions import ServiceUnavailable from google.cloud._helpers import UTC -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound -from google.cloud.exceptions import TooManyRequests -from google.cloud.exceptions import ServiceUnavailable import google.cloud.logging import google.cloud.logging.handlers.handlers from google.cloud.logging.handlers.handlers import CloudLoggingHandler @@ -63,7 +64,7 @@ def _list_entries(logger): :returns: List of all entries consumed. """ inner = RetryResult(_has_entries)(_consume_entries) - outer = RetryErrors(ServiceUnavailable)(inner) + outer = RetryErrors((ServiceUnavailable, ResourceExhausted))(inner) return outer(logger) From 9b0e73a311e413229758c26d14cd253ab1c44e7e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 14 Jun 2018 15:18:19 -0400 Subject: [PATCH 170/855] Harden logging systests (#5496) * Harden teardown against 429 TooManyRequests errors. * Harden bucket setup against 409 Conflict and 503 ServiceUnavailable errors. * Harden dataset setup against transient errors. Closes #5493, #5494. --- .../google-cloud-logging/tests/system/test_system.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 5dc8fd670494..ccfac6d7b272 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -16,6 +16,7 @@ import logging import unittest +from google.api_core.exceptions import BadGateway from google.api_core.exceptions import Conflict from google.api_core.exceptions import NotFound from google.api_core.exceptions import TooManyRequests @@ -102,7 +103,7 @@ def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] def tearDown(self): - retry = RetryErrors(NotFound, max_tries=9) + retry = RetryErrors((NotFound, TooManyRequests), max_tries=9) for doomed in self.to_delete: try: retry(doomed.delete)() @@ -381,9 +382,10 @@ def _init_storage_bucket(self): # Create the destination bucket, and set up the ACL to allow # Stackdriver Logging to write into it. + retry = RetryErrors((Conflict, TooManyRequests, ServiceUnavailable)) storage_client = storage.Client() bucket = storage_client.bucket(BUCKET_NAME) - retry_429(bucket.create)() + retry(bucket.create)() self.to_delete.append(bucket) bucket.acl.reload() logs_group = bucket.acl.group('cloud-logs@google.com') @@ -441,9 +443,11 @@ def _init_bigquery_dataset(self): # Create the destination dataset, and set up the ACL to allow # Stackdriver Logging to write into it. 
+ retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) bigquery_client = bigquery.Client() dataset_ref = bigquery_client.dataset(dataset_name) - dataset = bigquery_client.create_dataset(bigquery.Dataset(dataset_ref)) + dataset = retry(bigquery_client.create_dataset)( + bigquery.Dataset(dataset_ref)) self.to_delete.append((bigquery_client, dataset)) bigquery_client.get_dataset(dataset) access = AccessEntry( 'WRITER', 'groupByEmail', 'cloud-logs@google.com') dataset.access_entries.append(access) bigquery_client.update_dataset(dataset, ['access_entries']) return dataset_uri From 40e092520b5ed1f1794819913ab44e8f3a12fd3a Mon Sep 17 00:00:00 2001 From: Enrique Jose Padilla Date: Wed, 20 Jun 2018 11:36:44 -0700 Subject: [PATCH 171/855] #5024 Django Versioning Compatibility for RequestMiddleware Makes 'logging/handlers/middleware/request.RequestMiddleware' compatible with all versions of Django. See the official upgrade docs: https://docs.djangoproject.com/en/2.0/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware Closes #5504. --- .../logging/handlers/middleware/request.py | 14 ++++++++++++- packages/google-cloud-logging/nox.py | 21 +++++++++++++------ 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py index 212327a0717a..3bede377ecce 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py @@ -33,9 +33,21 @@ def _get_django_request(): return getattr(_thread_locals, 'request', None) -class RequestMiddleware(object): +try: + # Django >= 1.10 + from django.utils.deprecation import MiddlewareMixin +except ImportError: + # Not required for Django <= 1.9, see: + # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware + MiddlewareMixin = object + + +class RequestMiddleware(MiddlewareMixin): """Saves the request in thread local""" + def __init__(self, get_response=None): + self.get_response = get_response + def process_request(self, request): """Called on each request, before Django decides which view to execute. diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index e977fe299748..26ecb3239121 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -35,7 +35,7 @@ @nox.session -def default(session): +def default(session, django_dep=('django',)): """Default unit test session. This is intended to be run **without** an interpreter set, so @@ -45,12 +45,11 @@ def default(session): """ # Install all test dependencies, then install this package in-place. deps = UNIT_TEST_DEPS - if session.interpreter == 'python2.7': - deps += ('django >= 1.11.0, < 2.0.0dev',) - elif session.interpreter is None and sys.version_info[:2] == (2, 7): + + if session.interpreter is None and sys.version_info[:2] == (2, 7): deps += ('django >= 1.11.0, < 2.0.0dev',) else: - deps += ('django',) + deps += django_dep deps += LOCAL_DEPS session.install(*deps) @@ -82,7 +81,17 @@ def unit(session, py): # Set the virtualenv dirname.
session.virtualenv_dirname = 'unit-' + py - default(session) + # Testing multiple versions of django + # See https://www.djangoproject.com/download/ for supported versions + django_deps_27 = [ + ('django==1.8.19',), + ('django >= 1.11.0, < 2.0.0dev',), + ] + + if session.interpreter == 'python2.7': + [default(session, django_dep=django) for django in django_deps_27] + else: + default(session) From 9ee8dd1a80cb37da60be54a6118a0cf2dbc300d0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 3 Jul 2018 13:15:29 -0400 Subject: [PATCH 172/855] Harden '_list_entries' further against backoff failure. (#5551) Closes #5303. --- packages/google-cloud-logging/tests/system/test_system.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index ccfac6d7b272..c627f5304f00 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -65,7 +65,8 @@ def _list_entries(logger): :returns: List of all entries consumed. """ inner = RetryResult(_has_entries)(_consume_entries) - outer = RetryErrors((ServiceUnavailable, ResourceExhausted))(inner) + outer = RetryErrors( + (ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) return outer(logger) From 53c81817650002e3632e0c28eb702f06bbe137c3 Mon Sep 17 00:00:00 2001 From: Helen Koike Date: Tue, 10 Jul 2018 14:29:22 -0300 Subject: [PATCH 173/855] Print to stderr instead of stdout when exiting the program (#5569) The print function sends its output to stdout, disturbing the output of the application. Redirect it to stderr instead via file=sys.stderr. --- .../handlers/transports/background_thread.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index df22118cfc00..afb149853061 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -21,6 +21,7 @@ import atexit import logging +import sys import threading import time @@ -195,7 +196,9 @@ def stop(self, grace_period=None): self._queue.put_nowait(_WORKER_TERMINATOR) if grace_period is not None: - print('Waiting up to %d seconds.' % (grace_period,)) + print( + 'Waiting up to %d seconds.' % (grace_period,), + file=sys.stderr) self._thread.join(timeout=grace_period) @@ -216,12 +219,15 @@ def _main_thread_terminated(self): if not self._queue.empty(): print( 'Program shutting down, attempting to send %d queued log ' - 'entries to Stackdriver Logging...' % (self._queue.qsize(),)) + 'entries to Stackdriver Logging...' % (self._queue.qsize(),), + file=sys.stderr) if self.stop(self._grace_period): - print('Sent all pending logs.') + print('Sent all pending logs.', file=sys.stderr) else: - print('Failed to send %d pending logs.' % (self._queue.qsize(),)) + print( + 'Failed to send %d pending logs.' % (self._queue.qsize(),), + file=sys.stderr) def enqueue(self, record, message, resource=None, labels=None): """Queues a log entry to be written by the background thread.
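The stdout/stderr distinction described in the commit message above is easy to see in isolation (a standalone sketch, independent of the diff; 'app.py' is any hypothetical script containing these lines, and a POSIX shell is assumed for the redirection):

    from __future__ import print_function  # py2/py3 compatible, as in this library
    import sys

    print('regular program output')                      # goes to stdout
    print('Waiting up to 5 seconds.', file=sys.stderr)   # diagnostics go to stderr

Running 'python app.py > out.txt' then captures only the regular output in out.txt; the shutdown diagnostics stay visible on the terminal instead of polluting the redirected stream.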
From d2ce0b38f74a3901559feb2614fce7b4c66e2ba3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 30 Jul 2018 13:49:10 -0400 Subject: [PATCH 174/855] Logging: unflake systests (#5698) * Harden 'test_list_metrics' against concurrent CI runs. Closes #5543. * Harden 'test_list_sinks' against concurrent CI runs. Closes #5692. * Harden 'test_log_handler_async' against concurrent CI runs. Use unique ID for handler, and therefore the underlying back-end log. Closes #5632. Closes #5556. --- .../tests/system/test_system.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index c627f5304f00..e7ff6804215a 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -231,9 +231,10 @@ def test_log_struct(self): def test_log_handler_async(self): LOG_MESSAGE = 'It was the worst of times' - handler = CloudLoggingHandler(Config.CLIENT) + handler_name = 'gcp-async' + unique_resource_id('-') + handler = CloudLoggingHandler(Config.CLIENT, name=handler_name) # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) + logger = Config.CLIENT.logger(handler_name) self.to_delete.append(logger) cloud_logger = logging.getLogger(handler.name) @@ -334,14 +335,14 @@ def test_list_metrics(self): METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) before_metrics = list(Config.CLIENT.list_metrics()) - before_names = set(metric.name for metric in before_metrics) + before_names = set(before.name for before in before_metrics) + self.failIf(metric.name in before_names) metric.create() self.to_delete.append(metric) self.assertTrue(metric.exists()) after_metrics = list(Config.CLIENT.list_metrics()) - after_names = set(metric.name for metric in after_metrics) - self.assertEqual(after_names - before_names, - set([METRIC_NAME])) + after_names = set(after.name for after in after_metrics) + self.assertTrue(metric.name in after_names) def test_reload_metric(self): METRIC_NAME = 'test-reload-metric%s' % (_RESOURCE_ID,) @@ -472,14 +473,14 @@ def test_list_sinks(self): sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) before_sinks = list(Config.CLIENT.list_sinks()) - before_names = set(sink.name for sink in before_sinks) + before_names = set(before.name for before in before_sinks) + self.failIf(sink.name in before_names) sink.create() self.to_delete.append(sink) self.assertTrue(sink.exists()) after_sinks = list(Config.CLIENT.list_sinks()) - after_names = set(sink.name for sink in after_sinks) - self.assertEqual(after_names - before_names, - set([SINK_NAME])) + after_names = set(after.name for after in after_sinks) + self.assertTrue(sink.name in after_names) def test_reload_sink(self): SINK_NAME = 'test-reload-sink%s' % (_RESOURCE_ID,) From 2fb46b43ea2dbae4340659a933cfbdd1a08da63f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 30 Aug 2018 11:50:23 -0400 Subject: [PATCH 175/855] Nox: use inplace installs (#5865) --- packages/google-cloud-logging/nox.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/nox.py index 26ecb3239121..3ac0c3c9e217 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/nox.py @@ -51,8 +51,9 @@ def default(session, 
django_dep=('django',)): else: deps += django_dep - deps += LOCAL_DEPS session.install(*deps) + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) session.install('-e', '.') # Run py.test against the unit tests. @@ -114,10 +115,18 @@ def system(session, py): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest', *LOCAL_DEPS) - session.install('../test_utils/', '../bigquery/', '../pubsub/', - '../storage/') - session.install('.') + session.install('mock', 'pytest') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) + systest_deps = [ + '../bigquery/', + '../pubsub/', + '../storage/', + '../test_utils/', + ] + for systest_dep in systest_deps: + session.install('-e', systest_dep) + session.install('-e', '.') # Run py.test against the system tests. session.run( From a42dacfe1bffe4d16a3efa972c23fa634b8f7ebb Mon Sep 17 00:00:00 2001 From: salrashid123 Date: Sat, 1 Sep 2018 09:46:32 -0700 Subject: [PATCH 176/855] Support 'trace' attribute of log entries (#5878) See: #5505 --- .../google/cloud/logging/entries.py | 18 ++- .../google/cloud/logging/logger.py | 69 ++++++--- .../tests/unit/test_entries.py | 15 +- .../tests/unit/test_logger.py | 133 +++++++++++++++--- 4 files changed, 188 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 995aa6e410b3..64a7751cf5f6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -76,9 +76,13 @@ class _BaseEntry(object): :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry + + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None, resource=None): + labels=None, severity=None, http_request=None, resource=None, + trace=None): self.payload = payload self.logger = logger self.insert_id = insert_id @@ -87,6 +91,7 @@ def __init__(self, payload, logger, insert_id=None, timestamp=None, self.severity = severity self.http_request = http_request self.resource = resource + self.trace = trace @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -123,6 +128,7 @@ def from_api_repr(cls, resource, client, loggers=None): labels = resource.get('labels') severity = resource.get('severity') http_request = resource.get('httpRequest') + trace = resource.get('trace') monitored_resource_dict = resource.get('resource') monitored_resource = None @@ -131,7 +137,7 @@ def from_api_repr(cls, resource, client, loggers=None): return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, labels=labels, severity=severity, http_request=http_request, - resource=monitored_resource) + resource=monitored_resource, trace=trace) class TextEntry(_BaseEntry): @@ -185,15 +191,19 @@ class ProtobufEntry(_BaseEntry): :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry + + :type trace: str + :param trace: (optional) traceid to apply to the entry. 
""" _PAYLOAD_KEY = 'protoPayload' def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None, resource=None): + labels=None, severity=None, http_request=None, resource=None, + trace=None): super(ProtobufEntry, self).__init__( payload, logger, insert_id=insert_id, timestamp=timestamp, labels=labels, severity=severity, http_request=http_request, - resource=resource) + resource=resource, trace=trace) if isinstance(self.payload, any_pb2.Any): self.payload_pb = self.payload self.payload = None diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index b6db8828bffe..2b3bd577fbed 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -96,7 +96,7 @@ def batch(self, client=None): def _make_entry_resource(self, text=None, info=None, message=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -131,6 +131,9 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry + :type trace: str + :param trace: (optional) traceid to apply to the entry. + :rtype: dict :returns: The JSON resource created. """ @@ -172,11 +175,14 @@ def _make_entry_resource(self, text=None, info=None, message=None, if timestamp is not None: entry['timestamp'] = _datetime_to_rfc3339(timestamp) + if trace is not None: + entry['trace'] = trace + return entry def log_text(self, text, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """API call: log a text message via a POST request See @@ -203,22 +209,26 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. + :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: Monitored resource of the entry, defaults to the global resource type. - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ client = self._require_client(client) entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp, resource=resource) + http_request=http_request, timestamp=timestamp, resource=resource, + trace=trace) client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """API call: log a structured message via a POST request See @@ -245,22 +255,26 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. 
+ :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. + :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: Monitored resource of the entry, defaults to the global resource type. - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ client = self._require_client(client) entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp, resource=resource) + http_request=http_request, timestamp=timestamp, resource=resource, + trace=trace) client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """API call: log a protobuf message via a POST request See @@ -287,18 +301,21 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :param http_request: (optional) info about HTTP request associated with the entry. + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. + :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: Monitored resource of the entry, defaults to the global resource type. - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ client = self._require_client(client) entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request, timestamp=timestamp, - resource=resource) + resource=resource, trace=trace) client.logging_api.write_entries([entry_resource]) def delete(self, client=None): @@ -392,7 +409,8 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() def log_text(self, text, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE): + http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE, + trace=None): """Add a text entry to be logged during :meth:`commit`. :type text: str @@ -420,14 +438,17 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, resource of the batch is used for this entry. If both this resource and the Batch resource are None, the API will return an error. + + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ self.entries.append( ('text', text, labels, insert_id, severity, http_request, - timestamp, resource)) + timestamp, resource, trace)) def log_struct(self, info, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict @@ -455,14 +476,17 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, resource of the batch is used for this entry. If both this resource and the Batch resource are None, the API will return an error. + + :type trace: str + :param trace: (optional) traceid to apply to the entry. 
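# ---- [Editor's sketch, not part of the patch] -----------------------------
# Batched entries accept the same 'trace' keyword; a Batch used as a
# context manager commits automatically on exit. 'logger' is the Logger
# from the previous sketch, and TRACE is a placeholder value.
TRACE = 'projects/my-project/traces/12345678123456781234567812345678'

with logger.batch() as batch:
    batch.log_text('step one', trace=TRACE)
    batch.log_struct({'step': 'two'}, trace=TRACE)
# Leaving the 'with' block calls commit() once for both entries.
# ----------------------------------------------------------------------------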
""" self.entries.append( ('struct', info, labels, insert_id, severity, http_request, - timestamp, resource)) + timestamp, resource, trace)) def log_proto(self, message, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE): + resource=_GLOBAL_RESOURCE, trace=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -490,10 +514,13 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, resource of the batch is used for this entry. If both this resource and the Batch resource are None, the API will return an error. + + :type trace: str + :param trace: (optional) traceid to apply to the entry. """ self.entries.append( ('proto', message, labels, insert_id, severity, http_request, - timestamp, resource)) + timestamp, resource, trace)) def commit(self, client=None): """Send saved log entries as a single API call. @@ -517,7 +544,7 @@ def commit(self, client=None): entries = [] for (entry_type, entry, labels, iid, severity, http_req, - timestamp, resource) in self.entries: + timestamp, resource, trace) in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -544,6 +571,8 @@ def commit(self, client=None): info['httpRequest'] = http_req if timestamp is not None: info['timestamp'] = _datetime_to_rfc3339(timestamp) + if trace is not None: + info['trace'] = trace entries.append(info) client.logging_api.write_entries(entries, **kwargs) diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 71b3ce561299..b15974e77a2a 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -68,6 +68,7 @@ def test_ctor_defaults(self): self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) self.assertIsNone(entry.resource) + self.assertIsNone(entry.trace) def test_ctor_explicit(self): import datetime @@ -87,6 +88,7 @@ def test_ctor_explicit(self): 'status': STATUS, } resource = Resource(type='global', labels={}) + TRACE = '12345678-1234-5678-1234-567812345678' logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._make_one(PAYLOAD, logger, @@ -95,7 +97,8 @@ def test_ctor_explicit(self): labels=LABELS, severity=SEVERITY, http_request=REQUEST, - resource=resource) + resource=resource, + trace=TRACE) self.assertEqual(entry.payload, PAYLOAD) self.assertIs(entry.logger, logger) self.assertEqual(entry.insert_id, IID) @@ -106,6 +109,7 @@ def test_ctor_explicit(self): self.assertEqual(entry.http_request['requestUrl'], URI) self.assertEqual(entry.http_request['status'], STATUS) self.assertEqual(entry.resource, resource) + self.assertEqual(entry.trace, TRACE) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) @@ -122,6 +126,7 @@ def test_from_api_repr_missing_data_no_loggers(self): self.assertIsNone(entry.timestamp) self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) + self.assertIsNone(entry.trace) logger = entry.logger self.assertIsInstance(logger, _Logger) self.assertIs(logger.client, client) @@ -154,6 +159,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): } ) STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, @@ -167,6 +173,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): 'status': STATUS, }, 'resource': RESOURCE._to_dict(), + 'trace': 
TRACE } loggers = {} entry = klass.from_api_repr(API_REPR, client, loggers=loggers) @@ -184,6 +191,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertEqual(loggers, {LOG_NAME: logger}) self.assertEqual(entry.resource, RESOURCE) + self.assertEqual(entry.trace, TRACE) def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime @@ -196,12 +204,14 @@ def test_from_api_repr_w_loggers_w_logger_match(self): TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) LABELS = {'foo': 'bar', 'baz': 'qux'} + TRACE = '12345678-1234-5678-1234-567812345678' API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, 'labels': LABELS, + 'trace': TRACE } LOGGER = object() loggers = {LOG_NAME: LOGGER} @@ -211,6 +221,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.trace, TRACE) self.assertIs(entry.logger, LOGGER) @@ -239,6 +250,7 @@ def test_constructor_basic(self): self.assertIsNone(pb_entry.labels) self.assertIsNone(pb_entry.severity) self.assertIsNone(pb_entry.http_request) + self.assertIsNone(pb_entry.trace) def test_constructor_with_any(self): from google.protobuf.any_pb2 import Any @@ -253,6 +265,7 @@ def test_constructor_with_any(self): self.assertIsNone(pb_entry.labels) self.assertIsNone(pb_entry.severity) self.assertIsNone(pb_entry.http_request) + self.assertIsNone(pb_entry.trace) def test_parse_message(self): import json diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 30b07466c110..85c8e2568584 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -155,6 +155,29 @@ def test_log_text_w_timestamp(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_text_w_trace(self): + + TEXT = 'TEXT' + TRACE = '12345678-1234-5678-1234-567812345678' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'trace': TRACE + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_text(TEXT, trace=TRACE) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} @@ -164,6 +187,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -181,6 +205,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'trace': TRACE }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -189,7 +214,8 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): labels=DEFAULT_LABELS) logger.log_text(TEXT, client=client2, labels=LABELS, - insert_id=IID, severity=SEVERITY, http_request=REQUEST) + insert_id=IID, 
severity=SEVERITY, http_request=REQUEST, + trace=TRACE) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -246,6 +272,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -263,6 +290,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'trace': TRACE }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -272,7 +300,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): logger.log_struct(STRUCT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST) + http_request=REQUEST, trace=TRACE) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -301,6 +329,29 @@ def test_log_struct_w_timestamp(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_w_trace(self): + + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + TRACE = '12345678-1234-5678-1234-567812345678' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'trace': TRACE + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, trace=TRACE) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson @@ -366,6 +417,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -383,6 +435,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'trace': TRACE }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -392,7 +445,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): logger.log_proto(message, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST) + http_request=REQUEST, trace=TRACE) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -425,6 +478,33 @@ def test_log_proto_w_timestamp(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_proto_w_trace(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + message = Struct(fields={'foo': Value(bool_value=True)}) + TRACE = '12345678-1234-5678-1234-567812345678' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'trace': TRACE + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_proto(message, trace=TRACE) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, 
None, None)) + def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -550,7 +630,7 @@ def test_log_text_defaults(self): batch.log_text(TEXT) self.assertEqual(batch.entries, [('text', TEXT, None, None, None, None, None, - _GLOBAL_RESOURCE)]) + _GLOBAL_RESOURCE, None)]) def test_log_text_explicit(self): import datetime @@ -576,17 +656,18 @@ def test_log_text_explicit(self): 'version_id': 'test' } ) + TRACE = '12345678-1234-5678-1234-567812345678' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, timestamp=TIMESTAMP, - resource=RESOURCE) + resource=RESOURCE, trace=TRACE) self.assertEqual( batch.entries, [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE)]) + RESOURCE, TRACE)]) def test_log_struct_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE @@ -598,7 +679,7 @@ def test_log_struct_defaults(self): self.assertEqual( batch.entries, [('struct', STRUCT, None, None, None, None, None, - _GLOBAL_RESOURCE)]) + _GLOBAL_RESOURCE, None)]) def test_log_struct_explicit(self): import datetime @@ -611,6 +692,7 @@ def test_log_struct_explicit(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -630,11 +712,11 @@ def test_log_struct_explicit(self): batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE) + timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE) self.assertEqual( batch.entries, [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE)]) + RESOURCE, TRACE)]) def test_log_proto_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE @@ -648,7 +730,7 @@ def test_log_proto_defaults(self): batch.log_proto(message) self.assertEqual(batch.entries, [('proto', message, None, None, None, None, None, - _GLOBAL_RESOURCE)]) + _GLOBAL_RESOURCE, None)]) def test_log_proto_explicit(self): import datetime @@ -663,6 +745,7 @@ def test_log_proto_explicit(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -681,18 +764,18 @@ def test_log_proto_explicit(self): batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE) + timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE) self.assertEqual( batch.entries, [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE)]) + RESOURCE, TRACE)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None, - None)) + None, None)) with self.assertRaises(ValueError): batch.commit() @@ -742,26 +825,32 @@ def test_commit_w_bound_client(self): TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) + TRACE1 = 
'12345678-1234-5678-1234-567812345678' + TRACE2 = '12345678-1234-5678-1234-567812345679' + TRACE3 = '12345678-1234-5678-1234-567812345670' ENTRIES = [ {'textPayload': TEXT, 'insertId': IID1, 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), - 'resource': _GLOBAL_RESOURCE._to_dict()}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE1}, {'jsonPayload': STRUCT, 'insertId': IID2, 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), - 'resource': _GLOBAL_RESOURCE._to_dict()}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE2}, {'protoPayload': json.loads(MessageToJson(message)), 'insertId': IID3, 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), - 'resource': _GLOBAL_RESOURCE._to_dict()}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE3}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1) - batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2) - batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3) + batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1, + trace=TRACE1) + batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2, + trace=TRACE2) + batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3, + trace=TRACE3) batch.commit() self.assertEqual(list(batch.entries), []) @@ -888,11 +977,11 @@ def test_context_mgr_failure(self): logger = _Logger() UNSENT = [ ('text', TEXT, None, IID, None, None, TIMESTAMP, - _GLOBAL_RESOURCE), + _GLOBAL_RESOURCE, None), ('struct', STRUCT, None, None, SEVERITY, None, None, - _GLOBAL_RESOURCE), + _GLOBAL_RESOURCE, None), ('proto', message, LABELS, None, None, REQUEST, None, - _GLOBAL_RESOURCE), + _GLOBAL_RESOURCE, None), ] batch = self._make_one(logger, client=client) From 6c5bab81f13b62c20a491e751b5a9ba2a746d33a Mon Sep 17 00:00:00 2001 From: salrashid123 Date: Tue, 4 Sep 2018 14:26:06 -0700 Subject: [PATCH 177/855] Add span_id to LogEntry (#5885) --- .../google/cloud/logging/entries.py | 18 ++- .../google/cloud/logging/logger.py | 62 ++++++-- .../tests/unit/test_entries.py | 19 ++- .../tests/unit/test_logger.py | 140 ++++++++++++++---- 4 files changed, 193 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 64a7751cf5f6..477237501e0c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -79,10 +79,14 @@ class _BaseEntry(object): :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. 
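# ---- [Editor's sketch, not part of the patch] -----------------------------
# 'span_id' names a span *within* a trace, so pass it together with
# 'trace'. Values are placeholders; 'logger' is an existing Logger as in
# the earlier sketches.
logger.log_struct(
    {'message': 'checkout complete'},
    trace='projects/my-project/traces/12345678123456781234567812345678',
    span_id='000000000000004a',
)
# ----------------------------------------------------------------------------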
""" def __init__(self, payload, logger, insert_id=None, timestamp=None, labels=None, severity=None, http_request=None, resource=None, - trace=None): + trace=None, span_id=None): self.payload = payload self.logger = logger self.insert_id = insert_id @@ -92,6 +96,7 @@ def __init__(self, payload, logger, insert_id=None, timestamp=None, self.http_request = http_request self.resource = resource self.trace = trace + self.span_id = span_id @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -129,6 +134,7 @@ def from_api_repr(cls, resource, client, loggers=None): severity = resource.get('severity') http_request = resource.get('httpRequest') trace = resource.get('trace') + span_id = resource.get('spanId') monitored_resource_dict = resource.get('resource') monitored_resource = None @@ -137,7 +143,7 @@ def from_api_repr(cls, resource, client, loggers=None): return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, labels=labels, severity=severity, http_request=http_request, - resource=monitored_resource, trace=trace) + resource=monitored_resource, trace=trace, span_id=span_id) class TextEntry(_BaseEntry): @@ -194,16 +200,20 @@ class ProtobufEntry(_BaseEntry): :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ _PAYLOAD_KEY = 'protoPayload' def __init__(self, payload, logger, insert_id=None, timestamp=None, labels=None, severity=None, http_request=None, resource=None, - trace=None): + trace=None, span_id=None): super(ProtobufEntry, self).__init__( payload, logger, insert_id=insert_id, timestamp=timestamp, labels=labels, severity=severity, http_request=http_request, - resource=resource, trace=trace) + resource=resource, trace=trace, span_id=span_id) if isinstance(self.payload, any_pb2.Any): self.payload_pb = self.payload self.payload = None diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 2b3bd577fbed..7190afe44412 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -96,7 +96,8 @@ def batch(self, client=None): def _make_entry_resource(self, text=None, info=None, message=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, + span_id=None): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -134,6 +135,10 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. + :rtype: dict :returns: The JSON resource created. 
""" @@ -178,11 +183,14 @@ def _make_entry_resource(self, text=None, info=None, message=None, if trace is not None: entry['trace'] = trace + if span_id is not None: + entry['spanId'] = span_id + return entry def log_text(self, text, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): """API call: log a text message via a POST request See @@ -218,17 +226,21 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ client = self._require_client(client) entry_resource = self._make_entry_resource( text=text, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request, timestamp=timestamp, resource=resource, - trace=trace) + trace=trace, span_id=span_id) client.logging_api.write_entries([entry_resource]) def log_struct(self, info, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): """API call: log a structured message via a POST request See @@ -264,17 +276,21 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ client = self._require_client(client) entry_resource = self._make_entry_resource( info=info, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request, timestamp=timestamp, resource=resource, - trace=trace) + trace=trace, span_id=span_id) client.logging_api.write_entries([entry_resource]) def log_proto(self, message, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): """API call: log a protobuf message via a POST request See @@ -310,12 +326,16 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ client = self._require_client(client) entry_resource = self._make_entry_resource( message=message, labels=labels, insert_id=insert_id, severity=severity, http_request=http_request, timestamp=timestamp, - resource=resource, trace=trace) + resource=resource, trace=trace, span_id=span_id) client.logging_api.write_entries([entry_resource]) def delete(self, client=None): @@ -410,7 +430,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def log_text(self, text, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE, - trace=None): + trace=None, span_id=None): """Add a text entry to be logged during :meth:`commit`. :type text: str @@ -441,14 +461,18 @@ def log_text(self, text, labels=None, insert_id=None, severity=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. 
+ Specify the trace parameter if span_id is set. """ self.entries.append( ('text', text, labels, insert_id, severity, http_request, - timestamp, resource, trace)) + timestamp, resource, trace, span_id)) def log_struct(self, info, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict @@ -479,14 +503,18 @@ def log_struct(self, info, labels=None, insert_id=None, severity=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ self.entries.append( ('struct', info, labels, insert_id, severity, http_request, - timestamp, resource, trace)) + timestamp, resource, trace, span_id)) def log_proto(self, message, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None): + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -517,10 +545,14 @@ def log_proto(self, message, labels=None, insert_id=None, severity=None, :type trace: str :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ self.entries.append( ('proto', message, labels, insert_id, severity, http_request, - timestamp, resource, trace)) + timestamp, resource, trace, span_id)) def commit(self, client=None): """Send saved log entries as a single API call. 
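# ---- [Editor's sketch, not part of the patch] -----------------------------
# Entries queued on a Batch now carry trace/span_id through to 'commit',
# which still issues a single 'write_entries' call. TRACE is the
# placeholder from the earlier sketch.
batch = logger.batch()
batch.log_text('step one', trace=TRACE, span_id='000000000000004a')
batch.log_struct({'step': 'two'}, trace=TRACE, span_id='000000000000004b')
batch.commit()
# ----------------------------------------------------------------------------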
@@ -544,7 +576,7 @@ def commit(self, client=None): entries = [] for (entry_type, entry, labels, iid, severity, http_req, - timestamp, resource, trace) in self.entries: + timestamp, resource, trace, span_id) in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -573,6 +605,8 @@ def commit(self, client=None): info['timestamp'] = _datetime_to_rfc3339(timestamp) if trace is not None: info['trace'] = trace + if span_id is not None: + info['spanId'] = span_id entries.append(info) client.logging_api.write_entries(entries, **kwargs) diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index b15974e77a2a..03eade9a54b2 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -69,6 +69,7 @@ def test_ctor_defaults(self): self.assertIsNone(entry.http_request) self.assertIsNone(entry.resource) self.assertIsNone(entry.trace) + self.assertIsNone(entry.span_id) def test_ctor_explicit(self): import datetime @@ -89,6 +90,7 @@ def test_ctor_explicit(self): } resource = Resource(type='global', labels={}) TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._make_one(PAYLOAD, logger, @@ -98,7 +100,8 @@ def test_ctor_explicit(self): severity=SEVERITY, http_request=REQUEST, resource=resource, - trace=TRACE) + trace=TRACE, + span_id=SPANID) self.assertEqual(entry.payload, PAYLOAD) self.assertIs(entry.logger, logger) self.assertEqual(entry.insert_id, IID) @@ -110,6 +113,7 @@ def test_ctor_explicit(self): self.assertEqual(entry.http_request['status'], STATUS) self.assertEqual(entry.resource, resource) self.assertEqual(entry.trace, TRACE) + self.assertEqual(entry.span_id, SPANID) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) @@ -127,6 +131,7 @@ def test_from_api_repr_missing_data_no_loggers(self): self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) self.assertIsNone(entry.trace) + self.assertIsNone(entry.span_id) logger = entry.logger self.assertIsInstance(logger, _Logger) self.assertIs(logger.client, client) @@ -160,6 +165,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): ) STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, @@ -173,7 +179,8 @@ def test_from_api_repr_w_loggers_no_logger_match(self): 'status': STATUS, }, 'resource': RESOURCE._to_dict(), - 'trace': TRACE + 'trace': TRACE, + 'spanId': SPANID } loggers = {} entry = klass.from_api_repr(API_REPR, client, loggers=loggers) @@ -192,6 +199,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): self.assertEqual(loggers, {LOG_NAME: logger}) self.assertEqual(entry.resource, RESOURCE) self.assertEqual(entry.trace, TRACE) + self.assertEqual(entry.span_id, SPANID) def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime @@ -205,13 +213,15 @@ def test_from_api_repr_w_loggers_w_logger_match(self): LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) LABELS = {'foo': 'bar', 'baz': 'qux'} TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, 'labels': LABELS, - 'trace': TRACE + 'trace': TRACE, + 'spanId': SPANID } LOGGER = object() loggers = 
{LOG_NAME: LOGGER} @@ -222,6 +232,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertEqual(entry.timestamp, NOW) self.assertEqual(entry.labels, LABELS) self.assertEqual(entry.trace, TRACE) + self.assertEqual(entry.span_id, SPANID) self.assertIs(entry.logger, LOGGER) @@ -251,6 +262,7 @@ def test_constructor_basic(self): self.assertIsNone(pb_entry.severity) self.assertIsNone(pb_entry.http_request) self.assertIsNone(pb_entry.trace) + self.assertIsNone(pb_entry.span_id) def test_constructor_with_any(self): from google.protobuf.any_pb2 import Any @@ -266,6 +278,7 @@ def test_constructor_with_any(self): self.assertIsNone(pb_entry.severity) self.assertIsNone(pb_entry.http_request) self.assertIsNone(pb_entry.trace) + self.assertIsNone(pb_entry.span_id) def test_parse_message(self): import json diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 85c8e2568584..0624abdda135 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -178,6 +178,29 @@ def test_log_text_w_trace(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_text_w_span(self): + + TEXT = 'TEXT' + SPANID = '000000000000004a' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'spanId': SPANID + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_text(TEXT, span_id=SPANID) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} @@ -188,6 +211,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): URI = 'https://api.example.com/endpoint' STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -205,7 +229,8 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, - 'trace': TRACE + 'trace': TRACE, + 'spanId': SPANID }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -215,7 +240,7 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): logger.log_text(TEXT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - trace=TRACE) + trace=TRACE, span_id=SPANID) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -273,6 +298,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): URI = 'https://api.example.com/endpoint' STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -290,7 +316,8 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, - 'trace': TRACE + 'trace': TRACE, + 'spanId': SPANID }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -300,7 +327,7 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): logger.log_struct(STRUCT, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST, 
trace=TRACE) + http_request=REQUEST, trace=TRACE, span_id=SPANID) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -352,6 +379,29 @@ def test_log_struct_w_trace(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_w_span(self): + + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + SPANID = '000000000000004a' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'spanId': SPANID + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, span_id=SPANID) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_proto_w_implicit_client(self): import json from google.protobuf.json_format import MessageToJson @@ -418,6 +468,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): URI = 'https://api.example.com/endpoint' STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -435,7 +486,8 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, - 'trace': TRACE + 'trace': TRACE, + 'spanId': SPANID }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -445,7 +497,7 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): logger.log_proto(message, client=client2, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST, trace=TRACE) + http_request=REQUEST, trace=TRACE, span_id=SPANID) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -505,6 +557,33 @@ def test_log_proto_w_trace(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_proto_w_span(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + message = Struct(fields={'foo': Value(bool_value=True)}) + SPANID = '000000000000004a' + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'spanId': SPANID + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_proto(message, span_id=SPANID) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -630,7 +709,7 @@ def test_log_text_defaults(self): batch.log_text(TEXT) self.assertEqual(batch.entries, [('text', TEXT, None, None, None, None, None, - _GLOBAL_RESOURCE, None)]) + _GLOBAL_RESOURCE, None, None)]) def test_log_text_explicit(self): import datetime @@ -657,17 +736,18 @@ def test_log_text_explicit(self): } ) TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, 
timestamp=TIMESTAMP, - resource=RESOURCE, trace=TRACE) + resource=RESOURCE, trace=TRACE, span_id=SPANID) self.assertEqual( batch.entries, [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE)]) + RESOURCE, TRACE, SPANID)]) def test_log_struct_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE @@ -679,7 +759,7 @@ def test_log_struct_defaults(self): self.assertEqual( batch.entries, [('struct', STRUCT, None, None, None, None, None, - _GLOBAL_RESOURCE, None)]) + _GLOBAL_RESOURCE, None, None)]) def test_log_struct_explicit(self): import datetime @@ -693,6 +773,7 @@ def test_log_struct_explicit(self): URI = 'https://api.example.com/endpoint' STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -712,11 +793,12 @@ def test_log_struct_explicit(self): batch = self._make_one(logger, client=client) batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE) + timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE, + span_id=SPANID) self.assertEqual( batch.entries, [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE)]) + RESOURCE, TRACE, SPANID)]) def test_log_proto_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE @@ -730,7 +812,7 @@ def test_log_proto_defaults(self): batch.log_proto(message) self.assertEqual(batch.entries, [('proto', message, None, None, None, None, None, - _GLOBAL_RESOURCE, None)]) + _GLOBAL_RESOURCE, None, None)]) def test_log_proto_explicit(self): import datetime @@ -746,6 +828,7 @@ def test_log_proto_explicit(self): URI = 'https://api.example.com/endpoint' STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' REQUEST = { 'requestMethod': METHOD, 'requestUrl': URI, @@ -764,18 +847,19 @@ def test_log_proto_explicit(self): batch = self._make_one(logger, client=client) batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE) + timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE, + span_id=SPANID) self.assertEqual( batch.entries, [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE)]) + RESOURCE, TRACE, SPANID)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) batch = self._make_one(logger, client) batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None, - None, None)) + None, None, None)) with self.assertRaises(ValueError): batch.commit() @@ -828,17 +912,23 @@ def test_commit_w_bound_client(self): TRACE1 = '12345678-1234-5678-1234-567812345678' TRACE2 = '12345678-1234-5678-1234-567812345679' TRACE3 = '12345678-1234-5678-1234-567812345670' + SPANID1 = '000000000000004a' + SPANID2 = '000000000000004b' + SPANID3 = '000000000000004c' ENTRIES = [ {'textPayload': TEXT, 'insertId': IID1, 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE1}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE1, + 'spanId': SPANID1}, {'jsonPayload': STRUCT, 'insertId': IID2, 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE2}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE2, + 'spanId': SPANID2}, {'protoPayload': json.loads(MessageToJson(message)), 
'insertId': IID3, 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE3}, + 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE3, + 'spanId': SPANID3}, ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -846,11 +936,11 @@ def test_commit_w_bound_client(self): batch = self._make_one(logger, client=client) batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1, - trace=TRACE1) + trace=TRACE1, span_id=SPANID1) batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2, - trace=TRACE2) + trace=TRACE2, span_id=SPANID2) batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3, - trace=TRACE3) + trace=TRACE3, span_id=SPANID3) batch.commit() self.assertEqual(list(batch.entries), []) @@ -977,11 +1067,11 @@ def test_context_mgr_failure(self): logger = _Logger() UNSENT = [ ('text', TEXT, None, IID, None, None, TIMESTAMP, - _GLOBAL_RESOURCE, None), + _GLOBAL_RESOURCE, None, None), ('struct', STRUCT, None, None, SEVERITY, None, None, - _GLOBAL_RESOURCE, None), + _GLOBAL_RESOURCE, None, None), ('proto', message, LABELS, None, None, REQUEST, None, - _GLOBAL_RESOURCE, None), + _GLOBAL_RESOURCE, None, None), ] batch = self._make_one(logger, client=client) From 676a92d40b522e8006d3131ba66be2dda05f9a16 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 10 Sep 2018 15:55:01 -0400 Subject: [PATCH 178/855] Docs: Replace links to '/stable/' with '/latest/'. (#5901) * Replace links to '/stable/' with '/latest/'. * DRY out duplicated 'README.rst' vs. 'docs/index.rst'. * Include websecurityscanner in docs. Toward #5894. --- packages/google-cloud-logging/docs/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 5bb60aaf57ee..fb584fa1ae8f 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -8,7 +8,7 @@ Python Client for Stackdriver Logging API (`Beta`_) .. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst .. _Stackdriver Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html .. _Product Documentation: https://cloud.google.com/logging Quick Start @@ -24,7 +24,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html +.. 
_Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html Installation ~~~~~~~~~~~~ @@ -96,4 +96,4 @@ Api Reference :maxdepth: 2 gapic/v2/api - gapic/v2/types \ No newline at end of file + gapic/v2/types From f3f199cfbbf992457ed1492a4d5b66846054ea1f Mon Sep 17 00:00:00 2001 From: brunoais Date: Tue, 11 Sep 2018 17:29:36 +0100 Subject: [PATCH 179/855] Add trace and span_id to logging async API (#5908) --- .../handlers/transports/background_thread.py | 25 ++++++++- .../cloud/logging/handlers/transports/base.py | 3 +- .../cloud/logging/handlers/transports/sync.py | 7 ++- .../transports/test_background_thread.py | 55 +++++++++++++++++-- .../unit/handlers/transports/test_sync.py | 9 ++- 5 files changed, 86 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index afb149853061..852e32dd42bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -229,7 +229,8 @@ def _main_thread_terminated(self): 'Failed to send %d pending logs.' % (self._queue.qsize(),), file=sys.stderr) - def enqueue(self, record, message, resource=None, labels=None): + def enqueue(self, record, message, resource=None, labels=None, + trace=None, span_id=None): """Queues a log entry to be written by the background thread. :type record: :class:`logging.LogRecord` @@ -244,6 +245,13 @@ def enqueue(self, record, message, resource=None, labels=None): :type labels: dict :param labels: (Optional) Mapping of labels for the entry. + + :type trace: str + :param trace: (optional) traceid to apply to the logging entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ self._queue.put_nowait({ 'info': { @@ -253,6 +261,8 @@ def enqueue(self, record, message, resource=None, labels=None): 'severity': record.levelname, 'resource': resource, 'labels': labels, + 'trace': trace, + 'span_id': span_id, }) def flush(self): @@ -296,7 +306,8 @@ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, max_latency=max_latency) self.worker.start() - def send(self, record, message, resource=None, labels=None): + def send(self, record, message, resource=None, labels=None, + trace=None, span_id=None): """Overrides Transport.send(). :type record: :class:`logging.LogRecord` @@ -311,8 +322,16 @@ def send(self, record, message, resource=None, labels=None): :type labels: dict :param labels: (Optional) Mapping of labels for the entry. + + :type trace: str + :param trace: (optional) traceid to apply to the logging entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. 
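# ---- [Editor's sketch, not part of the patch] -----------------------------
# Exercising the new keywords through the background transport directly;
# 'send' forwards them to the worker's 'enqueue'. 'client' is assumed to
# be an authenticated Client; names and ids are placeholders.
import logging

transport = BackgroundThreadTransport(client, 'my-log')
record = logging.LogRecord(
    'my-logger', logging.INFO, None, None, 'hello world', None, None)
transport.send(
    record,
    'hello world',
    trace='projects/my-project/traces/12345678123456781234567812345678',
    span_id='000000000000004a',
)
transport.flush()
# ----------------------------------------------------------------------------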
""" - self.worker.enqueue(record, message, resource=resource, labels=labels) + self.worker.enqueue(record, message, resource=resource, labels=labels, + trace=trace, span_id=span_id) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py index cefbb6909a07..9e40cc8a2194 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -22,7 +22,8 @@ class Transport(object): client and name object, and must override :meth:`send`. """ - def send(self, record, message, resource=None, labels=None): + def send(self, record, message, resource=None, labels=None, + trace=None, span_id=None): """Transport send to be implemented by subclasses. :type record: :class:`logging.LogRecord` diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index e5979d1bdf58..589b15db5f6a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -29,7 +29,8 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send(self, record, message, resource=None, labels=None): + def send(self, record, message, resource=None, labels=None, + trace=None, span_id=None): """Overrides transport.send(). :type record: :class:`logging.LogRecord` @@ -49,4 +50,6 @@ def send(self, record, message, resource=None, labels=None): self.logger.log_struct(info, severity=record.levelname, resource=resource, - labels=labels) + labels=labels, + trace=trace, + span_id=span_id) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 1f49763b8ce4..e06083d2b756 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -61,10 +61,55 @@ def test_send(self): python_logger_name, logging.INFO, None, None, message, None, None) - transport.send(record, message, _GLOBAL_RESOURCE, None) + transport.send(record, message, _GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None) + record, message, _GLOBAL_RESOURCE, None, + trace=None, span_id=None) + + def test_trace_send(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + name = 'python_logger' + + transport, _ = self._make_one(client, name) + + python_logger_name = 'mylogger' + message = 'hello world' + trace = 'the-project/trace/longlogTraceid' + + record = logging.LogRecord( + python_logger_name, logging.INFO, + None, None, message, None, None) + + transport.send(record, message, _GLOBAL_RESOURCE, trace=trace) + + transport.worker.enqueue.assert_called_once_with( + record, message, _GLOBAL_RESOURCE, None, + trace=trace, span_id=None) + + def test_span_send(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + name = 'python_logger' + + transport, _ = self._make_one(client, name) + + python_logger_name = 'mylogger' + 
message = 'hello world' + span_id = 'the-project/trace/longlogTraceid/span/123456789012abbacdac' + + record = logging.LogRecord( + python_logger_name, logging.INFO, + None, None, message, None, None) + + transport.send(record, message, _GLOBAL_RESOURCE, span_id=span_id) + + transport.worker.enqueue.assert_called_once_with( + record, message, _GLOBAL_RESOURCE, None, + trace=None, span_id=span_id) def test_flush(self): client = _Client(self.PROJECT) @@ -381,13 +426,15 @@ def __init__(self): self.commit_count = None def log_struct( - self, info, severity=logging.INFO, resource=None, labels=None): + self, info, severity=logging.INFO, resource=None, labels=None, + trace=None, span_id=None): from google.cloud.logging.logger import _GLOBAL_RESOURCE assert resource is None resource = _GLOBAL_RESOURCE - self.log_struct_called_with = (info, severity, resource, labels) + self.log_struct_called_with = (info, severity, resource, labels, + trace, span_id) self.entries.append(info) def commit(self): diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 6c2e51f944fb..6a3f5dcf7a67 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -52,7 +52,8 @@ def test_send(self): 'message': message, 'python_logger': python_logger_name, } - EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE, None) + EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE, None, + None, None) self.assertEqual( transport.logger.log_struct_called_with, EXPECTED_SENT) @@ -64,8 +65,10 @@ def __init__(self, name): self.name = name def log_struct(self, message, severity=None, - resource=_GLOBAL_RESOURCE, labels=None): - self.log_struct_called_with = (message, severity, resource, labels) + resource=_GLOBAL_RESOURCE, labels=None, + trace=None, span_id=None): + self.log_struct_called_with = (message, severity, resource, labels, + trace, span_id) class _Client(object): From b77a5430b856a45f28c313f3ffd299c705ee880a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 11 Sep 2018 16:19:38 -0400 Subject: [PATCH 180/855] Release logging 1.7.0 (#5926) --- packages/google-cloud-logging/CHANGELOG.md | 26 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 4 ++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 548414bef7c8..7b95278e8351 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,32 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.7.0 + +### Implementation Changes +- Print to stderr instead of stdout when exiting the program ([#5569](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5569)) +- Avoid overwriting '__module__' of messages from shared modules. 
([#5364](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5364)) +- Support older Django versions in request middleware [#5024](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5024) +- Fix bad trove classifier [#5386](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5386) + +### New Features +- Add support for `trace` and `span_id` to logging async API ([#5908](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5908)) +- Add support for `span_id` attribute of log entries ([#5885](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5885)) +- Add support for `trace` attribute of log entries ([#5878](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5878)) +- Add support for Python 3.7 and remove 3.4 ([#5295](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5295)) + +### Documentation +- Replace links to '/stable/' with '/latest/'. ([#5901](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5901)) + +### Internal / Testing Changes +- Nox: use inplace installs ([#5865](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5865)) +- Unflake logging systests ([#5698](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5698)) +- Harden `_list_entries` system test further against backoff failure. ([#5551](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5551)) +- Harden logging systests ([#5496](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5496)) +- Harden system tests against 'ResourceExhausted' quota errors. ([#5486](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5486)) +- Modify system tests to use prerelease versions of grpcio ([#5304](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5304)) +- Plug leaky sink in systests. ([#5247](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5247)) + ## 1.6.0 ### Dependencies diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 536c918a5df9..d24e5b7399e6 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.6.0' +version = '1.7.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -73,9 +73,9 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Operating System :: OS Independent', 'Topic :: Internet', ], From 4eb4d901ff343f6dac59b8de1ad3716e1bdea8a4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 12 Sep 2018 16:06:22 -0400 Subject: [PATCH 181/855] Prep logging docs for repo split. (#5943) - Move docs from 'docs/logging' into 'logging/docs' and leave symlink. - Harmonize / DRY 'logging/README.rst' and 'logging/docs/index.rst'. - Drop docs for GAPIC-generated helpers: they are not part of the surface. - Ensure that docs still build from top-level. Toward #5912. 
--- packages/google-cloud-logging/CHANGELOG.md | 8 +- packages/google-cloud-logging/README.rst | 98 +++-- .../google-cloud-logging/docs/changelog.md | 1 + packages/google-cloud-logging/docs/client.rst | 6 + .../google-cloud-logging/docs/entries.rst | 6 + .../docs/gapic/v2/api.rst | 6 - .../docs/gapic/v2/types.rst | 5 - .../docs/handlers-app-engine.rst | 6 + .../docs/handlers-container-engine.rst | 6 + .../google-cloud-logging/docs/handlers.rst | 6 + packages/google-cloud-logging/docs/index.rst | 121 ++---- packages/google-cloud-logging/docs/logger.rst | 6 + packages/google-cloud-logging/docs/metric.rst | 6 + packages/google-cloud-logging/docs/sink.rst | 6 + .../google-cloud-logging/docs/snippets.py | 408 ++++++++++++++++++ .../docs/stdlib-usage.rst | 70 +++ .../docs/transports-base.rst | 6 + .../docs/transports-sync.rst | 6 + .../docs/transports-thread.rst | 7 + packages/google-cloud-logging/docs/usage.rst | 347 +++++++++++++++ 20 files changed, 991 insertions(+), 140 deletions(-) create mode 120000 packages/google-cloud-logging/docs/changelog.md create mode 100644 packages/google-cloud-logging/docs/client.rst create mode 100644 packages/google-cloud-logging/docs/entries.rst delete mode 100644 packages/google-cloud-logging/docs/gapic/v2/api.rst delete mode 100644 packages/google-cloud-logging/docs/gapic/v2/types.rst create mode 100644 packages/google-cloud-logging/docs/handlers-app-engine.rst create mode 100644 packages/google-cloud-logging/docs/handlers-container-engine.rst create mode 100644 packages/google-cloud-logging/docs/handlers.rst create mode 100644 packages/google-cloud-logging/docs/logger.rst create mode 100644 packages/google-cloud-logging/docs/metric.rst create mode 100644 packages/google-cloud-logging/docs/sink.rst create mode 100644 packages/google-cloud-logging/docs/snippets.py create mode 100644 packages/google-cloud-logging/docs/stdlib-usage.rst create mode 100644 packages/google-cloud-logging/docs/transports-base.rst create mode 100644 packages/google-cloud-logging/docs/transports-sync.rst create mode 100644 packages/google-cloud-logging/docs/transports-thread.rst create mode 100644 packages/google-cloud-logging/docs/usage.rst diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 7b95278e8351..d79fe8029254 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -44,12 +44,12 @@ ## 1.5.0 -# New features +### New features - Added `max_latency` to `BackgroundThreadTransport`. (#4762) - Added support for unique writer identity in `Sink`. (#4595, #4708, #4704, #4706) -# Implementation changes +### Implementation changes - The underlying auto-generated client library was re-generated to pick up new features and bugfixes. (#4759) - Moved the code path of `get_gae_labels()` to `emit()`. (#4824) @@ -57,7 +57,7 @@ - `LogSink.create` captures the server-generated `writerIdentity`. (#4707) - Accommodated a back-end change making `Sink.filter` optional. (#4699) -# Testing +### Testing - Fixed system tests (#4768) - Hardened test for `retrieve_metadata_server` against transparent DNS proxies. (#4698) @@ -67,7 +67,7 @@ - Made a `nox -s default` session for all packages. (#4324) - Shortened test names. (#4321) -# Documentation +### Documentation - Added doc to highlight missing `uniqueWriterIdentity` field. (#4579) - Fixing "Fore" -> "For" typo in README docs.
(#4317) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index f12d764830b1..a0bc55247761 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -1,51 +1,82 @@ Python Client for Stackdriver Logging ===================================== - Python idiomatic client for `Stackdriver Logging`_ - -.. _Stackdriver Logging: https://cloud.google.com/logging/ - |pypi| |versions| -- `Documentation`_ +`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver +Logging configuration. -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg + :target: https://pypi.org/project/google-cloud-logging/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg + :target: https://pypi.org/project/google-cloud-logging/ +.. _Stackdriver Logging API: https://cloud.google.com/logging +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html +.. _Product Documentation: https://cloud.google.com/logging/docs Quick Start ----------- -.. code-block:: console +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Stackdriver Logging API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. - $ pip install --upgrade google-cloud-logging +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup +Mac/Linux +^^^^^^^^^ -**Note**: The creation of cross project sinks (log exports) is not currenlty supported. You may only create sinks within the same project set for the client. In other words, the parameter `uniqueWriterIdentity`_ is not yet available. +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-logging -.. _uniqueWriterIdentity: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create -Authentication --------------- +Windows +^^^^^^^ -With ``google-cloud-python`` we try to make authentication as painless as -possible. Check out the `Authentication section`_ in our documentation to -learn more.
You may also find the `authentication document`_ shared by all -the ``google-cloud-*`` libraries to be helpful. +.. code-block:: console -.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-logging Using the API ------------- -`Stackdriver Logging`_ API (`Logging API docs`_) allows you to store, search, -analyze, monitor, and alert on log data and events from Google Cloud Platform. +.. code:: python -.. _Logging API docs: https://cloud.google.com/logging/docs/ + from google.cloud import logging_v2 + + client = logging_v2.LoggingServiceV2Client() + entries = [] + response = client.write_log_entries(entries) .. code:: python @@ -58,15 +89,16 @@ Example of fetching entries: .. code:: python + from google.cloud import logging + client = logging.Client() + logger = client.logger('log_name') for entry in logger.list_entries(): print(entry.payload) -See the ``google-cloud-python`` API `logging documentation`_ to learn how to -connect to Stackdriver Logging using this Client Library. - -.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html +Next Steps +~~~~~~~~~~ -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg - :target: https://pypi.org/project/google-cloud-logging/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg - :target: https://pypi.org/project/google-cloud-logging/ +- Read the `Client Library Documentation`_ to see other available + methods on the client. +- Read the `Product Documentation`_ to learn more about the product and see + How-to Guides. diff --git a/packages/google-cloud-logging/docs/changelog.md b/packages/google-cloud-logging/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-logging/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/client.rst b/packages/google-cloud-logging/docs/client.rst new file mode 100644 index 000000000000..f04d5c5255f1 --- /dev/null +++ b/packages/google-cloud-logging/docs/client.rst @@ -0,0 +1,6 @@ +Stackdriver Logging Client +========================== + +.. automodule:: google.cloud.logging.client + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/entries.rst b/packages/google-cloud-logging/docs/entries.rst new file mode 100644 index 000000000000..d384e9227ca5 --- /dev/null +++ b/packages/google-cloud-logging/docs/entries.rst @@ -0,0 +1,6 @@ +Entries +======= + +.. automodule:: google.cloud.logging.entries + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/gapic/v2/api.rst b/packages/google-cloud-logging/docs/gapic/v2/api.rst deleted file mode 100644 index 2dc6bf6fcc6b..000000000000 --- a/packages/google-cloud-logging/docs/gapic/v2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Stackdriver Logging API -================================== - -..
automodule:: google.cloud.logging_v2 - :members: - :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/gapic/v2/types.rst b/packages/google-cloud-logging/docs/gapic/v2/types.rst deleted file mode 100644 index 5521d4f9bc12..000000000000 --- a/packages/google-cloud-logging/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Stackdriver Logging API Client -======================================== - -.. automodule:: google.cloud.logging_v2.types - :members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/handlers-app-engine.rst b/packages/google-cloud-logging/docs/handlers-app-engine.rst new file mode 100644 index 000000000000..71c45e3690be --- /dev/null +++ b/packages/google-cloud-logging/docs/handlers-app-engine.rst @@ -0,0 +1,6 @@ +Google App Engine flexible Log Handler +====================================== + +.. automodule:: google.cloud.logging.handlers.app_engine + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers-container-engine.rst b/packages/google-cloud-logging/docs/handlers-container-engine.rst new file mode 100644 index 000000000000..a0c6b2bc9228 --- /dev/null +++ b/packages/google-cloud-logging/docs/handlers-container-engine.rst @@ -0,0 +1,6 @@ +Google Container Engine Log Handler +=================================== + +.. automodule:: google.cloud.logging.handlers.container_engine + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers.rst b/packages/google-cloud-logging/docs/handlers.rst new file mode 100644 index 000000000000..1a258a88a541 --- /dev/null +++ b/packages/google-cloud-logging/docs/handlers.rst @@ -0,0 +1,6 @@ +Python Logging Module Handler +============================== + +.. automodule:: google.cloud.logging.handlers.handlers + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index fb584fa1ae8f..0b0c45a7fe01 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -1,99 +1,36 @@ -Python Client for Stackdriver Logging API (`Beta`_) -=================================================== +.. include:: /../logging/README.rst -`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver Logging configuration. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst -.. _Stackdriver Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html -.. _Product Documentation: https://cloud.google.com/logging - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Stackdriver Logging API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - source /bin/activate - /bin/pip install google-cloud-logging - - -Windows -^^^^^^^ - -.. code-block:: console - - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-logging - -Preview -~~~~~~~ - -LoggingServiceV2Client -^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: py - - from google.cloud import logging_v2 - - client = logging_v2.LoggingServiceV2Client() - - entries = [] - - response = client.write_log_entries(entries) - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for Stackdriver Logging API - API to see other available methods on the client. -- Read the `Stackdriver Logging API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud - APIs that we cover. +Usage Documentation +------------------- +.. toctree:: + :maxdepth: 2 -.. _Stackdriver Logging API Product documentation: https://cloud.google.com/logging -.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst + usage Api Reference ------------- .. toctree:: - :maxdepth: 2 + :maxdepth: 2 + + client + logger + entries + metric + sink + stdlib-usage + handlers + handlers-app-engine + handlers-container-engine + transports-sync + transports-thread + transports-base + +Changelog +~~~~~~~~~ + +For a list of all ``google-cloud-logging`` releases: + +.. toctree:: + :maxdepth: 2 - gapic/v2/api - gapic/v2/types + changelog diff --git a/packages/google-cloud-logging/docs/logger.rst b/packages/google-cloud-logging/docs/logger.rst new file mode 100644 index 000000000000..72533ba33774 --- /dev/null +++ b/packages/google-cloud-logging/docs/logger.rst @@ -0,0 +1,6 @@ +Logger +====== + +.. automodule:: google.cloud.logging.logger + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/metric.rst b/packages/google-cloud-logging/docs/metric.rst new file mode 100644 index 000000000000..ca30e3c89eca --- /dev/null +++ b/packages/google-cloud-logging/docs/metric.rst @@ -0,0 +1,6 @@ +Metrics +======= + +.. automodule:: google.cloud.logging.metric + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/sink.rst b/packages/google-cloud-logging/docs/sink.rst new file mode 100644 index 000000000000..35e88562bbee --- /dev/null +++ b/packages/google-cloud-logging/docs/sink.rst @@ -0,0 +1,6 @@ +Sinks +===== + +.. automodule:: google.cloud.logging.sink + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py new file mode 100644 index 000000000000..eabc46fa8073 --- /dev/null +++ b/packages/google-cloud-logging/docs/snippets.py @@ -0,0 +1,408 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Testable usage examples for Stackdriver Logging API wrapper + +Each example function takes a ``client`` argument (which must be an instance +of :class:`google.cloud.logging.client.Client`) and uses it to perform a task +with the API. + +To facilitate running the examples as system tests, each example is also passed +a ``to_delete`` list; the function adds to the list any objects created which +need to be deleted during teardown. +""" + +import time + +from google.cloud.logging.client import Client + + +def snippet(func): + """Mark ``func`` as a snippet example function.""" + func._snippet = True + return func + + +def _millis(): + return time.time() * 1000 + + +def do_something_with(item): # pylint: disable=unused-argument + pass + + +# pylint: disable=reimported,unused-variable,unused-argument +@snippet +def instantiate_client(_unused_client, _unused_to_delete): + """Instantiate client.""" + + # [START client_create_default] + from google.cloud import logging + client = logging.Client() + # [END client_create_default] + + credentials = object() + # [START client_create_explicit] + from google.cloud import logging + client = logging.Client(project='my-project', credentials=credentials) + # [END client_create_explicit] +# pylint: enable=reimported,unused-variable,unused-argument + + +@snippet +def client_list_entries(client, to_delete): # pylint: disable=unused-argument + """List entries via client.""" + + # [START client_list_entries_default] + for entry in client.list_entries(): # API call(s) + do_something_with(entry) + # [END client_list_entries_default] + + # [START client_list_entries_filter] + FILTER = 'logName:log_name AND textPayload:simple' + for entry in client.list_entries(filter_=FILTER): # API call(s) + do_something_with(entry) + # [END client_list_entries_filter] + + # [START client_list_entries_order_by] + from google.cloud.logging import DESCENDING + for entry in client.list_entries(order_by=DESCENDING): # API call(s) + do_something_with(entry) + # [END client_list_entries_order_by] + + # [START client_list_entries_paged] + iterator = client.list_entries() + pages = iterator.pages + + page1 = next(pages) # API call + for entry in page1: + do_something_with(entry) + + page2 = next(pages) # API call + for entry in page2: + do_something_with(entry) + # [END client_list_entries_paged] + + +# @snippet Commented because we need real project IDs to test +def client_list_entries_multi_project( + client, to_delete): # pylint: disable=unused-argument + """List entries via client across multiple projects.""" + + # [START client_list_entries_multi_project] + PROJECT_IDS = ['one-project', 'another-project'] + for entry in client.list_entries(project_ids=PROJECT_IDS): # API call(s) + do_something_with(entry) + # [END client_list_entries_multi_project] + + +@snippet +def logger_usage(client, to_delete): + """Logger usage.""" + LOG_NAME = 'logger_usage_%d' % (_millis()) + + # [START logger_create] + logger = client.logger(LOG_NAME) + # [END logger_create] + to_delete.append(logger) + + # [START logger_log_text] + logger.log_text("A simple entry") # API call + # [END 
logger_log_text] + + # [START logger_log_struct] + logger.log_struct({ + 'message': 'My second entry', + 'weather': 'partly cloudy', + }) # API call + # [END logger_log_struct] + + # [START logger_list_entries] + from google.cloud.logging import DESCENDING + for entry in logger.list_entries(order_by=DESCENDING): # API call(s) + do_something_with(entry) + # [END logger_list_entries] + + def _logger_delete(): + # [START logger_delete] + logger.delete() # API call + # [END logger_delete] + + _backoff_not_found(_logger_delete) + to_delete.remove(logger) + + +@snippet +def metric_crud(client, to_delete): + """Metric CRUD.""" + METRIC_NAME = 'robots-%d' % (_millis(),) + DESCRIPTION = "Robots all up in your server" + FILTER = 'logName:apache-access AND textPayload:robot' + UPDATED_FILTER = 'textPayload:robot' + UPDATED_DESCRIPTION = "Danger, Will Robinson!" + + # [START client_list_metrics] + for metric in client.list_metrics(): # API call(s) + do_something_with(metric) + # [END client_list_metrics] + + # [START metric_create] + metric = client.metric( + METRIC_NAME, filter_=FILTER, description=DESCRIPTION) + assert not metric.exists() # API call + metric.create() # API call + assert metric.exists() # API call + # [END metric_create] + to_delete.append(metric) + + # [START metric_reload] + existing_metric = client.metric(METRIC_NAME) + existing_metric.reload() # API call + # [END metric_reload] + assert existing_metric.filter_ == FILTER + assert existing_metric.description == DESCRIPTION + + # [START metric_update] + existing_metric.filter_ = UPDATED_FILTER + existing_metric.description = UPDATED_DESCRIPTION + existing_metric.update() # API call + # [END metric_update] + existing_metric.reload() + assert existing_metric.filter_ == UPDATED_FILTER + assert existing_metric.description == UPDATED_DESCRIPTION + + def _metric_delete(): + # [START metric_delete] + metric.delete() + # [END metric_delete] + + _backoff_not_found(_metric_delete) + to_delete.remove(metric) + + +def _sink_storage_setup(client): + from google.cloud import storage + BUCKET_NAME = 'sink-storage-%d' % (_millis(),) + client = storage.Client() + bucket = client.bucket(BUCKET_NAME) + bucket.create() + + # [START sink_bucket_permissions] + bucket.acl.reload() # API call + logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group.grant_owner() + bucket.acl.add_entity(logs_group) + bucket.acl.save() # API call + # [END sink_bucket_permissions] + + return bucket + + +@snippet +def sink_storage(client, to_delete): + """Sink log entries to storage.""" + bucket = _sink_storage_setup(client) + to_delete.append(bucket) + SINK_NAME = 'robots-storage-%d' % (_millis(),) + FILTER = 'textPayload:robot' + + # [START sink_storage_create] + DESTINATION = 'storage.googleapis.com/%s' % (bucket.name,) + sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + assert not sink.exists() # API call + sink.create() # API call + assert sink.exists() # API call + # [END sink_storage_create] + to_delete.insert(0, sink) # delete sink before bucket + + +def _sink_bigquery_setup(client): + from google.cloud import bigquery + DATASET_NAME = 'sink_bigquery_%d' % (_millis(),) + client = bigquery.Client() + dataset = client.dataset(DATASET_NAME) + dataset.create() + dataset.reload() + + # [START sink_dataset_permissions] + from google.cloud.bigquery.dataset import AccessGrant + grants = dataset.access_grants + grants.append(AccessGrant( + 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) + dataset.access_grants = grants + dataset.update() 
# API call + # [END sink_dataset_permissions] + + return dataset + + +@snippet +def sink_bigquery(client, to_delete): + """Sink log entries to bigquery.""" + dataset = _sink_bigquery_setup(client) + to_delete.append(dataset) + SINK_NAME = 'robots-bigquery-%d' % (_millis(),) + FILTER = 'textPayload:robot' + + # [START sink_bigquery_create] + DESTINATION = 'bigquery.googleapis.com%s' % (dataset.path,) + sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + assert not sink.exists() # API call + sink.create() # API call + assert sink.exists() # API call + # [END sink_bigquery_create] + to_delete.insert(0, sink) # delete sink before dataset + + +def _sink_pubsub_setup(client): + from google.cloud import pubsub + TOPIC_NAME = 'sink-pubsub-%d' % (_millis(),) + client = pubsub.Client() + topic = client.topic(TOPIC_NAME) + topic.create() + + # [START sink_topic_permissions] + policy = topic.get_iam_policy() # API call + policy.owners.add(policy.group('cloud-logs@google.com')) + topic.set_iam_policy(policy) # API call + # [END sink_topic_permissions] + + return topic + + +@snippet +def sink_pubsub(client, to_delete): + """Sink log entries to pubsub.""" + topic = _sink_pubsub_setup(client) + to_delete.append(topic) + SINK_NAME = 'robots-pubsub-%d' % (_millis(),) + FILTER = 'logName:apache-access AND textPayload:robot' + UPDATED_FILTER = 'textPayload:robot' + + # [START sink_pubsub_create] + DESTINATION = 'pubsub.googleapis.com/%s' % (topic.full_name,) + sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + assert not sink.exists() # API call + sink.create() # API call + assert sink.exists() # API call + # [END sink_pubsub_create] + to_delete.insert(0, sink) # delete sink before topic + + # [START client_list_sinks] + for sink in client.list_sinks(): # API call(s) + do_something_with(sink) + # [END client_list_sinks] + + # [START sink_reload] + existing_sink = client.sink(SINK_NAME) + existing_sink.reload() + # [END sink_reload] + assert existing_sink.filter_ == FILTER + assert existing_sink.destination == DESTINATION + + # [START sink_update] + existing_sink.filter_ = UPDATED_FILTER + existing_sink.update() + # [END sink_update] + existing_sink.reload() + assert existing_sink.filter_ == UPDATED_FILTER + + # [START sink_delete] + sink.delete() + # [END sink_delete] + to_delete.pop(0) + + +@snippet +def logging_handler(client): + # [START create_default_handler] + import logging + handler = client.get_default_handler() + cloud_logger = logging.getLogger('cloudLogger') + cloud_logger.setLevel(logging.INFO) + cloud_logger.addHandler(handler) + cloud_logger.error('bad news') + # [END create_default_handler] + + # [START create_cloud_handler] + from google.cloud.logging.handlers import CloudLoggingHandler + handler = CloudLoggingHandler(client) + cloud_logger = logging.getLogger('cloudLogger') + cloud_logger.setLevel(logging.INFO) + cloud_logger.addHandler(handler) + cloud_logger.error('bad news') + # [END create_cloud_handler] + + # [START create_named_handler] + handler = CloudLoggingHandler(client, name='mycustomlog') + # [END create_named_handler] + + +@snippet +def setup_logging(client): + import logging + # [START setup_logging] + client.setup_logging(log_level=logging.INFO) + # [END setup_logging] + + # [START setup_logging_excludes] + client.setup_logging(log_level=logging.INFO, + excluded_loggers=('werkzeug',)) + # [END setup_logging_excludes] + + +def _line_no(func): + return func.__code__.co_firstlineno + + +def _find_examples(): + funcs = [obj for obj in 
globals().values() + if getattr(obj, '_snippet', False)] + for func in sorted(funcs, key=_line_no): + yield func + + +def _name_and_doc(func): + return func.__name__, func.__doc__ + + +def _backoff_not_found(deleter): + from google.cloud.exceptions import NotFound + timeouts = [1, 2, 4, 8, 16] + while timeouts: + try: + deleter() + except NotFound: + time.sleep(timeouts.pop(0)) + else: + break + + +def main(): + client = Client() + for example in _find_examples(): + to_delete = [] + print('%-25s: %s' % _name_and_doc(example)) + try: + example(client, to_delete) + except AssertionError as failure: + print(' FAIL: %s' % (failure,)) + except Exception as error: # pylint: disable=broad-except + print(' ERROR: %r' % (error,)) + for item in to_delete: + _backoff_not_found(item.delete) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-logging/docs/stdlib-usage.rst b/packages/google-cloud-logging/docs/stdlib-usage.rst new file mode 100644 index 000000000000..cba4080b5f5e --- /dev/null +++ b/packages/google-cloud-logging/docs/stdlib-usage.rst @@ -0,0 +1,70 @@ +Integration with Python logging module +-------------------------------------- + + +It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it, +create a :class:`CloudLoggingHandler <google.cloud.logging.handlers.CloudLoggingHandler>` instance from your +Logging client. + +.. code-block:: python + + >>> import logging + >>> import google.cloud.logging # Don't conflict with standard logging + >>> from google.cloud.logging.handlers import CloudLoggingHandler + >>> client = google.cloud.logging.Client() + >>> handler = CloudLoggingHandler(client) + >>> cloud_logger = logging.getLogger('cloudLogger') + >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN + >>> cloud_logger.addHandler(handler) + >>> cloud_logger.error('bad news') + +.. note:: + + This handler by default uses an asynchronous transport that sends log entries on a background + thread. However, the API call will still be made in the same process. For other transport + options, see the transports section. + +All logs will go to a single custom log, which defaults to "python". The name of the Python +logger will be included in the structured log entry under the "python_logger" field. You can +change it by providing a name to the handler: + +.. code-block:: python + + >>> handler = CloudLoggingHandler(client, name="mycustomlog") + +It is also possible to attach the handler to the root Python logger, so that, for example, a plain +``logging.warn`` call would be sent to Cloud Logging, as well as any other loggers created. However, +you must avoid infinite recursion from the logging calls the client itself makes. A helper +method :meth:`setup_logging <google.cloud.logging.handlers.setup_logging>` is provided to configure +this automatically: + +.. code-block:: python + + >>> import logging + >>> import google.cloud.logging # Don't conflict with standard logging + >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging + >>> client = google.cloud.logging.Client() + >>> handler = CloudLoggingHandler(client) + >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN + >>> setup_logging(handler) + >>> logging.error('bad news') + +You can also exclude certain loggers: + +.. code-block:: python + + >>> setup_logging(handler, excluded_loggers=('werkzeug',)) + + + +Python logging handler transports +================================== + +The Python logging handler can use different transports. The default is +:class:`google.cloud.logging.handlers.BackgroundThreadTransport`. + + 1.
:class:`google.cloud.logging.handlers.BackgroundThreadTransport`, the default: it writes + entries on a background :class:`threading.Thread`. + + 2. :class:`google.cloud.logging.handlers.SyncTransport`: makes a direct API call on each + logging statement to write the entry. diff --git a/packages/google-cloud-logging/docs/transports-base.rst b/packages/google-cloud-logging/docs/transports-base.rst new file mode 100644 index 000000000000..5b52c46cadcb --- /dev/null +++ b/packages/google-cloud-logging/docs/transports-base.rst @@ -0,0 +1,6 @@ +Python Logging Handler Base Transport +====================================== + +.. automodule:: google.cloud.logging.handlers.transports.base + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/transports-sync.rst b/packages/google-cloud-logging/docs/transports-sync.rst new file mode 100644 index 000000000000..edb2b72f578d --- /dev/null +++ b/packages/google-cloud-logging/docs/transports-sync.rst @@ -0,0 +1,6 @@ +Python Logging Handler Sync Transport +====================================== + +.. automodule:: google.cloud.logging.handlers.transports.sync + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/transports-thread.rst b/packages/google-cloud-logging/docs/transports-thread.rst new file mode 100644 index 000000000000..45780b27fe42 --- /dev/null +++ b/packages/google-cloud-logging/docs/transports-thread.rst @@ -0,0 +1,7 @@ +Python Logging Handler Threaded Transport +========================================= + + +.. automodule:: google.cloud.logging.handlers.transports.background_thread + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst new file mode 100644 index 000000000000..0204a584dd73 --- /dev/null +++ b/packages/google-cloud-logging/docs/usage.rst @@ -0,0 +1,347 @@ +Writing log entries +------------------- + +To write log entries, first create a +:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with +which to associate the entries: + +.. literalinclude:: snippets.py + :start-after: [START logger_create] + :end-before: [END logger_create] + :dedent: 4 + +Write a simple text entry to the logger. + +.. literalinclude:: snippets.py + :start-after: [START logger_log_text] + :end-before: [END logger_log_text] + :dedent: 4 + +Write a dictionary entry to the logger. + +.. literalinclude:: snippets.py + :start-after: [START logger_log_struct] + :end-before: [END logger_log_struct] + :dedent: 4 + + +Retrieving log entries +---------------------- + +Fetch entries for the default project. + +.. literalinclude:: snippets.py + :start-after: [START client_list_entries_default] + :end-before: [END client_list_entries_default] + :dedent: 4 + +Entries returned by +:meth:`Client.list_entries <google.cloud.logging.client.Client.list_entries>` +or +:meth:`Logger.list_entries <google.cloud.logging.logger.Logger.list_entries>` +will be instances of one of the following classes: + +- :class:`~google.cloud.logging.entries.TextEntry` +- :class:`~google.cloud.logging.entries.StructEntry` +- :class:`~google.cloud.logging.entries.ProtobufEntry` + +Fetch entries across multiple projects. + +.. literalinclude:: snippets.py + :start-after: [START client_list_entries_multi_project] + :end-before: [END client_list_entries_multi_project] + :dedent: 4 + +Filter entries retrieved using the `Advanced Logs Filters`_ syntax. + +.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters + +Fetch entries matching the filter. + +..
literalinclude:: snippets.py + :start-after: [START client_list_entries_filter] + :end-before: [END client_list_entries_filter] + :dedent: 4 + +Sort entries in descending timestamp order. + +.. literalinclude:: snippets.py + :start-after: [START client_list_entries_order_by] + :end-before: [END client_list_entries_order_by] + :dedent: 4 + +Retrieve entries in batches of 10, iterating until done. + +.. literalinclude:: snippets.py + :start-after: [START client_list_entries_paged] + :end-before: [END client_list_entries_paged] + :dedent: 4 + +Retrieve entries for a single logger, sorting in descending timestamp order: + +.. literalinclude:: snippets.py + :start-after: [START logger_list_entries] + :end-before: [END logger_list_entries] + :dedent: 4 + + +Delete all entries for a logger +------------------------------- + +.. literalinclude:: snippets.py + :start-after: [START logger_delete] + :end-before: [END logger_delete] + :dedent: 8 + + +Manage log metrics +------------------ + +Metrics are counters of entries which match a given filter. They can be +used within Stackdriver Monitoring to create charts and alerts. + +List all metrics for a project: + +.. literalinclude:: snippets.py + :start-after: [START client_list_metrics] + :end-before: [END client_list_metrics] + :dedent: 4 + +Create a metric: + +.. literalinclude:: snippets.py + :start-after: [START metric_create] + :end-before: [END metric_create] + :dedent: 4 + +Refresh local information about a metric: + +.. literalinclude:: snippets.py + :start-after: [START metric_reload] + :end-before: [END metric_reload] + :dedent: 4 + +Update a metric: + +.. literalinclude:: snippets.py + :start-after: [START metric_update] + :end-before: [END metric_update] + :dedent: 4 + +Delete a metric: + +.. literalinclude:: snippets.py + :start-after: [START metric_delete] + :end-before: [END metric_delete] + :dedent: 4 + +Export log entries using sinks +------------------------------ + +Sinks allow exporting entries which match a given filter to Cloud Storage +buckets, BigQuery datasets, or Cloud Pub/Sub topics. + +Export to Cloud Storage +~~~~~~~~~~~~~~~~~~~~~~~ + +Make sure that the storage bucket you want to export logs to has +``cloud-logs@google.com`` as the owner. See +`Setting permissions for Cloud Storage`_. + +.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage + +Add ``cloud-logs@google.com`` as the owner of the bucket: + +.. literalinclude:: snippets.py + :start-after: [START sink_bucket_permissions] + :end-before: [END sink_bucket_permissions] + :dedent: 4 + +Create a Cloud Storage sink: + +.. literalinclude:: snippets.py + :start-after: [START sink_storage_create] + :end-before: [END sink_storage_create] + :dedent: 4 + + +Export to BigQuery +~~~~~~~~~~~~~~~~~~ + +To export logs to BigQuery you must log into the Cloud Platform Console +and add ``cloud-logs@google.com`` to a dataset. + +See: `Setting permissions for BigQuery`_ + +.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery + +.. literalinclude:: snippets.py + :start-after: [START sink_dataset_permissions] + :end-before: [END sink_dataset_permissions] + :dedent: 4 + +Create a BigQuery sink: + +..
literalinclude:: snippets.py + :start-after: [START sink_bigquery_create] + :end-before: [END sink_bigquery_create] + :dedent: 4 + + +Export to Pub/Sub +~~~~~~~~~~~~~~~~~ + +To export logs to Pub/Sub you must log into the Cloud Platform Console +and add ``cloud-logs@google.com`` to a topic. + +See: `Setting permissions for Pub/Sub`_ + +.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub + +.. literalinclude:: snippets.py + :start-after: [START sink_topic_permissions] + :end-before: [END sink_topic_permissions] + :dedent: 4 + +Create a Cloud Pub/Sub sink: + +.. literalinclude:: snippets.py + :start-after: [START sink_pubsub_create] + :end-before: [END sink_pubsub_create] + :dedent: 4 + +Manage Sinks +~~~~~~~~~~~~ + +List all sinks for a project: + +.. literalinclude:: snippets.py + :start-after: [START client_list_sinks] + :end-before: [END client_list_sinks] + :dedent: 4 + +Refresh local information about a sink: + +.. literalinclude:: snippets.py + :start-after: [START sink_reload] + :end-before: [END sink_reload] + :dedent: 4 + +Update a sink: + +.. literalinclude:: snippets.py + :start-after: [START sink_update] + :end-before: [END sink_update] + :dedent: 4 + +Delete a sink: + +.. literalinclude:: snippets.py + :start-after: [START sink_delete] + :end-before: [END sink_delete] + :dedent: 4 + +Integration with Python logging module +-------------------------------------- + +It's possible to tie the Python :mod:`logging` module directly into Google +Stackdriver Logging. There are different handler options to accomplish this. +To automatically pick the default for your current environment, use +:meth:`~google.cloud.logging.client.Client.get_default_handler`. + +.. literalinclude:: snippets.py + :start-after: [START create_default_handler] + :end-before: [END create_default_handler] + :dedent: 4 + +It is also possible to attach the handler to the root Python logger, so that +for example a plain ``logging.warn`` call would be sent to Stackdriver Logging, +as well as any other loggers created. A helper method +:meth:`~google.cloud.logging.client.Client.setup_logging` is provided +to configure this automatically. + +.. literalinclude:: snippets.py + :start-after: [START setup_logging] + :end-before: [END setup_logging] + :dedent: 4 + +.. note:: + + To reduce cost and quota usage, do not enable Stackdriver logging + handlers while testing locally. + +You can also exclude certain loggers: + +.. literalinclude:: snippets.py + :start-after: [START setup_logging_excludes] + :end-before: [END setup_logging_excludes] + :dedent: 4 + +Cloud Logging Handler +~~~~~~~~~~~~~~~~~~~~~ + +If you prefer not to use +:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can +directly create a +:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance +which will write directly to the API. + +.. literalinclude:: snippets.py + :start-after: [START create_cloud_handler] + :end-before: [END create_cloud_handler] + :dedent: 4 + +.. note:: + + This handler by default uses an asynchronous transport that sends log + entries on a background thread. However, the API call will still be made + in the same process. For other transport options, see the transports + section. + +All logs will go to a single custom log, which defaults to "python". The name +of the Python logger will be included in the structured log entry under the +"python_logger" field.
You can change it by providing a name to the handler: + +.. literalinclude:: snippets.py + :start-after: [START create_named_handler] + :end-before: [END create_named_handler] + :dedent: 4 + +Cloud Logging Handler transports +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` +logging handler can use different transports. The default is +:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. + + 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport`, the + default: it writes entries on a background + :class:`threading.Thread`. + + 2. :class:`~google.cloud.logging.handlers.SyncTransport`: makes a + direct API call on each logging statement to write the entry. + + +.. _Google Container Engine: https://cloud.google.com/container-engine/ + +fluentd logging handlers +~~~~~~~~~~~~~~~~~~~~~~~~ + +Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, +which writes directly to the API, two other handlers are provided: +:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is +recommended when running on the Google App Engine flexible vanilla runtimes +(i.e. your app.yaml contains ``runtime: python``), and +:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler`, +which is recommended when running on `Google Container Engine`_ with the +Stackdriver Logging plugin enabled. + +:meth:`~google.cloud.logging.client.Client.get_default_handler` and +:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use +the environment to automatically detect whether the code is running on +these platforms and use the appropriate handler. A short sketch of making +this selection explicitly appears below. + +In both cases, the fluentd agent is configured to automatically parse log files +in an expected format and forward them to Stackdriver Logging. The handlers +provided help set the correct metadata such as log level so that logs can be +filtered accordingly. From 411041d5c185bb28bca78f3f028bb7b563509de5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 17 Sep 2018 12:56:03 -0400 Subject: [PATCH 182/855] Redirect renamed 'usage.html'/'client.html' -> 'index.html'. (#5996) Closes #5995. --- packages/google-cloud-logging/docs/usage.html | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 packages/google-cloud-logging/docs/usage.html diff --git a/packages/google-cloud-logging/docs/usage.html b/packages/google-cloud-logging/docs/usage.html new file mode 100644 index 000000000000..9b81d6976cda --- /dev/null +++ b/packages/google-cloud-logging/docs/usage.html @@ -0,0 +1,8 @@ + + + + + + From de92f0933750c94d23a76d4274d80675da2e5a2c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 17 Sep 2018 16:37:43 -0400 Subject: [PATCH 183/855] Translate / Logging / Language: restore detailed usage docs. (#5999) See #5996, which shadowed existing `usage.rst` with `usage.html` redirect pages in error.
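As the usage document above explains, ``Client.get_default_handler`` inspects the environment to choose among these handlers. A minimal sketch of making the same selection by hand; the ``on_gke`` flag is a hypothetical stand-in for the metadata-server detection the client actually performs:

.. code-block:: python

    import logging

    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging.handlers.container_engine import (
        ContainerEngineHandler)

    client = google.cloud.logging.Client()

    on_gke = False  # hypothetical flag; the client checks the metadata server
    if on_gke:
        # On Container Engine the fluentd agent parses formatted stdout,
        # so this handler needs no API client.
        handler = ContainerEngineHandler()
    else:
        # Elsewhere, write directly to the API.
        handler = CloudLoggingHandler(client)

    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.INFO)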
--- packages/google-cloud-logging/docs/usage.html | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 packages/google-cloud-logging/docs/usage.html diff --git a/packages/google-cloud-logging/docs/usage.html b/packages/google-cloud-logging/docs/usage.html deleted file mode 100644 index 9b81d6976cda..000000000000 --- a/packages/google-cloud-logging/docs/usage.html +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - From 4443befc52668a9f90c1d5998326799975438c09 Mon Sep 17 00:00:00 2001 From: Phillip Pearson Date: Thu, 27 Sep 2018 16:15:13 -0700 Subject: [PATCH 184/855] Logging: support request-correlated logging in App Engine standard python37 runtime (#6118) The python37 runtime provides the GOOGLE_CLOUD_PROJECT environment variable instead of GCLOUD_PROJECT, and for log messages to be correlated with the request in StackDriver, their 'trace' value has to be passed as a top level member of the log record rather than as an appengine.googleapis.com/trace_id label. --- .../google/cloud/logging/client.py | 6 ++--- .../cloud/logging/handlers/app_engine.py | 23 +++++++++++++++---- .../tests/unit/handlers/test_app_engine.py | 18 +++++++++------ 3 files changed, 32 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 5be04dab6d37..1e854664c7d3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -49,8 +49,8 @@ _APPENGINE_FLEXIBLE_ENV_VM = 'GAE_APPENGINE_HOSTNAME' """Environment variable set in App Engine when vm:true is set.""" -_APPENGINE_FLEXIBLE_ENV_FLEX = 'GAE_INSTANCE' -"""Environment variable set in App Engine when env:flex is set.""" +_APPENGINE_INSTANCE_ID = 'GAE_INSTANCE' +"""Environment variable set in App Engine standard and flexible environment.""" _GKE_CLUSTER_NAME = 'instance/attributes/cluster-name' """Attribute in metadata server when in GKE environment.""" @@ -301,7 +301,7 @@ def get_default_handler(self): gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or - _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ): + _APPENGINE_INSTANCE_ID in os.environ): return AppEngineHandler(self) elif gke_cluster_name is not None: return ContainerEngineHandler() diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 1b0101cbf63d..5f1334300e80 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -27,7 +27,8 @@ _DEFAULT_GAE_LOGGER_NAME = 'app' -_GAE_PROJECT_ENV = 'GCLOUD_PROJECT' +_GAE_PROJECT_ENV_FLEX = 'GCLOUD_PROJECT' +_GAE_PROJECT_ENV_STANDARD = 'GOOGLE_CLOUD_PROJECT' _GAE_SERVICE_ENV = 'GAE_SERVICE' _GAE_VERSION_ENV = 'GAE_VERSION' @@ -54,6 +55,11 @@ def __init__(self, client, self.name = name self.client = client self.transport = transport(client, name) + self.project_id = os.environ.get( + _GAE_PROJECT_ENV_FLEX, + os.environ.get(_GAE_PROJECT_ENV_STANDARD, '')) + self.module_id = os.environ.get(_GAE_SERVICE_ENV, '') + self.version_id = os.environ.get(_GAE_VERSION_ENV, '') self.resource = self.get_gae_resource() def get_gae_resource(self): @@ -65,9 +71,9 @@ def get_gae_resource(self): gae_resource = Resource( type='gae_app', labels={ - 'project_id': os.environ.get(_GAE_PROJECT_ENV), - 'module_id': 
os.environ.get(_GAE_SERVICE_ENV), - 'version_id': os.environ.get(_GAE_VERSION_ENV), + 'project_id': self.project_id, + 'module_id': self.module_id, + 'version_id': self.version_id, }, ) return gae_resource @@ -100,8 +106,15 @@ def emit(self, record): :param record: The record to be logged. """ message = super(AppEngineHandler, self).format(record) + gae_labels = self.get_gae_labels() + trace_id = ('projects/%s/traces/%s' % (self.project_id, + gae_labels[_TRACE_ID_LABEL]) + if _TRACE_ID_LABEL in gae_labels + else None) self.transport.send( record, message, resource=self.resource, - labels=self.get_gae_labels()) + labels=gae_labels, + trace=trace_id, + ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index df933bd19c01..4376e03e57ba 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -30,15 +30,18 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): - from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV + from google.cloud.logging.handlers.app_engine import ( + _GAE_PROJECT_ENV_STANDARD) from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV client = mock.Mock(project=self.PROJECT, spec=['project']) - with mock.patch('os.environ', new={_GAE_PROJECT_ENV: 'test_project', - _GAE_SERVICE_ENV: 'test_service', - _GAE_VERSION_ENV: 'test_version'}): + with mock.patch('os.environ', new={ + _GAE_PROJECT_ENV_STANDARD: 'test_project', + _GAE_SERVICE_ENV: 'test_service', + _GAE_VERSION_ENV: 'test_version', + }): handler = self._make_one(client, transport=_Transport) self.assertIs(handler.client, client) self.assertEqual(handler.resource.type, 'gae_app') @@ -51,6 +54,7 @@ def test_emit(self): handler = self._make_one(client, transport=_Transport) gae_resource = handler.get_gae_resource() gae_labels = handler.get_gae_labels() + trace = None logname = 'app' message = 'hello world' record = logging.LogRecord(logname, logging, None, None, message, None, None) handler.emit(record) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, logname) self.assertEqual( handler.transport.send_called_with, - (record, message, gae_resource, gae_labels)) + (record, message, gae_resource, gae_labels, trace)) def _get_gae_labels_helper(self, trace_id): get_trace_patch = mock.patch( @@ -98,5 +102,5 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource, labels): - self.send_called_with = (record, message, resource, labels) + def send(self, record, message, resource, labels, trace): + self.send_called_with = (record, message, resource, labels, trace) From 575e46ce56c29d750cf8f804e7bcae5d4927eaed Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 28 Sep 2018 14:36:00 -0400 Subject: [PATCH 185/855] Accommodate payload-less log entries. (#6103) Closes #6063.
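The request correlation added in the previous patch hinges on the format of the top-level ``trace`` field. A minimal illustration of the string construction performed in ``AppEngineHandler.emit``; both values below are placeholders (a real trace id comes from the App Engine trace label):

.. code-block:: python

    # Mirrors the trace_id expression in AppEngineHandler.emit above.
    project_id = 'my-project'  # placeholder; read from the GAE project env var
    request_trace_id = '0123456789abcdef0123456789abcdef'  # placeholder

    trace = 'projects/%s/traces/%s' % (project_id, request_trace_id)
    assert trace == 'projects/my-project/traces/0123456789abcdef0123456789abcdef'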
--- .../google/cloud/logging/_helpers.py | 9 ++- .../google/cloud/logging/entries.py | 15 ++++- .../google/cloud/logging/logger.py | 47 ++++++++++++++ .../tests/unit/test__helpers.py | 11 ++-- .../tests/unit/test_entries.py | 12 ++-- .../tests/unit/test_logger.py | 63 +++++++++++++++++++ 6 files changed, 141 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index 79ae6646e547..ed28dbab6dbd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -16,6 +16,7 @@ import requests +from google.cloud.logging.entries import EmptyEntry from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry @@ -46,12 +47,14 @@ def entry_from_resource(resource, client, loggers): """ if 'textPayload' in resource: return TextEntry.from_api_repr(resource, client, loggers) - elif 'jsonPayload' in resource: + + if 'jsonPayload' in resource: return StructEntry.from_api_repr(resource, client, loggers) - elif 'protoPayload' in resource: + + if 'protoPayload' in resource: return ProtobufEntry.from_api_repr(resource, client, loggers) - raise ValueError('Cannot parse log entry resource.') + return EmptyEntry.from_api_repr(resource, client, loggers) def retrieve_metadata_server(metadata_key): diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index 477237501e0c..bf75ffccfb3d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -84,6 +84,7 @@ class _BaseEntry(object): :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. """ + def __init__(self, payload, logger, insert_id=None, timestamp=None, labels=None, severity=None, http_request=None, resource=None, trace=None, span_id=None): @@ -125,7 +126,10 @@ def from_api_repr(cls, resource, client, loggers=None): if logger is None: logger_name = logger_name_from_path(logger_fullname) logger = loggers[logger_fullname] = client.logger(logger_name) - payload = resource[cls._PAYLOAD_KEY] + if cls._PAYLOAD_KEY is not None: + payload = resource[cls._PAYLOAD_KEY] + else: + payload = None insert_id = resource.get('insertId') timestamp = resource.get('timestamp') if timestamp is not None: @@ -146,6 +150,15 @@ def from_api_repr(cls, resource, client, loggers=None): resource=monitored_resource, trace=trace, span_id=span_id) +class EmptyEntry(_BaseEntry): + """Entry created with no payload. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry + """ + _PAYLOAD_KEY = None + + class TextEntry(_BaseEntry): """Entry created with ``textPayload``. 
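A short sketch of the dispatch behavior this patch introduces: a log entry resource with no payload key now yields an ``EmptyEntry`` instead of raising ``ValueError``. ``entry_from_resource`` is a private helper, used here only to illustrate the dispatch; the project and log names are placeholders, and a real ``Client`` needs credentials (the unit tests substitute a fake one):

.. code-block:: python

    from google.cloud import logging
    from google.cloud.logging._helpers import entry_from_resource
    from google.cloud.logging.entries import EmptyEntry

    client = logging.Client()  # needs credentials; tests use a fake client
    resource = {'logName': 'projects/my-project/logs/example-log'}  # no payload key

    entry = entry_from_resource(resource, client, loggers={})
    assert isinstance(entry, EmptyEntry)
    assert entry.payload is None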
diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 7190afe44412..24a3f2834244 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -188,6 +188,53 @@ def _make_entry_resource(self, text=None, info=None, message=None, return entry + def log_empty(self, client=None, labels=None, insert_id=None, + severity=None, http_request=None, timestamp=None, + resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + """API call: log an empty message via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + :type client: :class:`~google.cloud.logging.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: str + :param insert_id: (optional) unique ID for log entry. + + :type severity: str + :param severity: (optional) severity of event being logged. + + :type http_request: dict + :param http_request: (optional) info about HTTP request associated with + the entry + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp of event being logged. + + :type resource: :class:`~google.cloud.logging.resource.Resource` + :param resource: Monitored resource of the entry, defaults + to the global resource type. + + :type trace: str + :param trace: (optional) traceid to apply to the entry. + + :type span_id: str + :param span_id: (optional) span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request, timestamp=timestamp, resource=resource, + trace=trace, span_id=span_id) + client.logging_api.write_entries([entry_resource]) + def log_text(self, text, client=None, labels=None, insert_id=None, severity=None, http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE, trace=None, span_id=None): diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 5177fe267fc6..90bbf7333937 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -26,14 +26,12 @@ def _call_fut(resource, client, loggers): return entry_from_resource(resource, client, loggers) - def test_unknown_type(self): - with self.assertRaises(ValueError): - self._call_fut({}, None, {}) - def _payload_helper(self, key, class_name): import mock - resource = {key: 'yup'} + resource = {} + if key is not None: + resource[key] = 'yup' client = object() loggers = {} mock_class = EntryMock() @@ -45,6 +43,9 @@ def _payload_helper(self, key, class_name): self.assertIs(result, mock_class.sentinel) self.assertEqual(mock_class.called, (resource, client, loggers)) + def test_wo_payload(self): + self._payload_helper(None, 'EmptyEntry') + def test_text_payload(self): self._payload_helper('textPayload', 'TextEntry') diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 03eade9a54b2..00451ee50ff7 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ 
b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -45,11 +45,11 @@ class Test_BaseEntry(unittest.TestCase): LOGGER_NAME = 'LOGGER_NAME' @staticmethod - def _get_target_class(): + def _get_target_class(payload_key='dummyPayload'): from google.cloud.logging.entries import _BaseEntry class _Dummy(_BaseEntry): - _PAYLOAD_KEY = 'dummyPayload' + _PAYLOAD_KEY = payload_key return _Dummy @@ -115,17 +115,15 @@ def test_ctor_explicit(self): self.assertEqual(entry.trace, TRACE) self.assertEqual(entry.span_id, SPANID) - def test_from_api_repr_missing_data_no_loggers(self): + def test_from_api_repr_no_payload_missing_data_no_loggers(self): client = _Client(self.PROJECT) - PAYLOAD = 'PAYLOAD' LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) API_REPR = { - 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, } - klass = self._get_target_class() + klass = self._get_target_class(payload_key=None) entry = klass.from_api_repr(API_REPR, client) - self.assertEqual(entry.payload, PAYLOAD) + self.assertIsNone(entry.payload) self.assertIsNone(entry.insert_id) self.assertIsNone(entry.timestamp) self.assertIsNone(entry.severity) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 0624abdda135..a2c30b65c6f5 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -88,6 +88,69 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.logger, logger) self.assertIs(batch.client, client2) + def test_log_empty_w_default_labels(self): + DEFAULT_LABELS = {'foo': 'spam'} + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'labels': DEFAULT_LABELS, + }] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + + logger.log_empty() + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + + def test_log_empty_w_explicit_client_labels_severity_httpreq(self): + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + ENTRIES = [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'resource': { + 'type': 'global', + 'labels': {}, + }, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + 'trace': TRACE, + 'spanId': SPANID + }] + client1 = _Client(self.PROJECT) + client2 = _Client(self.PROJECT) + api = client2.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + + logger.log_empty(client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST, trace=TRACE, span_id=SPANID) + + self.assertEqual(api._write_entries_called_with, + (ENTRIES, None, None, None)) + def test_log_text_w_str_implicit_client(self): TEXT = 'TEXT' ENTRIES = [{ From c680a823d89cd95c28d3a5854bed5acf57a27b44 Mon Sep 17 00:00:00 2001 From: Joar Wandborg Date: Fri, 5 Oct 2018 16:23:26 +0200 Subject: [PATCH 186/855] Logging: fix class reference in docstring (#6153) --- 
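Note on the preceding patch (#6103): the new `Logger.log_empty` writes a metadata-only entry, i.e. one with no `textPayload`/`jsonPayload`/`protoPayload` field at all. A usage sketch, assuming application-default credentials are configured; the log name and label values are hypothetical:

    # Hypothetical usage of log_empty (added in the previous patch).
    from google.cloud import logging

    client = logging.Client()
    logger = client.logger('my-log')  # hypothetical log name

    # Sends an entry carrying only metadata -- severity, labels, etc.
    logger.log_empty(severity='INFO', labels={'service': 'checkout'})
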
.../google/cloud/logging/handlers/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index 5973006e403e..e679840fe31d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -39,7 +39,7 @@ class CloudLoggingHandler(logging.StreamHandler): This handler supports both an asynchronous and synchronous transport. - :type client: :class:`google.cloud.logging.client` + :type client: :class:`google.cloud.logging.client.Client` :param client: the authenticated Google Cloud Logging client for this handler to use From 4bcf430f375ec5689bd5c7d203f4a4e55a958336 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 9 Oct 2018 13:35:18 -0400 Subject: [PATCH 187/855] Logging: allow more tries on inner retry for '_list_entries'. (#6179) Also, make logger names unique between test runs. Closes #5303. --- .../tests/system/test_system.py | 78 ++++++++++--------- 1 file changed, 40 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index e7ff6804215a..3448fc095ba3 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -64,7 +64,7 @@ def _list_entries(logger): :rtype: list :returns: List of all entries consumed. """ - inner = RetryResult(_has_entries)(_consume_entries) + inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) outer = RetryErrors( (ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) return outer(logger) @@ -114,8 +114,8 @@ def tearDown(self): logging.getLogger().handlers = self._handlers_cache[:] @staticmethod - def _logger_name(): - return 'system-tests-logger' + unique_resource_id('-') + def _logger_name(prefix): + return prefix + unique_resource_id('-') def test_list_entry_with_unregistered(self): from google.protobuf import any_pb2 @@ -144,7 +144,7 @@ def test_list_entry_with_unregistered(self): def test_log_text(self): TEXT_PAYLOAD = 'System test: test_log_text' - logger = Config.CLIENT.logger(self._logger_name()) + logger = Config.CLIENT.logger(self._logger_name('log_text')) self.to_delete.append(logger) logger.log_text(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -153,7 +153,7 @@ def test_log_text(self): def test_log_text_with_timestamp(self): text_payload = 'System test: test_log_text_with_timestamp' - logger = Config.CLIENT.logger(self._logger_name()) + logger = Config.CLIENT.logger(self._logger_name('log_text_ts')) now = datetime.datetime.utcnow() self.to_delete.append(logger) @@ -167,7 +167,7 @@ def test_log_text_with_timestamp(self): def test_log_text_with_resource(self): text_payload = 'System test: test_log_text_with_timestamp' - logger = Config.CLIENT.logger(self._logger_name()) + logger = Config.CLIENT.logger(self._logger_name('log_text_res')) now = datetime.datetime.utcnow() resource = Resource( type='gae_app', @@ -199,7 +199,7 @@ def test_log_text_w_metadata(self): 'requestUrl': URI, 'status': STATUS, } - logger = Config.CLIENT.logger(self._logger_name()) + logger = Config.CLIENT.logger(self._logger_name('log_text_md')) self.to_delete.append(logger) logger.log_text(TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, @@ -219,7 +219,7 @@ def test_log_text_w_metadata(self): 
self.assertEqual(request['status'], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name()) + logger = Config.CLIENT.logger(self._logger_name('log_struct')) self.to_delete.append(logger) logger.log_struct(self.JSON_PAYLOAD) @@ -228,10 +228,37 @@ def test_log_struct(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + def test_log_struct_w_metadata(self): + INSERT_ID = 'INSERTID' + SEVERITY = 'INFO' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = 500 + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + logger = Config.CLIENT.logger(self._logger_name('log_struct_md')) + self.to_delete.append(logger) + + logger.log_struct(self.JSON_PAYLOAD, insert_id=INSERT_ID, + severity=SEVERITY, http_request=REQUEST) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request['requestMethod'], METHOD) + self.assertEqual(request['requestUrl'], URI) + self.assertEqual(request['status'], STATUS) + def test_log_handler_async(self): LOG_MESSAGE = 'It was the worst of times' - handler_name = 'gcp-async' + unique_resource_id('-') + handler_name = self._logger_name('handler_async') handler = CloudLoggingHandler(Config.CLIENT, name=handler_name) # only create the logger to delete, hidden otherwise logger = Config.CLIENT.logger(handler_name) @@ -252,8 +279,9 @@ def test_log_handler_async(self): def test_log_handler_sync(self): LOG_MESSAGE = 'It was the best of times.' + handler_name = self._logger_name('handler_sync') handler = CloudLoggingHandler(Config.CLIENT, - name=self._logger_name(), + name=handler_name, transport=SyncTransport) # only create the logger to delete, hidden otherwise @@ -276,7 +304,8 @@ def test_log_handler_sync(self): def test_log_root_handler(self): LOG_MESSAGE = 'It was the best of times.' 
- handler = CloudLoggingHandler(Config.CLIENT, name=self._logger_name()) + handler = CloudLoggingHandler( + Config.CLIENT, name=self._logger_name('handler_root')) # only create the logger to delete, hidden otherwise logger = Config.CLIENT.logger(handler.name) self.to_delete.append(logger) @@ -293,33 +322,6 @@ def test_log_root_handler(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) - def test_log_struct_w_metadata(self): - INSERT_ID = 'INSERTID' - SEVERITY = 'INFO' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = 500 - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } - logger = Config.CLIENT.logger(self._logger_name()) - self.to_delete.append(logger) - - logger.log_struct(self.JSON_PAYLOAD, insert_id=INSERT_ID, - severity=SEVERITY, http_request=REQUEST) - entries = _list_entries(logger) - - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request['requestMethod'], METHOD) - self.assertEqual(request['requestUrl'], URI) - self.assertEqual(request['status'], STATUS) - def test_create_metric(self): METRIC_NAME = 'test-create-metric%s' % (_RESOURCE_ID,) metric = Config.CLIENT.metric( From 52bf72e1382275460a1638e6eb1d0054bfcefe12 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 9 Oct 2018 13:35:59 -0400 Subject: [PATCH 188/855] Harden 'test_list_entry_with_unregistered' against 429 errors. (#6181) Closes #5759. --- packages/google-cloud-logging/tests/system/test_system.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 3448fc095ba3..833e2abbc151 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -132,7 +132,10 @@ def test_list_entry_with_unregistered(self): filter_ = self.TYPE_FILTER.format(type_url) entry_iter = iter( Config.CLIENT.list_entries(page_size=1, filter_=filter_)) - protobuf_entry = next(entry_iter) + + retry = RetryErrors(TooManyRequests) + protobuf_entry = retry(lambda: next(entry_iter))() + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) if Config.CLIENT._use_grpc: self.assertIsNone(protobuf_entry.payload) From 3a580217b43d117f9f51c5f7cd15bcf515871244 Mon Sep 17 00:00:00 2001 From: Phillip Pearson Date: Tue, 9 Oct 2018 10:50:12 -0700 Subject: [PATCH 189/855] Logging: test both GCLOUD_PROJECT and GOOGLE_CLOUD_PROJECT env vars (#6138) --- .../tests/unit/handlers/test_app_engine.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 4376e03e57ba..396709e65cde 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -30,6 +30,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): + from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_FLEX from google.cloud.logging.handlers.app_engine import ( _GAE_PROJECT_ENV_STANDARD) from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV @@ -37,6 +38,7 @@ 
def test_constructor(self): client = mock.Mock(project=self.PROJECT, spec=['project']) + # Verify that project/service/version are picked up from the environment. with mock.patch('os.environ', new={ _GAE_PROJECT_ENV_STANDARD: 'test_project', _GAE_SERVICE_ENV: 'test_service', @@ -49,6 +51,21 @@ def test_constructor(self): self.assertEqual(handler.resource.labels['module_id'], 'test_service') self.assertEqual(handler.resource.labels['version_id'], 'test_version') + # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes precedence + # over _GAE_PROJECT_ENV_STANDARD. + with mock.patch('os.environ', new={ + _GAE_PROJECT_ENV_FLEX: 'test_project_2', + _GAE_PROJECT_ENV_STANDARD: 'test_project_should_be_overridden', + _GAE_SERVICE_ENV: 'test_service_2', + _GAE_VERSION_ENV: 'test_version_2', + }): + handler = self._make_one(client, transport=_Transport) + self.assertIs(handler.client, client) + self.assertEqual(handler.resource.type, 'gae_app') + self.assertEqual(handler.resource.labels['project_id'], 'test_project_2') + self.assertEqual(handler.resource.labels['module_id'], 'test_service_2') + self.assertEqual(handler.resource.labels['version_id'], 'test_version_2') + def test_emit(self): client = mock.Mock(project=self.PROJECT, spec=['project']) handler = self._make_one(client, transport=_Transport) From 32ab53dfdc1268c3fd4e95ac98276a16b91cf8b6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 9 Oct 2018 14:06:58 -0400 Subject: [PATCH 190/855] Harden sink / metric creation against transient errors. (#6180) Closes #5931. --- .../tests/system/test_system.py | 46 +++++++++++++++---- 1 file changed, 36 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 833e2abbc151..634134382310 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -330,7 +330,10 @@ def test_create_metric(self): metric = Config.CLIENT.metric( METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) - metric.create() + retry = RetryErrors(Conflict) + + retry(metric.create)() + self.to_delete.append(metric) self.assertTrue(metric.exists()) @@ -341,11 +344,14 @@ def test_list_metrics(self): self.assertFalse(metric.exists()) before_metrics = list(Config.CLIENT.list_metrics()) before_names = set(before.name for before in before_metrics) - self.failIf(metric.name in before_names) - metric.create() + self.assertFalse(metric.name in before_names) + retry = RetryErrors(Conflict) + retry(metric.create)() self.to_delete.append(metric) self.assertTrue(metric.exists()) + after_metrics = list(Config.CLIENT.list_metrics()) + after_names = set(after.name for after in after_metrics) self.assertTrue(metric.name in after_names) @@ -359,7 +365,9 @@ def test_reload_metric(self): self.to_delete.append(metric) metric.filter_ = 'logName:other' metric.description = 'local changes' + metric.reload() + self.assertEqual(metric.filter_, DEFAULT_FILTER) self.assertEqual(metric.description, DEFAULT_DESCRIPTION) @@ -375,7 +383,9 @@ def test_update_metric(self): self.to_delete.append(metric) metric.filter_ = NEW_FILTER metric.description = NEW_DESCRIPTION + metric.update() + after_metrics = list(Config.CLIENT.list_metrics()) after_info = {metric.name: metric for metric in after_metrics} after = after_info[METRIC_NAME] @@ -406,9 +416,12 @@ def test_create_sink_storage_bucket(self): uri = self._init_storage_bucket() SINK_NAME = 
'test-create-sink-bucket%s' % (_RESOURCE_ID,) + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) - sink.create() + + retry(sink.create)() + self.to_delete.append(sink) self.assertTrue(sink.exists()) @@ -434,9 +447,12 @@ def test_create_sink_pubsub_topic(self): TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic_path,) + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) self.assertFalse(sink.exists()) - sink.create() + + retry(sink.create)() + self.to_delete.append(sink) self.assertTrue(sink.exists()) @@ -465,31 +481,37 @@ def _init_bigquery_dataset(self): def test_create_sink_bigquery_dataset(self): SINK_NAME = 'test-create-sink-dataset%s' % (_RESOURCE_ID,) + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) - sink.create() + + retry(sink.create)() + self.to_delete.append(sink) self.assertTrue(sink.exists()) def test_list_sinks(self): SINK_NAME = 'test-list-sinks%s' % (_RESOURCE_ID,) uri = self._init_storage_bucket() + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) before_sinks = list(Config.CLIENT.list_sinks()) before_names = set(before.name for before in before_sinks) - self.failIf(sink.name in before_names) - sink.create() + self.assertFalse(sink.name in before_names) + retry(sink.create)() self.to_delete.append(sink) self.assertTrue(sink.exists()) + after_sinks = list(Config.CLIENT.list_sinks()) + after_names = set(after.name for after in after_sinks) self.assertTrue(sink.name in after_names) def test_reload_sink(self): SINK_NAME = 'test-reload-sink%s' % (_RESOURCE_ID,) - retry = RetryErrors(Conflict) + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) @@ -497,13 +519,15 @@ def test_reload_sink(self): self.to_delete.append(sink) sink.filter_ = 'BOGUS FILTER' sink.destination = 'BOGUS DESTINATION' + sink.reload() + self.assertEqual(sink.filter_, DEFAULT_FILTER) self.assertEqual(sink.destination, uri) def test_update_sink(self): SINK_NAME = 'test-update-sink%s' % (_RESOURCE_ID,) - retry = RetryErrors(Conflict, max_tries=10) + retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) bucket_uri = self._init_storage_bucket() dataset_uri = self._init_bigquery_dataset() UPDATED_FILTER = 'logName:syslog' @@ -513,7 +537,9 @@ def test_update_sink(self): self.to_delete.append(sink) sink.filter_ = UPDATED_FILTER sink.destination = dataset_uri + sink.update() + self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) From 2f6909c74926bc82be27a7f10feda267a0c3c6e0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 9 Oct 2018 14:45:48 -0400 Subject: [PATCH 191/855] Logging: fix lint errors. (#6183) Introduced in #6138. 
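Note on the preceding hardening patch (#6180): `RetryErrors` wraps a callable and re-invokes it while one of the given exception types keeps being raised. A stripped-down sketch of the pattern -- the real helper lives in the repo's shared test utilities and also handles backoff; the class below is an illustration, not that implementation:

    import time

    class RetryErrors(object):
        """Retry a callable on the given exception types (sketch)."""

        def __init__(self, exceptions, max_tries=4, delay=1):
            self.exceptions = exceptions  # a type or tuple of types
            self.max_tries = max_tries
            self.delay = delay

        def __call__(self, func):
            def wrapped(*args, **kwargs):
                for attempt in range(self.max_tries):
                    try:
                        return func(*args, **kwargs)
                    except self.exceptions:
                        if attempt == self.max_tries - 1:
                            raise
                        time.sleep(self.delay)
            return wrapped

    # Usage mirroring the system tests above:
    #   retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10)
    #   retry(sink.create)()
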
--- .../tests/unit/handlers/test_app_engine.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 396709e65cde..641d06ca3b27 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -30,7 +30,8 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): - from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_FLEX + from google.cloud.logging.handlers.app_engine import ( + _GAE_PROJECT_ENV_FLEX) from google.cloud.logging.handlers.app_engine import ( _GAE_PROJECT_ENV_STANDARD) from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV @@ -38,7 +39,8 @@ def test_constructor(self): client = mock.Mock(project=self.PROJECT, spec=['project']) - # Verify that project/service/version are picked up from the environment. + # Verify that project/service/version are picked up from the + # environment. with mock.patch('os.environ', new={ _GAE_PROJECT_ENV_STANDARD: 'test_project', _GAE_SERVICE_ENV: 'test_service', @@ -51,8 +53,8 @@ def test_constructor(self): self.assertEqual(handler.resource.labels['module_id'], 'test_service') self.assertEqual(handler.resource.labels['version_id'], 'test_version') - # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes precedence - # over _GAE_PROJECT_ENV_STANDARD. + # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes + # precedence over _GAE_PROJECT_ENV_STANDARD. with mock.patch('os.environ', new={ _GAE_PROJECT_ENV_FLEX: 'test_project_2', _GAE_PROJECT_ENV_STANDARD: 'test_project_should_be_overridden', @@ -62,9 +64,12 @@ def test_constructor(self): handler = self._make_one(client, transport=_Transport) self.assertIs(handler.client, client) self.assertEqual(handler.resource.type, 'gae_app') - self.assertEqual(handler.resource.labels['project_id'], 'test_project_2') - self.assertEqual(handler.resource.labels['module_id'], 'test_service_2') - self.assertEqual(handler.resource.labels['version_id'], 'test_version_2') + self.assertEqual( + handler.resource.labels['project_id'], 'test_project_2') + self.assertEqual( + handler.resource.labels['module_id'], 'test_service_2') + self.assertEqual( + handler.resource.labels['version_id'], 'test_version_2') def test_emit(self): client = mock.Mock(project=self.PROJECT, spec=['project']) From 70f43bb8ffa11014975cb05df66e9a621eaf2301 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 9 Oct 2018 15:13:58 -0400 Subject: [PATCH 192/855] Logging: harden systest teardown against 'DeadlineExceeded' retry errors. (#6182) Closes #6115. 
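Why `RetryError` is added below: `google.api_core`'s retry wrapper does not re-raise the underlying transient error once its deadline expires -- it raises `google.api_core.exceptions.RetryError` instead, so the teardown retry has to catch that as well. A small demonstration sketch:

    from google.api_core import exceptions, retry

    @retry.Retry(
        predicate=retry.if_exception_type(exceptions.ServiceUnavailable),
        deadline=1.0)
    def flaky_delete():
        # Fails transiently forever, so the retry deadline will expire.
        raise exceptions.ServiceUnavailable('try again')

    try:
        flaky_delete()
    except exceptions.RetryError as exc:
        # The transient error surfaces as RetryError after the deadline.
        print('gave up:', exc)
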
--- packages/google-cloud-logging/tests/system/test_system.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 634134382310..a8bb2601a295 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -21,6 +21,7 @@ from google.api_core.exceptions import NotFound from google.api_core.exceptions import TooManyRequests from google.api_core.exceptions import ResourceExhausted +from google.api_core.exceptions import RetryError from google.api_core.exceptions import ServiceUnavailable from google.cloud._helpers import UTC import google.cloud.logging @@ -104,7 +105,8 @@ def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] def tearDown(self): - retry = RetryErrors((NotFound, TooManyRequests), max_tries=9) + retry = RetryErrors( + (NotFound, TooManyRequests, RetryError), max_tries=9) for doomed in self.to_delete: try: retry(doomed.delete)() From 8ef646ad3e55e5d7500cbda80db6d0430bef5950 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 10 Oct 2018 11:04:44 -0700 Subject: [PATCH 193/855] Use new Nox (#6175) --- .../{nox.py => noxfile.py} | 49 ++++--------------- 1 file changed, 10 insertions(+), 39 deletions(-) rename packages/google-cloud-logging/{nox.py => noxfile.py} (75%) diff --git a/packages/google-cloud-logging/nox.py b/packages/google-cloud-logging/noxfile.py similarity index 75% rename from packages/google-cloud-logging/nox.py rename to packages/google-cloud-logging/noxfile.py index 3ac0c3c9e217..dfc477e5d7dd 100644 --- a/packages/google-cloud-logging/nox.py +++ b/packages/google-cloud-logging/noxfile.py @@ -34,22 +34,13 @@ ) -@nox.session def default(session, django_dep=('django',)): """Default unit test session. - - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. """ + # Install all test dependencies, then install this package in-place. deps = UNIT_TEST_DEPS - - if session.interpreter is None and sys.version_info[:2] == (2, 7): - deps += ('django >= 1.11.0, < 2.0.0dev',) - else: - deps += django_dep + deps += django_dep session.install(*deps) for local_dep in LOCAL_DEPS: @@ -71,17 +62,10 @@ def default(session, django_dep=('django',)): ) -@nox.session -@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) -def unit(session, py): +@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +def unit(session): """Run the unit test suite.""" - # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + py - # Testing multiple version of django # See https://www.djangoproject.com/download/ for supported version django_deps_27 = [ @@ -89,27 +73,20 @@ def unit(session, py): ('django >= 1.11.0, < 2.0.0dev',), ] - if session.interpreter == 'python2.7': + if session.virtualenv.interpreter == '2.7': [default(session, django_dep=django) for django in django_deps_27] else: default(session) -@nox.session -@nox.parametrize('py', ['2.7', '3.6']) -def system(session, py): +@nox.session(python=['2.7', '3.6']) +def system(session): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. 
if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') - # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + py - # Use pre-release gRPC for system tests. session.install('--pre', 'grpcio') @@ -137,40 +114,34 @@ def system(session, py): *session.posargs) -@nox.session +@nox.session(python='3.6') def lint(session): """Run linters. Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.interpreter = 'python3.6' session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google', 'tests') -@nox.session +@nox.session(python='3.6') def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.interpreter = 'python3.6' - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'setup' session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') -@nox.session +@nox.session(python='3.6') def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. """ - session.interpreter = 'python3.6' session.install('coverage', 'pytest-cov') session.run('coverage', 'report', '--show-missing', '--fail-under=100') session.run('coverage', 'erase') From 3fe205f656589323e0f69ef1148181517a35705b Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 17 Oct 2018 14:45:24 -0700 Subject: [PATCH 194/855] Release logging 1.8.0 (#6249) --- packages/google-cloud-logging/CHANGELOG.md | 29 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index d79fe8029254..b72a4fbe87bc 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.8.0 + +10-17-2018 14:23 PDT + +### Implementation Changes + +- Logging: allow more tries on inner retry for '_list_entries'. ([#6179](https://github.com/googleapis/google-cloud-python/pull/6179)) +- Accommodate payload-less log entries. ([#6103](https://github.com/googleapis/google-cloud-python/pull/6103)) + +### New Features + +- Logging: support request-correlated logging in App Engine standard python37 runtime ([#6118](https://github.com/googleapis/google-cloud-python/pull/6118)) + +### Documentation + +- Logging: fix class reference in docstring ([#6153](https://github.com/googleapis/google-cloud-python/pull/6153)) +- Translate / Logging / Language: restore detailed usage docs. ([#5999](https://github.com/googleapis/google-cloud-python/pull/5999)) +- Redirect renamed 'usage.html'/'client.html' -> 'index.html'. ([#5996](https://github.com/googleapis/google-cloud-python/pull/5996)) + +### Internal / Testing Changes + +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) +- Logging: harden systest teardown against 'DeadlineExceeded' retry errors. ([#6182](https://github.com/googleapis/google-cloud-python/pull/6182)) +- Logging: fix lint errors. ([#6183](https://github.com/googleapis/google-cloud-python/pull/6183)) +- Harden sink / metric creation against transient errors. 
([#6180](https://github.com/googleapis/google-cloud-python/pull/6180)) +- Logging: test both GCLOUD_PROJECT and GOOGLE_CLOUD_PROJECT env vars ([#6138](https://github.com/googleapis/google-cloud-python/pull/6138)) +- Harden 'test_list_entry_with_unregistered' against 429 errors. ([#6181](https://github.com/googleapis/google-cloud-python/pull/6181)) +- Prep logging docs for repo split. ([#5943](https://github.com/googleapis/google-cloud-python/pull/5943)) + ## 1.7.0 ### Implementation Changes diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d24e5b7399e6..fd996b380ff9 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.7.0' +version = '1.8.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 7154d1b299ff872e43f772359d8f116a4f203af0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 22 Oct 2018 16:40:42 -0400 Subject: [PATCH 195/855] Logging: add support for additional 'LogEntry' fields (#6229) Use namedtuples to reduce boilerplate in 'logger.Batch', 'logger.Logger', and 'entries' implementations: remove 'entry_type' and 'PAYLOAD_KEY' warts, dispatching instead based on class to load / save payload. Parse 'LogEntry.receiveTimestamp' -> 'received_timestamp' attr. Make 'received_timestamp' default to 'None' for all instances. It is set only when parsing a server response. Add support for 'LogEntry.traceSampled' field. Add support for 'source_location' field of log entries. Add support for 'operation' field of log entries. Add 'ProtobufEntry.payload_json': Returns 'None' if the payload is a protobuf message. 'ProtobufEntry.payload_pb' now returns 'None' if the payload is *not* a protobuf message. Drop 'EmptyEntry': just use 'LogEntry'. Closes #5601. Closes #6094. --- .../google-cloud-logging/docs/entries.rst | 1 + .../google/cloud/logging/_helpers.py | 4 +- .../google/cloud/logging/entries.py | 316 +++++--- .../google/cloud/logging/logger.py | 461 +++-------- .../tests/system/test_system.py | 7 +- .../tests/unit/test__helpers.py | 2 +- .../tests/unit/test_entries.py | 654 ++++++++++++++-- .../tests/unit/test_logger.py | 713 +++++++++--------- 8 files changed, 1272 insertions(+), 886 deletions(-) diff --git a/packages/google-cloud-logging/docs/entries.rst b/packages/google-cloud-logging/docs/entries.rst index d384e9227ca5..223eadc0756e 100644 --- a/packages/google-cloud-logging/docs/entries.rst +++ b/packages/google-cloud-logging/docs/entries.rst @@ -4,3 +4,4 @@ Entries .. 
automodule:: google.cloud.logging.entries :members: :show-inheritance: + :member-order: groupwise diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index ed28dbab6dbd..b817dfdaa96f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -16,7 +16,7 @@ import requests -from google.cloud.logging.entries import EmptyEntry +from google.cloud.logging.entries import LogEntry from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry @@ -54,7 +54,7 @@ def entry_from_resource(resource, client, loggers): if 'protoPayload' in resource: return ProtobufEntry.from_api_repr(resource, client, loggers) - return EmptyEntry.from_api_repr(resource, client, loggers) + return LogEntry.from_api_repr(resource, client, loggers) def retrieve_metadata_server(metadata_key): diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index bf75ffccfb3d..c2c33020e697 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -14,15 +14,21 @@ """Log entries within the Google Stackdriver Logging API.""" +import collections import json import re -from google.protobuf import any_pb2 +from google.protobuf.any_pb2 import Any +from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse from google.cloud.logging.resource import Resource from google.cloud._helpers import _name_from_project_path from google.cloud._helpers import _rfc3339_nanos_to_datetime +from google.cloud._helpers import _datetime_to_rfc3339 + + +_GLOBAL_RESOURCE = Resource(type='global', labels={}) _LOGGER_TEMPLATE = re.compile(r""" @@ -48,31 +54,58 @@ def logger_name_from_path(path): return _name_from_project_path(path, None, _LOGGER_TEMPLATE) -class _BaseEntry(object): - """Base class for TextEntry, StructEntry, ProtobufEntry. +def _int_or_none(value): + """Helper: return an integer or ``None``.""" + if value is not None: + value = int(value) + return value - :type payload: text or dict - :param payload: The payload passed as ``textPayload``, ``jsonPayload``, - or ``protoPayload``. - :type logger: :class:`google.cloud.logging.logger.Logger` - :param logger: the logger used to write the entry. +_LOG_ENTRY_FIELDS = ( # (name, default) + ('log_name', None), + ('labels', None), + ('insert_id', None), + ('severity', None), + ('http_request', None), + ('timestamp', None), + ('resource', _GLOBAL_RESOURCE), + ('trace', None), + ('span_id', None), + ('trace_sampled', None), + ('source_location', None), + ('operation', None), + ('logger', None), + ('payload', None), +) - :type insert_id: text - :param insert_id: (optional) the ID used to identify an entry uniquely. - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp for the entry +_LogEntryTuple = collections.namedtuple( + 'LogEntry', (field for field, _ in _LOG_ENTRY_FIELDS)) + +_LogEntryTuple.__new__.__defaults__ = tuple( + default for _, default in _LOG_ENTRY_FIELDS) + + +_LOG_ENTRY_PARAM_DOCSTRING = """\ + + :type log_name: str + :param log_name: the name of the logger used to post the entry. 
:type labels: dict :param labels: (optional) mapping of labels for the entry + :type insert_id: text + :param insert_id: (optional) the ID used to identify an entry uniquely. + :type severity: str :param severity: (optional) severity of event being logged. :type http_request: dict :param http_request: (optional) info about HTTP request associated with - the entry. + the entry. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp for the entry :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry @@ -83,21 +116,43 @@ class _BaseEntry(object): :type span_id: str :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. - """ - def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None, resource=None, - trace=None, span_id=None): - self.payload = payload - self.logger = logger - self.insert_id = insert_id - self.timestamp = timestamp - self.labels = labels - self.severity = severity - self.http_request = http_request - self.resource = resource - self.trace = trace - self.span_id = span_id + :type trace_sampled: bool + :param trace_sampled: (optional) the sampling decision of the trace + associated with the log entry. + + :type source_location: dict + :param source_location: (optional) location in source code from which + the entry was emitted. + + :type operation: dict + :param operation: (optional) additional information about a potentially + long-running operation associated with the log entry. + + :type logger: :class:`google.cloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + +""" + +_LOG_ENTRY_SEE_ALSO_DOCSTRING = """\ + + See: + https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry +""" + + +class LogEntry(_LogEntryTuple): + __doc__ = """ + Log entry. + + """ + _LOG_ENTRY_PARAM_DOCSTRING + _LOG_ENTRY_SEE_ALSO_DOCSTRING + + received_timestamp = None + + @classmethod + def _extract_payload(cls, resource): + """Helper for :meth:`from_api_repr`""" + return None @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -116,8 +171,8 @@ def from_api_repr(cls, resource, client, loggers=None): (Optional) A mapping of logger fullnames -> loggers. If not passed, the entry will have a newly-created logger. - :rtype: :class:`google.cloud.logging.entries._BaseEntry` - :returns: Text entry parsed from ``resource``. + :rtype: :class:`google.cloud.logging.entries.LogEntry` + :returns: Log entry parsed from ``resource``. 
""" if loggers is None: loggers = {} @@ -126,10 +181,7 @@ def from_api_repr(cls, resource, client, loggers=None): if logger is None: logger_name = logger_name_from_path(logger_fullname) logger = loggers[logger_fullname] = client.logger(logger_name) - if cls._PAYLOAD_KEY is not None: - payload = resource[cls._PAYLOAD_KEY] - else: - payload = None + payload = cls._extract_payload(resource) insert_id = resource.get('insertId') timestamp = resource.get('timestamp') if timestamp is not None: @@ -139,99 +191,149 @@ def from_api_repr(cls, resource, client, loggers=None): http_request = resource.get('httpRequest') trace = resource.get('trace') span_id = resource.get('spanId') + trace_sampled = resource.get('traceSampled') + source_location = resource.get('sourceLocation') + if source_location is not None: + line = source_location.pop('line', None) + source_location['line'] = _int_or_none(line) + operation = resource.get('operation') monitored_resource_dict = resource.get('resource') monitored_resource = None if monitored_resource_dict is not None: monitored_resource = Resource._from_dict(monitored_resource_dict) - return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, - labels=labels, severity=severity, http_request=http_request, - resource=monitored_resource, trace=trace, span_id=span_id) - - -class EmptyEntry(_BaseEntry): - """Entry created with no payload. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry - """ - _PAYLOAD_KEY = None - - -class TextEntry(_BaseEntry): - """Entry created with ``textPayload``. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry - """ - _PAYLOAD_KEY = 'textPayload' - - -class StructEntry(_BaseEntry): - """Entry created with ``jsonPayload``. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry - """ - _PAYLOAD_KEY = 'jsonPayload' + inst = cls( + log_name=logger_fullname, + insert_id=insert_id, + timestamp=timestamp, + labels=labels, + severity=severity, + http_request=http_request, + resource=monitored_resource, + trace=trace, + span_id=span_id, + trace_sampled=trace_sampled, + source_location=source_location, + operation=operation, + logger=logger, + payload=payload, + ) + received = resource.get('receiveTimestamp') + if received is not None: + inst.received_timestamp = _rfc3339_nanos_to_datetime(received) + return inst + + def to_api_repr(self): + """API repr (JSON format) for entry. + """ + info = {} + if self.log_name is not None: + info['logName'] = self.log_name + if self.resource is not None: + info['resource'] = self.resource._to_dict() + if self.labels is not None: + info['labels'] = self.labels + if self.insert_id is not None: + info['insertId'] = self.insert_id + if self.severity is not None: + info['severity'] = self.severity + if self.http_request is not None: + info['httpRequest'] = self.http_request + if self.timestamp is not None: + info['timestamp'] = _datetime_to_rfc3339(self.timestamp) + if self.trace is not None: + info['trace'] = self.trace + if self.span_id is not None: + info['spanId'] = self.span_id + if self.trace_sampled is not None: + info['traceSampled'] = self.trace_sampled + if self.source_location is not None: + source_location = self.source_location.copy() + source_location['line'] = str(source_location.pop('line', 0)) + info['sourceLocation'] = source_location + if self.operation is not None: + info['operation'] = self.operation + return info + + +class TextEntry(LogEntry): + __doc__ = """ + Log entry with text payload. 
+ + """ + _LOG_ENTRY_PARAM_DOCSTRING + """ + + :type payload: str | unicode + :param payload: payload for the log entry. + """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING + @classmethod + def _extract_payload(cls, resource): + """Helper for :meth:`from_api_repr`""" + return resource['textPayload'] -class ProtobufEntry(_BaseEntry): - """Entry created with ``protoPayload``. + def to_api_repr(self): + """API repr (JSON format) for entry. + """ + info = super(TextEntry, self).to_api_repr() + info['textPayload'] = self.payload + return info - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry - :type payload: str, dict or any_pb2.Any - :param payload: The payload passed as ``textPayload``, ``jsonPayload``, - or ``protoPayload``. This also may be passed as a raw - :class:`.any_pb2.Any` if the ``protoPayload`` could - not be deserialized. +class StructEntry(LogEntry): + __doc__ = """ + Log entry with JSON payload. - :type logger: :class:`~google.cloud.logging.logger.Logger` - :param logger: the logger used to write the entry. + """ + _LOG_ENTRY_PARAM_DOCSTRING + """ - :type insert_id: str - :param insert_id: (optional) the ID used to identify an entry uniquely. + :type payload: dict + :param payload: payload for the log entry. + """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp for the entry + @classmethod + def _extract_payload(cls, resource): + """Helper for :meth:`from_api_repr`""" + return resource['jsonPayload'] - :type labels: dict - :param labels: (optional) mapping of labels for the entry + def to_api_repr(self): + """API repr (JSON format) for entry. + """ + info = super(StructEntry, self).to_api_repr() + info['jsonPayload'] = self.payload + return info - :type severity: str - :param severity: (optional) severity of event being logged. - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry +class ProtobufEntry(LogEntry): + __doc__ = """ + Log entry with protobuf message payload. - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry + """ + _LOG_ENTRY_PARAM_DOCSTRING + """ - :type trace: str - :param trace: (optional) traceid to apply to the entry. + :type payload: protobuf message + :param payload: payload for the log entry. + """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - """ - _PAYLOAD_KEY = 'protoPayload' - - def __init__(self, payload, logger, insert_id=None, timestamp=None, - labels=None, severity=None, http_request=None, resource=None, - trace=None, span_id=None): - super(ProtobufEntry, self).__init__( - payload, logger, insert_id=insert_id, timestamp=timestamp, - labels=labels, severity=severity, http_request=http_request, - resource=resource, trace=trace, span_id=span_id) - if isinstance(self.payload, any_pb2.Any): - self.payload_pb = self.payload - self.payload = None - else: - self.payload_pb = None + @classmethod + def _extract_payload(cls, resource): + """Helper for :meth:`from_api_repr`""" + return resource['protoPayload'] + + @property + def payload_pb(self): + if isinstance(self.payload, Any): + return self.payload + + @property + def payload_json(self): + if not isinstance(self.payload, Any): + return self.payload + + def to_api_repr(self): + """API repr (JSON format) for entry. 
+ """ + info = super(ProtobufEntry, self).to_api_repr() + info['protoPayload'] = MessageToDict(self.payload) + return info def parse_message(self, message): """Parse payload into a protobuf message. diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 24a3f2834244..acdb0940e4b1 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -14,14 +14,33 @@ """Define API Loggers.""" -from google.protobuf.json_format import MessageToDict -from google.cloud._helpers import _datetime_to_rfc3339 +from google.cloud.logging.entries import LogEntry +from google.cloud.logging.entries import ProtobufEntry +from google.cloud.logging.entries import StructEntry +from google.cloud.logging.entries import TextEntry from google.cloud.logging.resource import Resource _GLOBAL_RESOURCE = Resource(type='global', labels={}) +_OUTBOUND_ENTRY_FIELDS = ( # (name, default) + ('type_', None), + ('log_name', None), + ('payload', None), + ('labels', None), + ('insert_id', None), + ('severity', None), + ('http_request', None), + ('timestamp', None), + ('resource', _GLOBAL_RESOURCE), + ('trace', None), + ('span_id', None), + ('trace_sampled', None), + ('source_location', None), +) + + class Logger(object): """Loggers represent named targets for log entries. @@ -93,104 +112,25 @@ def batch(self, client=None): client = self._require_client(client) return Batch(self, client) - def _make_entry_resource(self, text=None, info=None, message=None, - labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, - span_id=None): - """Return a log entry resource of the appropriate type. - - Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. - - Only one of ``text``, ``info``, or ``message`` should be passed. - - :type text: str - :param text: (Optional) text payload - - :type info: dict - :param info: (Optional) struct payload - - :type message: :class:`~google.protobuf.message.Message` - :param message: (Optional) The protobuf payload to log. - - :type labels: dict - :param labels: (Optional) labels passed in to calling method. - - :type insert_id: str - :param insert_id: (Optional) unique ID for log entry. - - :type severity: str - :param severity: (Optional) severity of event being logged. - - :type http_request: dict - :param http_request: (Optional) info about HTTP request associated with - the entry - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (Optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - - :rtype: dict - :returns: The JSON resource created. + def _do_log(self, client, _entry_class, payload=None, **kw): + """Helper for :meth:`log_empty`, :meth:`log_text`, etc. """ - entry = { - 'logName': self.full_name, - 'resource': resource._to_dict(), - } - - if text is not None: - entry['textPayload'] = text - - if info is not None: - entry['jsonPayload'] = info - - if message is not None: - # NOTE: If ``message`` contains an ``Any`` field with an - # unknown type, this will fail with a ``TypeError``. 
- # However, since ``message`` will be provided by a user, - # the assumption is that any types needed for the - # protobuf->JSON conversion will be known from already - # imported ``pb2`` modules. - entry['protoPayload'] = MessageToDict(message) - - if labels is None: - labels = self.labels - - if labels is not None: - entry['labels'] = labels - - if insert_id is not None: - entry['insertId'] = insert_id - - if severity is not None: - entry['severity'] = severity - - if http_request is not None: - entry['httpRequest'] = http_request - - if timestamp is not None: - entry['timestamp'] = _datetime_to_rfc3339(timestamp) + client = self._require_client(client) - if trace is not None: - entry['trace'] = trace + # Apply defaults + kw['log_name'] = kw.pop('log_name', self.full_name) + kw['labels'] = kw.pop('labels', self.labels) + kw['resource'] = kw.pop('resource', _GLOBAL_RESOURCE) - if span_id is not None: - entry['spanId'] = span_id + if payload is not None: + entry = _entry_class(payload=payload, **kw) + else: + entry = _entry_class(**kw) - return entry + api_repr = entry.to_api_repr() + client.logging_api.write_entries([api_repr]) - def log_empty(self, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + def log_empty(self, client=None, **kw): """API call: log an empty message via a POST request See @@ -201,43 +141,13 @@ def log_empty(self, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: Monitored resource of the entry, defaults - to the global resource type. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - client = self._require_client(client) - entry_resource = self._make_entry_resource( - labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp, resource=resource, - trace=trace, span_id=span_id) - client.logging_api.write_entries([entry_resource]) - - def log_text(self, text, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + self._do_log(client, LogEntry, **kw) + + def log_text(self, text, client=None, **kw): """API call: log a text message via a POST request See @@ -251,43 +161,13 @@ def log_text(self, text, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict - :param labels: (optional) mapping of labels for the entry. 
- - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: Monitored resource of the entry, defaults - to the global resource type. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - client = self._require_client(client) - entry_resource = self._make_entry_resource( - text=text, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp, resource=resource, - trace=trace, span_id=span_id) - client.logging_api.write_entries([entry_resource]) - - def log_struct(self, info, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + self._do_log(client, TextEntry, text, **kw) + + def log_struct(self, info, client=None, **kw): """API call: log a structured message via a POST request See @@ -301,43 +181,13 @@ def log_struct(self, info, client=None, labels=None, insert_id=None, :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: Monitored resource of the entry, defaults - to the global resource type. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - client = self._require_client(client) - entry_resource = self._make_entry_resource( - info=info, labels=labels, insert_id=insert_id, severity=severity, - http_request=http_request, timestamp=timestamp, resource=resource, - trace=trace, span_id=span_id) - client.logging_api.write_entries([entry_resource]) - - def log_proto(self, message, client=None, labels=None, insert_id=None, - severity=None, http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + self._do_log(client, StructEntry, info, **kw) + + def log_proto(self, message, client=None, **kw): """API call: log a protobuf message via a POST request See @@ -351,39 +201,11 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, :param client: the client to use. 
If not passed, falls back to the ``client`` stored on the current logger. - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: Monitored resource of the entry, defaults - to the global resource type. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - client = self._require_client(client) - entry_resource = self._make_entry_resource( - message=message, labels=labels, insert_id=insert_id, - severity=severity, http_request=http_request, timestamp=timestamp, - resource=resource, trace=trace, span_id=span_id) - client.logging_api.write_entries([entry_resource]) + self._do_log(client, ProtobufEntry, message, **kw) def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request @@ -429,8 +251,8 @@ def list_entries(self, projects=None, filter_=None, order_by=None, entries. :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current logger. + :returns: Iterator of log entries accessible to the current logger. + See :class:`~google.cloud.logging.entries.LogEntry`. """ log_filter = 'logName=%s' % (self.full_name,) if filter_ is not None: @@ -475,131 +297,50 @@ def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is None: self.commit() - def log_text(self, text, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None, resource=_GLOBAL_RESOURCE, - trace=None, span_id=None): + def log_empty(self, **kw): + """Add a entry without payload to be logged during :meth:`commit`. + + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. + """ + self.entries.append(LogEntry(**kw)) + + def log_text(self, text, **kw): """Add a text entry to be logged during :meth:`commit`. :type text: str :param text: the text entry - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. Defaults - to the global resource type. If set to None, the - resource of the batch is used for this entry. If - both this resource and the Batch resource are None, - the API will return an error. 
- - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - self.entries.append( - ('text', text, labels, insert_id, severity, http_request, - timestamp, resource, trace, span_id)) + self.entries.append(TextEntry(payload=text, **kw)) - def log_struct(self, info, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + def log_struct(self, info, **kw): """Add a struct entry to be logged during :meth:`commit`. :type info: dict :param info: the struct entry - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. Defaults - to the global resource type. If set to None, the - resource of the batch is used for this entry. If - both this resource and the Batch resource are None, - the API will return an error. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - self.entries.append( - ('struct', info, labels, insert_id, severity, http_request, - timestamp, resource, trace, span_id)) + self.entries.append(StructEntry(payload=info, **kw)) - def log_proto(self, message, labels=None, insert_id=None, severity=None, - http_request=None, timestamp=None, - resource=_GLOBAL_RESOURCE, trace=None, span_id=None): + def log_proto(self, message, **kw): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message :param message: the protobuf entry - :type labels: dict - :param labels: (optional) mapping of labels for the entry. - - :type insert_id: str - :param insert_id: (optional) unique ID for log entry. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. Defaults - to the global resource type. If set to None, the - resource of the batch is used for this entry. If - both this resource and the Batch resource are None, - the API will return an error. - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. 
- Specify the trace parameter if span_id is set. + :type kw: dict + :param kw: (optional) additional keyword arguments for the entry. + See :class:`~google.cloud.logging.entries.LogEntry`. """ - self.entries.append( - ('proto', message, labels, insert_id, severity, http_request, - timestamp, resource, trace, span_id)) + self.entries.append(ProtobufEntry(payload=message, **kw)) def commit(self, client=None): """Send saved log entries as a single API call. @@ -618,43 +359,11 @@ def commit(self, client=None): if self.resource is not None: kwargs['resource'] = self.resource._to_dict() + if self.logger.labels is not None: kwargs['labels'] = self.logger.labels - entries = [] - for (entry_type, entry, labels, iid, severity, http_req, - timestamp, resource, trace, span_id) in self.entries: - if entry_type == 'text': - info = {'textPayload': entry} - elif entry_type == 'struct': - info = {'jsonPayload': entry} - elif entry_type == 'proto': - # NOTE: If ``entry`` contains an ``Any`` field with an - # unknown type, this will fail with a ``TypeError``. - # However, since ``entry`` was provided by a user in - # ``Batch.log_proto``, the assumption is that any types - # needed for the protobuf->JSON conversion will be known - # from already imported ``pb2`` modules. - info = {'protoPayload': MessageToDict(entry)} - else: - raise ValueError('Unknown entry type: %s' % (entry_type,)) - if resource is not None: - info['resource'] = resource._to_dict() - if labels is not None: - info['labels'] = labels - if iid is not None: - info['insertId'] = iid - if severity is not None: - info['severity'] = severity - if http_req is not None: - info['httpRequest'] = http_req - if timestamp is not None: - info['timestamp'] = _datetime_to_rfc3339(timestamp) - if trace is not None: - info['trace'] = trace - if span_id is not None: - info['spanId'] = span_id - entries.append(info) + entries = [entry.to_api_repr() for entry in self.entries] client.logging_api.write_entries(entries, **kwargs) del self.entries[:] diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index a8bb2601a295..c028ff3b4189 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -140,12 +140,12 @@ def test_list_entry_with_unregistered(self): self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) if Config.CLIENT._use_grpc: - self.assertIsNone(protobuf_entry.payload) + self.assertIsNone(protobuf_entry.payload_json) self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) else: self.assertIsNone(protobuf_entry.payload_pb) - self.assertEqual(protobuf_entry.payload['@type'], type_url) + self.assertEqual(protobuf_entry.payload_json['@type'], type_url) def test_log_text(self): TEXT_PAYLOAD = 'System test: test_log_text' @@ -157,6 +157,8 @@ def test_log_text(self): self.assertEqual(entries[0].payload, TEXT_PAYLOAD) def test_log_text_with_timestamp(self): + import datetime + text_payload = 'System test: test_log_text_with_timestamp' logger = Config.CLIENT.logger(self._logger_name('log_text_ts')) now = datetime.datetime.utcnow() @@ -168,6 +170,7 @@ def test_log_text_with_timestamp(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, text_payload) self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + self.assertIsInstance(entries[0].received_timestamp, datetime.datetime) def 
test_log_text_with_resource(self): text_payload = 'System test: test_log_text_with_timestamp' diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 90bbf7333937..8a4a0e7f362f 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -44,7 +44,7 @@ def _payload_helper(self, key, class_name): self.assertEqual(mock_class.called, (resource, client, loggers)) def test_wo_payload(self): - self._payload_helper(None, 'EmptyEntry') + self._payload_helper(None, 'LogEntry') def test_text_payload(self): self._payload_helper('textPayload', 'TextEntry') diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 00451ee50ff7..7aaf16acc130 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -39,43 +39,62 @@ def test_w_name_w_all_extras(self): self.assertEqual(logger_name, LOGGER_NAME) -class Test_BaseEntry(unittest.TestCase): +class Test__int_or_none(unittest.TestCase): + + def _call_fut(self, value): + from google.cloud.logging.entries import _int_or_none + + return _int_or_none(value) + + def test_w_none(self): + self.assertIsNone(self._call_fut(None)) + + def test_w_int(self): + self.assertEqual(self._call_fut(123), 123) + + def test_w_str(self): + self.assertEqual(self._call_fut('123'), 123) + + +class TestLogEntry(unittest.TestCase): PROJECT = 'PROJECT' LOGGER_NAME = 'LOGGER_NAME' @staticmethod - def _get_target_class(payload_key='dummyPayload'): - from google.cloud.logging.entries import _BaseEntry - - class _Dummy(_BaseEntry): - _PAYLOAD_KEY = payload_key + def _get_target_class(): + from google.cloud.logging.entries import LogEntry - return _Dummy + return LogEntry def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - PAYLOAD = 'PAYLOAD' - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._make_one(PAYLOAD, logger) - self.assertEqual(entry.payload, PAYLOAD) - self.assertIs(entry.logger, logger) - self.assertIsNone(entry.insert_id) - self.assertIsNone(entry.timestamp) + from google.cloud.logging.entries import _GLOBAL_RESOURCE + + entry = self._make_one() + + self.assertIsNone(entry.log_name) + self.assertIsNone(entry.logger) self.assertIsNone(entry.labels) + self.assertIsNone(entry.insert_id) self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) - self.assertIsNone(entry.resource) + self.assertIsNone(entry.timestamp) + self.assertIs(entry.resource, _GLOBAL_RESOURCE) self.assertIsNone(entry.trace) self.assertIsNone(entry.span_id) + self.assertIsNone(entry.trace_sampled) + self.assertIsNone(entry.source_location) + self.assertIsNone(entry.operation) + self.assertIsNone(entry.payload) def test_ctor_explicit(self): import datetime from google.cloud.logging.resource import Resource - PAYLOAD = 'PAYLOAD' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) IID = 'IID' TIMESTAMP = datetime.datetime.now() LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -91,18 +110,41 @@ def test_ctor_explicit(self): resource = Resource(type='global', labels={}) TRACE = '12345678-1234-5678-1234-567812345678' SPANID = '000000000000004a' - + FILE = 'my_file.py' + LINE_NO = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': LINE_NO, + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + 
PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._make_one(PAYLOAD, logger, - insert_id=IID, - timestamp=TIMESTAMP, - labels=LABELS, - severity=SEVERITY, - http_request=REQUEST, - resource=resource, - trace=TRACE, - span_id=SPANID) - self.assertEqual(entry.payload, PAYLOAD) + + entry = self._make_one( + log_name=LOG_NAME, + logger=logger, + insert_id=IID, + timestamp=TIMESTAMP, + labels=LABELS, + severity=SEVERITY, + http_request=REQUEST, + resource=resource, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + source_location=SOURCE_LOCATION, + operation=OPERATION, + ) + + self.assertEqual(entry.log_name, LOG_NAME) self.assertIs(entry.logger, logger) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, TIMESTAMP) @@ -114,26 +156,41 @@ def test_ctor_explicit(self): self.assertEqual(entry.resource, resource) self.assertEqual(entry.trace, TRACE) self.assertEqual(entry.span_id, SPANID) + self.assertTrue(entry.trace_sampled) + + source_location = entry.source_location + self.assertEqual(source_location['file'], FILE) + self.assertEqual(source_location['line'], LINE_NO) + self.assertEqual(source_location['function'], FUNCTION) + + self.assertEqual(entry.operation, OPERATION) + self.assertIsNone(entry.payload) - def test_from_api_repr_no_payload_missing_data_no_loggers(self): + def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) API_REPR = { 'logName': LOG_NAME, } - klass = self._get_target_class(payload_key=None) + klass = self._get_target_class() + entry = klass.from_api_repr(API_REPR, client) - self.assertIsNone(entry.payload) + + self.assertEqual(entry.log_name, LOG_NAME) + logger = entry.logger + self.assertIsInstance(logger, _Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIsNone(entry.insert_id) self.assertIsNone(entry.timestamp) self.assertIsNone(entry.severity) self.assertIsNone(entry.http_request) self.assertIsNone(entry.trace) self.assertIsNone(entry.span_id) - logger = entry.logger - self.assertIsInstance(logger, _Logger) + self.assertIsNone(entry.trace_sampled) + self.assertIsNone(entry.source_location) + self.assertIsNone(entry.operation) self.assertIs(logger.client, client) - self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIsNone(entry.payload) def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime @@ -142,7 +199,6 @@ def test_from_api_repr_w_loggers_no_logger_match(self): klass = self._get_target_class() client = _Client(self.PROJECT) - PAYLOAD = 'PAYLOAD' SEVERITY = 'CRITICAL' IID = 'IID' NOW = datetime.utcnow().replace(tzinfo=UTC) @@ -164,8 +220,23 @@ def test_from_api_repr_w_loggers_no_logger_match(self): STATUS = '500' TRACE = '12345678-1234-5678-1234-567812345678' SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE_NO = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': str(LINE_NO), + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } API_REPR = { - 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, @@ -178,60 +249,428 @@ def test_from_api_repr_w_loggers_no_logger_match(self): }, 'resource': RESOURCE._to_dict(), 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': 
True, + 'sourceLocation': SOURCE_LOCATION, + 'operation': OPERATION, } loggers = {} + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) + + self.assertEqual(entry.log_name, LOG_NAME) + logger = entry.logger + self.assertIsInstance(logger, _Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) + self.assertIsNone(entry.received_timestamp) self.assertEqual(entry.labels, LABELS) self.assertEqual(entry.severity, SEVERITY) self.assertEqual(entry.http_request['requestMethod'], METHOD) self.assertEqual(entry.http_request['requestUrl'], URI) self.assertEqual(entry.http_request['status'], STATUS) - logger = entry.logger - self.assertIsInstance(logger, _Logger) self.assertIs(logger.client, client) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertEqual(loggers, {LOG_NAME: logger}) self.assertEqual(entry.resource, RESOURCE) self.assertEqual(entry.trace, TRACE) self.assertEqual(entry.span_id, SPANID) + self.assertTrue(entry.trace_sampled) + + source_location = entry.source_location + self.assertEqual(source_location['file'], FILE) + self.assertEqual(source_location['line'], LINE_NO) + self.assertEqual(source_location['function'], FUNCTION) + + self.assertEqual(entry.operation, OPERATION) + self.assertIsNone(entry.payload) def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime + from datetime import timedelta from google.cloud._helpers import UTC client = _Client(self.PROJECT) - PAYLOAD = 'PAYLOAD' IID = 'IID' NOW = datetime.utcnow().replace(tzinfo=UTC) + LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) LABELS = {'foo': 'bar', 'baz': 'qux'} TRACE = '12345678-1234-5678-1234-567812345678' SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE_NO = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': str(LINE_NO), + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } API_REPR = { - 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, + 'receiveTimestamp': RECEIVED, 'labels': LABELS, 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': True, + 'sourceLocation': SOURCE_LOCATION, + 'operation': OPERATION, } LOGGER = object() loggers = {LOG_NAME: LOGGER} klass = self._get_target_class() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) + + self.assertEqual(entry.log_name, LOG_NAME) + self.assertIs(entry.logger, LOGGER) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.received_timestamp, LATER) self.assertEqual(entry.labels, LABELS) self.assertEqual(entry.trace, TRACE) self.assertEqual(entry.span_id, SPANID) - self.assertIs(entry.logger, LOGGER) + self.assertTrue(entry.trace_sampled) + + source_location = entry.source_location + self.assertEqual(source_location['file'], FILE) + self.assertEqual(source_location['line'], LINE_NO) + self.assertEqual(source_location['function'], FUNCTION) + + self.assertEqual(entry.operation, OPERATION) + self.assertIsNone(entry.payload) + + def test_to_api_repr_w_source_location_no_line(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + 
LOG_NAME = 'test.log' + FILE = 'my_file.py' + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'function': FUNCTION, + } + entry = self._make_one( + log_name=LOG_NAME, source_location=SOURCE_LOCATION) + expected = { + 'logName': LOG_NAME, + 'resource': _GLOBAL_RESOURCE._to_dict(), + 'sourceLocation': { + 'file': FILE, + 'line': '0', + 'function': FUNCTION, + } + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + from google.cloud._helpers import _datetime_to_rfc3339 + + LOG_NAME = 'test.log' + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': LINE, + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } + expected = { + 'logName': LOG_NAME, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP), + 'resource': RESOURCE._to_dict(), + 'trace': TRACE, + 'spanId': SPANID, + 'traceSampled': True, + 'sourceLocation': { + 'file': FILE, + 'line': str(LINE), + 'function': FUNCTION, + }, + 'operation': OPERATION, + } + entry = self._make_one( + log_name=LOG_NAME, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + source_location=SOURCE_LOCATION, + operation=OPERATION, + ) + + self.assertEqual(entry.to_api_repr(), expected) + + +class TestTextEntry(unittest.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + @staticmethod + def _get_target_class(): + from google.cloud.logging.entries import TextEntry + + return TextEntry + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_to_api_repr_defaults(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + LOG_NAME = 'test.log' + TEXT = 'TESTING' + entry = self._make_one(log_name=LOG_NAME, payload=TEXT) + expected = { + 'logName': LOG_NAME, + 'textPayload': TEXT, + 'resource': _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + from google.cloud._helpers import _datetime_to_rfc3339 + + LOG_NAME = 'test.log' + TEXT = 'This is the entry text' + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE = 123 + 
FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': LINE, + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } + expected = { + 'logName': LOG_NAME, + 'textPayload': TEXT, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP), + 'resource': RESOURCE._to_dict(), + 'trace': TRACE, + 'spanId': SPANID, + 'traceSampled': True, + 'sourceLocation': { + 'file': FILE, + 'line': str(LINE), + 'function': FUNCTION, + }, + 'operation': OPERATION, + } + entry = self._make_one( + log_name=LOG_NAME, + payload=TEXT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + source_location=SOURCE_LOCATION, + operation=OPERATION, + ) + + self.assertEqual(entry.to_api_repr(), expected) + + +class TestStructEntry(unittest.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + @staticmethod + def _get_target_class(): + from google.cloud.logging.entries import StructEntry + + return StructEntry + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_to_api_repr_defaults(self): + from google.cloud.logging.logger import _GLOBAL_RESOURCE + + LOG_NAME = 'test.log' + JSON_PAYLOAD = {'key': 'value'} + entry = self._make_one(log_name=LOG_NAME, payload=JSON_PAYLOAD) + expected = { + 'logName': LOG_NAME, + 'jsonPayload': JSON_PAYLOAD, + 'resource': _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + from google.cloud._helpers import _datetime_to_rfc3339 + + LOG_NAME = 'test.log' + JSON_PAYLOAD = {'key': 'value'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': LINE, + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } + expected = { + 'logName': LOG_NAME, + 'jsonPayload': JSON_PAYLOAD, + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP), + 'resource': RESOURCE._to_dict(), + 'trace': TRACE, + 'spanId': SPANID, + 'traceSampled': True, + 'sourceLocation': { + 'file': FILE, + 'line': str(LINE), + 'function': FUNCTION, + }, + 'operation': OPERATION, + } + entry = self._make_one( + log_name=LOG_NAME, + payload=JSON_PAYLOAD, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + source_location=SOURCE_LOCATION, + operation=OPERATION, + ) + + self.assertEqual(entry.to_api_repr(), expected) class TestProtobufEntry(unittest.TestCase): @@ -250,9 
+689,12 @@ def _make_one(self, *args, **kw): def test_constructor_basic(self): payload = {'foo': 'bar'} - pb_entry = self._make_one(payload, mock.sentinel.logger) - self.assertEqual(pb_entry.payload, payload) + + pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger) + + self.assertIs(pb_entry.payload, payload) self.assertIsNone(pb_entry.payload_pb) + self.assertIs(pb_entry.payload_json, payload) self.assertIs(pb_entry.logger, mock.sentinel.logger) self.assertIsNone(pb_entry.insert_id) self.assertIsNone(pb_entry.timestamp) @@ -261,14 +703,19 @@ def test_constructor_basic(self): self.assertIsNone(pb_entry.http_request) self.assertIsNone(pb_entry.trace) self.assertIsNone(pb_entry.span_id) + self.assertIsNone(pb_entry.trace_sampled) + self.assertIsNone(pb_entry.source_location) def test_constructor_with_any(self): from google.protobuf.any_pb2 import Any payload = Any() - pb_entry = self._make_one(payload, mock.sentinel.logger) + + pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger) + + self.assertIs(pb_entry.payload, payload) self.assertIs(pb_entry.payload_pb, payload) - self.assertIsNone(pb_entry.payload) + self.assertIsNone(pb_entry.payload_json) self.assertIs(pb_entry.logger, mock.sentinel.logger) self.assertIsNone(pb_entry.insert_id) self.assertIsNone(pb_entry.timestamp) @@ -277,20 +724,125 @@ def test_constructor_with_any(self): self.assertIsNone(pb_entry.http_request) self.assertIsNone(pb_entry.trace) self.assertIsNone(pb_entry.span_id) + self.assertIsNone(pb_entry.trace_sampled) + self.assertIsNone(pb_entry.source_location) def test_parse_message(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - LOGGER = object() message = Struct(fields={'foo': Value(bool_value=False)}) with_true = Struct(fields={'foo': Value(bool_value=True)}) - PAYLOAD = json.loads(MessageToJson(with_true)) - entry = self._make_one(PAYLOAD, LOGGER) + payload = json.loads(MessageToJson(with_true)) + entry = self._make_one(payload=payload, logger=mock.sentinel.logger) + entry.parse_message(message) + self.assertTrue(message.fields['foo']) + def test_to_api_repr_proto_defaults(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = 'test.log' + message = Struct(fields={'foo': Value(bool_value=True)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + 'logName': LOG_NAME, + 'protoPayload': MessageToDict(message), + 'resource': _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_proto_explicit(self): + import datetime + from google.protobuf.json_format import MessageToDict + from google.cloud.logging.resource import Resource + from google.cloud._helpers import _datetime_to_rfc3339 + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = 'test.log' + message = Struct(fields={'foo': Value(bool_value=True)}) + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 
'test' + } + ) + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + FILE = 'my_file.py' + LINE = 123 + FUNCTION = 'my_function' + SOURCE_LOCATION = { + 'file': FILE, + 'line': LINE, + 'function': FUNCTION, + } + OP_ID = 'OP_ID' + PRODUCER = 'PRODUCER' + OPERATION = { + 'id': OP_ID, + 'producer': PRODUCER, + 'first': True, + 'last': False, + } + expected = { + 'logName': LOG_NAME, + 'protoPayload': MessageToDict(message), + 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP), + 'resource': RESOURCE._to_dict(), + 'trace': TRACE, + 'spanId': SPANID, + 'traceSampled': True, + 'sourceLocation': { + 'file': FILE, + 'line': str(LINE), + 'function': FUNCTION, + }, + 'operation': OPERATION, + } + + entry = self._make_one( + log_name=LOG_NAME, + payload=message, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + source_location=SOURCE_LOCATION, + operation=OPERATION, + ) + + self.assertEqual(entry.to_api_repr(), expected) + def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index a2c30b65c6f5..158a727beeb1 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -88,7 +88,7 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.logger, logger) self.assertIs(batch.client, client2) - def test_log_empty_w_default_labels(self): + def test_log_empty_defaults_w_default_labels(self): DEFAULT_LABELS = {'foo': 'spam'} ENTRIES = [{ 'logName': 'projects/%s/logs/%s' % ( @@ -109,7 +109,11 @@ def test_log_empty_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_empty_w_explicit_client_labels_severity_httpreq(self): + def test_log_empty_w_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + + ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -124,19 +128,25 @@ def test_log_empty_w_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'resource': { - 'type': 'global', - 'labels': {}, - }, + 'logName': ALT_LOG_NAME, 'labels': LABELS, 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': RESOURCE._to_dict(), 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': True, }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -144,14 +154,24 @@ def test_log_empty_w_explicit_client_labels_severity_httpreq(self): logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_empty(client=client2, labels=LABELS, - insert_id=IID, severity=SEVERITY, - http_request=REQUEST, trace=TRACE, span_id=SPANID) + logger.log_empty( + log_name=ALT_LOG_NAME, + client=client2, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + 
http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_text_w_str_implicit_client(self): + def test_log_text_defaults(self): TEXT = 'TEXT' ENTRIES = [{ 'logName': 'projects/%s/logs/%s' % ( @@ -171,8 +191,8 @@ def test_log_text_w_str_implicit_client(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_text_w_default_labels(self): - TEXT = 'TEXT' + def test_log_text_w_unicode_and_default_labels(self): + TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} ENTRIES = [{ 'logName': 'projects/%s/logs/%s' % ( @@ -194,78 +214,12 @@ def test_log_text_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_text_w_timestamp(self): + def test_log_text_explicit(self): import datetime + from google.cloud.logging.resource import Resource + ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' TEXT = 'TEXT' - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_text(TEXT, timestamp=TIMESTAMP) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_text_w_trace(self): - - TEXT = 'TEXT' - TRACE = '12345678-1234-5678-1234-567812345678' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'trace': TRACE - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_text(TEXT, trace=TRACE) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_text_w_span(self): - - TEXT = 'TEXT' - SPANID = '000000000000004a' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'spanId': SPANID - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_text(TEXT, span_id=SPANID) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): - TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -280,35 +234,52 @@ def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), + 'logName': ALT_LOG_NAME, 'textPayload': TEXT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, 'labels': LABELS, 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': 
RESOURCE._to_dict(), 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': True, }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) - - logger.log_text(TEXT, client=client2, labels=LABELS, - insert_id=IID, severity=SEVERITY, http_request=REQUEST, - trace=TRACE, span_id=SPANID) + logger = self._make_one( + self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) + + logger.log_text( + TEXT, + log_name=ALT_LOG_NAME, + client=client2, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_struct_w_implicit_client(self): + def test_log_struct_defaults(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} ENTRIES = [{ 'logName': 'projects/%s/logs/%s' % ( @@ -351,7 +322,11 @@ def test_log_struct_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_struct_w_explicit_client_labels_severity_httpreq(self): + def test_log_struct_w_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + + ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -367,20 +342,26 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), + 'logName': ALT_LOG_NAME, 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, 'labels': LABELS, 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': RESOURCE._to_dict(), 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': True, }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -388,84 +369,25 @@ def test_log_struct_w_explicit_client_labels_severity_httpreq(self): logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_struct(STRUCT, client=client2, labels=LABELS, - insert_id=IID, severity=SEVERITY, - http_request=REQUEST, trace=TRACE, span_id=SPANID) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_struct_w_timestamp(self): - import datetime - - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_struct(STRUCT, timestamp=TIMESTAMP) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_struct_w_trace(self): - - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - TRACE = 
'12345678-1234-5678-1234-567812345678' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'trace': TRACE - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_struct(STRUCT, trace=TRACE) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_struct_w_span(self): - - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - SPANID = '000000000000004a' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'spanId': SPANID - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_struct(STRUCT, span_id=SPANID) + logger.log_struct( + STRUCT, + log_name=ALT_LOG_NAME, + client=client2, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_proto_w_implicit_client(self): + def test_log_proto_defaults(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -516,13 +438,16 @@ def test_log_proto_w_default_labels(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) - def test_log_proto_w_explicit_client_labels_severity_httpreq(self): + def test_log_proto_w_explicit(self): import json + import datetime from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value + from google.cloud.logging.resource import Resource message = Struct(fields={'foo': Value(bool_value=True)}) + ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} IID = 'IID' @@ -537,20 +462,26 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): 'requestUrl': URI, 'status': STATUS, } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), + 'logName': ALT_LOG_NAME, 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - 'labels': {}, - }, 'labels': LABELS, 'insertId': IID, 'severity': SEVERITY, 'httpRequest': REQUEST, + 'timestamp': '2016-12-31T00:01:02.999999Z', + 'resource': RESOURCE._to_dict(), 'trace': TRACE, - 'spanId': SPANID + 'spanId': SPANID, + 'traceSampled': True, }] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) @@ -558,91 +489,20 @@ def test_log_proto_w_explicit_client_labels_severity_httpreq(self): logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_proto(message, client=client2, labels=LABELS, - insert_id=IID, severity=SEVERITY, - http_request=REQUEST, trace=TRACE, span_id=SPANID) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_proto_w_timestamp(self): - import json - import datetime - from google.protobuf.json_format 
import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - message = Struct(fields={'foo': Value(bool_value=True)}) - TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_proto(message, timestamp=TIMESTAMP) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_proto_w_trace(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - message = Struct(fields={'foo': Value(bool_value=True)}) - TRACE = '12345678-1234-5678-1234-567812345678' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'trace': TRACE - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_proto(message, trace=TRACE) - - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) - - def test_log_proto_w_span(self): - import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - message = Struct(fields={'foo': Value(bool_value=True)}) - SPANID = '000000000000004a' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'spanId': SPANID - }] - client = _Client(self.PROJECT) - api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client) - - logger.log_proto(message, span_id=SPANID) + logger.log_proto( + message, + log_name=ALT_LOG_NAME, + client=client2, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) @@ -763,20 +623,86 @@ def test_ctor_defaults(self): self.assertIs(batch.client, client) self.assertEqual(len(batch.entries), 0) + def test_log_empty_defaults(self): + from google.cloud.logging.entries import LogEntry + + ENTRY = LogEntry() + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log_empty() + self.assertEqual(batch.entries, [ENTRY]) + + def test_log_empty_explicit(self): + import datetime + from google.cloud.logging.resource import Resource + from google.cloud.logging.entries import LogEntry + + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + 
type='gae_app', + labels={ + 'module_id': 'default', + 'version_id': 'test' + } + ) + TRACE = '12345678-1234-5678-1234-567812345678' + SPANID = '000000000000004a' + ENTRY = LogEntry( + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log_empty( + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) + def test_log_text_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging.entries import TextEntry + TEXT = 'This is the entry text' + ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_text(TEXT) - self.assertEqual(batch.entries, - [('text', TEXT, None, None, None, None, None, - _GLOBAL_RESOURCE, None, None)]) + self.assertEqual(batch.entries, [ENTRY]) def test_log_text_explicit(self): import datetime from google.cloud.logging.resource import Resource + from google.cloud.logging.entries import TextEntry TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} @@ -800,33 +726,52 @@ def test_log_text_explicit(self): ) TRACE = '12345678-1234-5678-1234-567812345678' SPANID = '000000000000004a' + ENTRY = TextEntry( + payload=TEXT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, - http_request=REQUEST, timestamp=TIMESTAMP, - resource=RESOURCE, trace=TRACE, span_id=SPANID) - self.assertEqual( - batch.entries, - [('text', TEXT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE, SPANID)]) + batch.log_text( + TEXT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) def test_log_struct_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging.entries import StructEntry + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_struct(STRUCT) - self.assertEqual( - batch.entries, - [('struct', STRUCT, None, None, None, None, None, - _GLOBAL_RESOURCE, None, None)]) + self.assertEqual(batch.entries, [ENTRY]) def test_log_struct_explicit(self): import datetime from google.cloud.logging.resource import Resource + from google.cloud.logging.entries import StructEntry STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 
'qux'} @@ -850,36 +795,54 @@ def test_log_struct_explicit(self): 'version_id': 'test', } ) + ENTRY = StructEntry( + payload=STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE, - span_id=SPANID) - self.assertEqual( - batch.entries, - [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE, SPANID)]) + batch.log_struct( + STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) def test_log_proto_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging.entries import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value message = Struct(fields={'foo': Value(bool_value=True)}) + ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) batch.log_proto(message) - self.assertEqual(batch.entries, - [('proto', message, None, None, None, None, None, - _GLOBAL_RESOURCE, None, None)]) + self.assertEqual(batch.entries, [ENTRY]) def test_log_proto_explicit(self): import datetime from google.cloud.logging.resource import Resource + from google.cloud.logging.entries import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -905,29 +868,57 @@ def test_log_proto_explicit(self): 'version_id': 'test', } ) + ENTRY = ProtobufEntry( + payload=message, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_proto(message, labels=LABELS, insert_id=IID, - severity=SEVERITY, http_request=REQUEST, - timestamp=TIMESTAMP, resource=RESOURCE, trace=TRACE, - span_id=SPANID) - self.assertEqual( - batch.entries, - [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP, - RESOURCE, TRACE, SPANID)]) - - def test_commit_w_invalid_entry_type(self): + batch.log_proto( + message, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) + + def test_commit_w_unknown_entry_type(self): + from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging.entries import LogEntry + logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) + api = client.logging_api = _DummyLoggingAPI() batch = self._make_one(logger, client) - batch.entries.append(('bogus', 'BOGUS', None, None, None, None, None, - None, None, None)) - with self.assertRaises(ValueError): - batch.commit() + 
batch.entries.append(LogEntry(severity='blah')) + ENTRY = { + 'severity': 'blah', + 'resource': _GLOBAL_RESOURCE._to_dict(), + } + + batch.commit() + + self.assertEqual(list(batch.entries), []) + self.assertEqual(api._write_entries_called_with, + ([ENTRY], logger.full_name, None, None)) def test_commit_w_resource_specified(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE from google.cloud.logging.resource import Resource logger = _Logger() @@ -961,7 +952,7 @@ def test_commit_w_bound_client(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} @@ -978,32 +969,60 @@ def test_commit_w_bound_client(self): SPANID1 = '000000000000004a' SPANID2 = '000000000000004b' SPANID3 = '000000000000004c' - ENTRIES = [ - {'textPayload': TEXT, 'insertId': IID1, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE1, - 'spanId': SPANID1}, - {'jsonPayload': STRUCT, 'insertId': IID2, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE2, - 'spanId': SPANID2}, - {'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), - 'resource': _GLOBAL_RESOURCE._to_dict(), 'trace': TRACE3, - 'spanId': SPANID3}, - ] + ENTRIES = [{ + 'textPayload': TEXT, + 'insertId': IID1, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), + 'resource': _GLOBAL_RESOURCE._to_dict(), + 'trace': TRACE1, + 'spanId': SPANID1, + 'traceSampled': True, + }, { + 'jsonPayload': STRUCT, + 'insertId': IID2, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), + 'resource': _GLOBAL_RESOURCE._to_dict(), + 'trace': TRACE2, + 'spanId': SPANID2, + 'traceSampled': False, + }, { + 'protoPayload': json.loads(MessageToJson(message)), + 'insertId': IID3, + 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), + 'resource': _GLOBAL_RESOURCE._to_dict(), + 'trace': TRACE3, + 'spanId': SPANID3, + 'traceSampled': True, + }] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() batch = self._make_one(logger, client=client) - batch.log_text(TEXT, insert_id=IID1, timestamp=TIMESTAMP1, - trace=TRACE1, span_id=SPANID1) - batch.log_struct(STRUCT, insert_id=IID2, timestamp=TIMESTAMP2, - trace=TRACE2, span_id=SPANID2) - batch.log_proto(message, insert_id=IID3, timestamp=TIMESTAMP3, - trace=TRACE3, span_id=SPANID3) + batch.log_text( + TEXT, + insert_id=IID1, + timestamp=TIMESTAMP1, + trace=TRACE1, + span_id=SPANID1, + trace_sampled=True, + ) + batch.log_struct( + STRUCT, + insert_id=IID2, + timestamp=TIMESTAMP2, + trace=TRACE2, + span_id=SPANID2, + trace_sampled=False, + ) + batch.log_proto( + message, + insert_id=IID3, + timestamp=TIMESTAMP3, + trace=TRACE3, + span_id=SPANID3, + trace_sampled=True, + ) batch.commit() self.assertEqual(list(batch.entries), []) @@ -1016,7 +1035,7 @@ def test_commit_w_alternate_client(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud.logging.logger import Logger - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging.entries import _GLOBAL_RESOURCE TEXT = 'This is the entry 
text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}

@@ -1065,7 +1084,7 @@ def test_context_mgr_success(self):
         from google.protobuf.struct_pb2 import Struct
         from google.protobuf.struct_pb2 import Value
         from google.cloud.logging.logger import Logger
-        from google.cloud.logging.logger import _GLOBAL_RESOURCE
+        from google.cloud.logging.entries import _GLOBAL_RESOURCE

         TEXT = 'This is the entry text'
         STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
@@ -1108,7 +1127,9 @@ def test_context_mgr_failure(self):
         import datetime
         from google.protobuf.struct_pb2 import Struct
         from google.protobuf.struct_pb2 import Value
-        from google.cloud.logging.logger import _GLOBAL_RESOURCE
+        from google.cloud.logging.entries import TextEntry
+        from google.cloud.logging.entries import StructEntry
+        from google.cloud.logging.entries import ProtobufEntry

         TEXT = 'This is the entry text'
         STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
@@ -1129,12 +1150,10 @@
         api = client.logging_api = _DummyLoggingAPI()
         logger = _Logger()
         UNSENT = [
-            ('text', TEXT, None, IID, None, None, TIMESTAMP,
-             _GLOBAL_RESOURCE, None, None),
-            ('struct', STRUCT, None, None, SEVERITY, None, None,
-             _GLOBAL_RESOURCE, None, None),
-            ('proto', message, LABELS, None, None, REQUEST, None,
-             _GLOBAL_RESOURCE, None, None),
+            TextEntry(payload=TEXT, insert_id=IID, timestamp=TIMESTAMP),
+            StructEntry(payload=STRUCT, severity=SEVERITY),
+            ProtobufEntry(
+                payload=message, labels=LABELS, http_request=REQUEST),
         ]
         batch = self._make_one(logger, client=client)

From 943af8a68e0bea5d27f60494da130bf0cb4a8ff1 Mon Sep 17 00:00:00 2001
From: Yuvi Panda
Date: Fri, 26 Oct 2018 15:12:31 -0700
Subject: [PATCH 196/855] Allow turning on JSON Detection in StackDriver
 (#6293)

* Allow turning on JSON Detection in StackDriver

To enable JSON detection in StackDriver, `jsonPayload` should have no
fields other than `message`. This patch lets clients opt in to this
behavior by creating a custom Transport.

Fixes https://github.com/googleapis/google-cloud-python/issues/5799
---
 .../handlers/transports/background_thread.py | 29 +++++++++++++++----
 .../transports/test_background_thread.py     | 22 ++++++++++++++
 2 files changed, 45 insertions(+), 6 deletions(-)

diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
index 852e32dd42bb..62d5a0323d5b 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py
@@ -92,12 +92,18 @@ class _Worker(object):
         than the grace_period. This means this is effectively the longest
         amount of time the background thread will hold onto log entries
         before sending them to the server.
+
+    :type include_logger_name: bool
+    :param include_logger_name: (optional) Include python_logger field in
+        jsonPayload. Turn this off to enable json detection in log messages.
""" def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD, max_batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY): + max_latency=_DEFAULT_MAX_LATENCY, + include_logger_name=True): self._cloud_logger = cloud_logger + self._include_logger_name = include_logger_name self._grace_period = grace_period self._max_batch_size = max_batch_size self._max_latency = max_latency @@ -253,17 +259,21 @@ def enqueue(self, record, message, resource=None, labels=None, :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. """ - self._queue.put_nowait({ + + log_record = { 'info': { 'message': message, - 'python_logger': record.name, }, 'severity': record.levelname, 'resource': resource, 'labels': labels, 'trace': trace, 'span_id': span_id, - }) + } + + if self._include_logger_name: + log_record['info']['python_logger'] = record.name + self._queue.put_nowait(log_record) def flush(self): """Submit any pending log records.""" @@ -293,17 +303,24 @@ class BackgroundThreadTransport(Transport): than the grace_period. This means this is effectively the longest amount of time the background thread will hold onto log entries before sending them to the server. + + :type includer_logger_name: bool + :param include_logger_name: (optional) Include python_logger field in + jsonPayload. Turn this off to enable jso + detection in log messages. """ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY): + max_latency=_DEFAULT_MAX_LATENCY, + include_logger_name=True): self.client = client logger = self.client.logger(name) self.worker = _Worker(logger, grace_period=grace_period, max_batch_size=batch_size, - max_latency=max_latency) + max_latency=max_latency, + include_logger_name=include_logger_name) self.worker.start() def send(self, record, message, resource=None, labels=None, diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index e06083d2b756..a773b7aae210 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -175,6 +175,7 @@ def test_constructor(self): self.assertEqual(worker._grace_period, grace_period) self.assertEqual(worker._max_batch_size, max_batch_size) self.assertEqual(worker._max_latency, max_latency) + self.assertTrue(worker._include_logger_name) self.assertFalse(worker.is_alive) self.assertIsNone(worker._thread) @@ -282,6 +283,23 @@ def test__thread_main(self): self.assertEqual(worker._cloud_logger._batch.commit_count, 2) self.assertEqual(worker._queue.qsize(), 0) + def test__thread_main_no_python_logger(self): + from google.cloud.logging.handlers.transports import background_thread + + worker = self._make_one(_Logger(self.NAME), include_logger_name=False) + self.assertFalse(worker._include_logger_name) + + # Enqueue one record and the termination signal. 
+ self._enqueue_record(worker, '1') + worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) + + worker._thread_main() + + self.assertEqual(len(worker._cloud_logger._batch.all_entries), 1) + self.assertFalse( + 'python_logger' in worker._cloud_logger._batch.all_entries[0] + ) + def test__thread_main_error(self): from google.cloud.logging.handlers.transports import background_thread @@ -421,9 +439,12 @@ def join(self, timeout=None): class _Batch(object): def __init__(self): + # Entries waiting to be committed self.entries = [] self.commit_called = False self.commit_count = None + # All entries ever logged via this _Batch + self.all_entries = [] def log_struct( self, info, severity=logging.INFO, resource=None, labels=None, @@ -436,6 +457,7 @@ def log_struct( self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) self.entries.append(info) + self.all_entries.append(info) def commit(self): self.commit_called = True
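A minimal usage sketch of the opt-in added here, assuming this patch is applied (it is reverted in the next patch). `CloudLoggingHandler` accepts a transport class and instantiates it with `(client, name)`, so a small subclass is the natural way to pass `include_logger_name=False`; the subclass name and log name below are illustrative only, not part of the library.

    # Sketch only, valid while PATCH 196 is applied (reverted below).
    import logging

    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging.handlers.transports import BackgroundThreadTransport


    class JSONDetectingTransport(BackgroundThreadTransport):
        # Omit 'python_logger' so jsonPayload carries only 'message',
        # letting StackDriver run JSON detection on the payload.
        def __init__(self, client, name):
            super(JSONDetectingTransport, self).__init__(
                client, name, include_logger_name=False)


    client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(
        client, name='example_log', transport=JSONDetectingTransport)
    logging.getLogger().addHandler(handler)
    logging.warning('only a message field in jsonPayload')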
From b28f8a5be87e4c8c4ae651725229a21ee41051c9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 31 Oct 2018 15:58:44 -0400 Subject: [PATCH 197/855] Revert "Allow turning on JSON Detection in StackDriver" (#6352) This reverts commit ef038bfd9a1383e6ac367e19995bda3eae28617f. (PR #6293) --- .../handlers/transports/background_thread.py | 29 ++++--------------- .../transports/test_background_thread.py | 22 -------------- 2 files changed, 6 insertions(+), 45 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 62d5a0323d5b..852e32dd42bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -92,18 +92,12 @@ class _Worker(object): than the grace_period. This means this is effectively the longest amount of time the background thread will hold onto log entries before sending them to the server. - - :type include_logger_name: bool - :param include_logger_name: (optional) Include python_logger field in - jsonPayload. Turn this off to enable json detection in log messages. """ def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD, max_batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY, - include_logger_name=True): + max_latency=_DEFAULT_MAX_LATENCY): self._cloud_logger = cloud_logger - self._include_logger_name = include_logger_name self._grace_period = grace_period self._max_batch_size = max_batch_size self._max_latency = max_latency @@ -259,21 +253,17 @@ def enqueue(self, record, message, resource=None, labels=None, :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. """ - - log_record = { + self._queue.put_nowait({ 'info': { 'message': message, + 'python_logger': record.name, }, 'severity': record.levelname, 'resource': resource, 'labels': labels, 'trace': trace, 'span_id': span_id, - } - - if self._include_logger_name: - log_record['info']['python_logger'] = record.name - self._queue.put_nowait(log_record) + }) def flush(self): """Submit any pending log records.""" @@ -303,24 +293,17 @@ class BackgroundThreadTransport(Transport): than the grace_period. This means this is effectively the longest amount of time the background thread will hold onto log entries before sending them to the server. - - :type include_logger_name: bool - :param include_logger_name: (optional) Include python_logger field in - jsonPayload. Turn this off to enable json - detection in log messages. """ def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY, - include_logger_name=True): + max_latency=_DEFAULT_MAX_LATENCY): self.client = client logger = self.client.logger(name) self.worker = _Worker(logger, grace_period=grace_period, max_batch_size=batch_size, - max_latency=max_latency, - include_logger_name=include_logger_name) + max_latency=max_latency) self.worker.start() def send(self, record, message, resource=None, labels=None, diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index a773b7aae210..e06083d2b756 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -175,7 +175,6 @@ def test_constructor(self): self.assertEqual(worker._grace_period, grace_period) self.assertEqual(worker._max_batch_size, max_batch_size) self.assertEqual(worker._max_latency, max_latency) - self.assertTrue(worker._include_logger_name) self.assertFalse(worker.is_alive) self.assertIsNone(worker._thread) @@ -283,23 +282,6 @@ def test__thread_main(self): self.assertEqual(worker._cloud_logger._batch.commit_count, 2) self.assertEqual(worker._queue.qsize(), 0) - def test__thread_main_no_python_logger(self): - from google.cloud.logging.handlers.transports import background_thread - - worker = self._make_one(_Logger(self.NAME), include_logger_name=False) - self.assertFalse(worker._include_logger_name) - - # Enqueue one record and the termination signal. - self._enqueue_record(worker, '1') - worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) - - worker._thread_main() - - self.assertEqual(len(worker._cloud_logger._batch.all_entries), 1) - self.assertFalse( - 'python_logger' in worker._cloud_logger._batch.all_entries[0] - ) - def test__thread_main_error(self): from google.cloud.logging.handlers.transports import background_thread @@ -439,12 +421,9 @@ def join(self, timeout=None): class _Batch(object): def __init__(self): - # Entries waiting to be committed self.entries = [] self.commit_called = False self.commit_count = None - # All entries ever logged via this _Batch - self.all_entries = [] def log_struct( self, info, severity=logging.INFO, resource=None, labels=None, @@ -457,7 +436,6 @@ def log_struct( self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) self.entries.append(info) - self.all_entries.append(info) def commit(self): self.commit_called = True From 9823c932f92a5d3545068efd5f1af49967fc9c0b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 31 Oct 2018 16:54:42 -0400 Subject: [PATCH 198/855] Logging: add 'synth.py'. (#6081) Closes #6073.
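Among other changes, the regenerated clients below gain a `from_service_account_file` constructor (aliased as `from_service_account_json`) and route all RPCs through new gRPC transport classes. A brief usage sketch of that regenerated surface, drawn from the docstring examples in the diff itself; the key-file path and project ID are placeholders:

    # Construct a client from a service-account key file and list sinks.
    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client.from_service_account_file(
        '/path/to/keyfile.json')

    parent = client.project_path('my-project')
    for sink in client.list_sinks(parent):
        print(sink.name)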
--- .../gapic/config_service_v2_client.py | 369 +++++---- .../gapic/config_service_v2_client_config.py | 26 +- .../google/cloud/logging_v2/gapic/enums.py | 122 ++- .../gapic/logging_service_v2_client.py | 314 +++++--- .../gapic/logging_service_v2_client_config.py | 24 +- .../gapic/metrics_service_v2_client.py | 245 +++--- .../gapic/metrics_service_v2_client_config.py | 12 +- .../logging_v2/gapic/transports/__init__.py | 0 .../config_service_v2_grpc_transport.py | 238 ++++++ .../logging_service_v2_grpc_transport.py | 177 +++++ .../metrics_service_v2_grpc_transport.py | 164 ++++ .../cloud/logging_v2/proto/log_entry_pb2.py | 158 ++-- .../logging_v2/proto/logging_config_pb2.py | 732 +++++------------- .../proto/logging_config_pb2_grpc.py | 12 +- .../logging_v2/proto/logging_metrics_pb2.py | 395 +++------- .../proto/logging_metrics_pb2_grpc.py | 4 +- .../cloud/logging_v2/proto/logging_pb2.py | 497 ++++-------- .../logging_v2/proto/logging_pb2_grpc.py | 23 +- packages/google-cloud-logging/synth.py | 39 + 19 files changed, 1868 insertions(+), 1683 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py create mode 100644 packages/google-cloud-logging/synth.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 13e3693d403b..169087562dbd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,20 +17,27 @@ import functools import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template +import grpc from google.api import monitored_resource_pb2 from google.cloud.logging_v2.gapic import config_service_v2_client_config from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport from google.cloud.logging_v2.proto import log_entry_pb2 from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2_grpc from google.cloud.logging_v2.proto import logging_pb2 +from google.cloud.logging_v2.proto import logging_pb2_grpc +from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( @@ -37,27 +46,38 @@ class ConfigServiceV2Client(object): """ - Service for configuring sinks used to export log entries outside of - Stackdriver Logging. + Service for configuring sinks used to export log entries out of + Logging. 
""" SERVICE_ADDRESS = 'logging.googleapis.com:443' """The default address of the service.""" - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ) - - # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary. + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. _INTERFACE_NAME = 'google.logging.v2.ConfigServiceV2' + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" @@ -85,6 +105,7 @@ def exclusion_path(cls, project, exclusion): ) def __init__(self, + transport=None, channel=None, credentials=None, client_config=config_service_v2_client_config.config, @@ -92,117 +113,83 @@ def __init__(self, """Constructor. Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive + transport (Union[~.ConfigServiceV2GrpcTransport, + Callable[[~.Credentials, type], ~.ConfigServiceV2GrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): A dictionary of call options for each - method. If not specified, the default configuration is used. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
""" - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - 'The `channel` and `credentials` arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__), ) - - # Create the channel. - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=config_service_v2_grpc_transport. + ConfigServiceV2GrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES, ) - # Create the gRPC stubs. - self.config_service_v2_stub = ( - logging_config_pb2.ConfigServiceV2Stub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC # from the client configuration. # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) - method_configs = google.api_core.gapic_v1.config.parse_method_configs( + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config['interfaces'][self._INTERFACE_NAME], ) - # Write the "inner API call" methods to the class. - # These are wrapped versions of the gRPC stub methods, with retry and - # timeout configuration applied, called by the public methods on - # this class. 
- self._list_sinks = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.ListSinks, - default_retry=method_configs['ListSinks'].retry, - default_timeout=method_configs['ListSinks'].timeout, - client_info=client_info, - ) - self._get_sink = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.GetSink, - default_retry=method_configs['GetSink'].retry, - default_timeout=method_configs['GetSink'].timeout, - client_info=client_info, - ) - self._create_sink = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.CreateSink, - default_retry=method_configs['CreateSink'].retry, - default_timeout=method_configs['CreateSink'].timeout, - client_info=client_info, - ) - self._update_sink = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.UpdateSink, - default_retry=method_configs['UpdateSink'].retry, - default_timeout=method_configs['UpdateSink'].timeout, - client_info=client_info, - ) - self._delete_sink = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.DeleteSink, - default_retry=method_configs['DeleteSink'].retry, - default_timeout=method_configs['DeleteSink'].timeout, - client_info=client_info, - ) - self._list_exclusions = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.ListExclusions, - default_retry=method_configs['ListExclusions'].retry, - default_timeout=method_configs['ListExclusions'].timeout, - client_info=client_info, - ) - self._get_exclusion = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.GetExclusion, - default_retry=method_configs['GetExclusion'].retry, - default_timeout=method_configs['GetExclusion'].timeout, - client_info=client_info, - ) - self._create_exclusion = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.CreateExclusion, - default_retry=method_configs['CreateExclusion'].retry, - default_timeout=method_configs['CreateExclusion'].timeout, - client_info=client_info, - ) - self._update_exclusion = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.UpdateExclusion, - default_retry=method_configs['UpdateExclusion'].retry, - default_timeout=method_configs['UpdateExclusion'].timeout, - client_info=client_info, - ) - self._delete_exclusion = google.api_core.gapic_v1.method.wrap_method( - self.config_service_v2_stub.DeleteExclusion, - default_retry=method_configs['DeleteExclusion'].retry, - default_timeout=method_configs['DeleteExclusion'].timeout, - client_info=client_info, - ) + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} # Service calls def list_sinks(self, @@ -221,13 +208,15 @@ def list_sinks(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_sinks(parent): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_sinks(parent, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -269,9 +258,16 @@ def list_sinks(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'list_sinks' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_sinks'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_sinks, + default_retry=self._method_configs['ListSinks'].retry, + default_timeout=self._method_configs['ListSinks'].timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.ListSinksRequest( parent=parent, page_size=page_size, @@ -279,7 +275,7 @@ def list_sinks(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_sinks, + self._inner_api_calls['list_sinks'], retry=retry, timeout=timeout, metadata=metadata), @@ -337,11 +333,18 @@ def get_sink(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'get_sink' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_sink'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_sink, + default_retry=self._method_configs['GetSink'].retry, + default_timeout=self._method_configs['GetSink'].timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name, ) - return self._get_sink( + return self._inner_api_calls['get_sink']( request, retry=retry, timeout=timeout, metadata=metadata) def create_sink(self, @@ -363,6 +366,8 @@ def create_sink(self, >>> client = logging_v2.ConfigServiceV2Client() >>> >>> parent = client.project_path('[PROJECT]') + >>> + >>> # TODO: Initialize ``sink``: >>> sink = {} >>> >>> response = client.create_sink(parent, sink) @@ -385,7 +390,7 @@ def create_sink(self, unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as ``writer_identity`` in the new sink. If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Stackdriver Logging before the + the same group or service account used by Logging before the addition of writer identities to this API. The sink's destination must be in the same project as the sink itself. @@ -412,15 +417,22 @@ def create_sink(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. 
+ if 'create_sink' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_sink'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_sink, + default_retry=self._method_configs['CreateSink'].retry, + default_timeout=self._method_configs['CreateSink'].timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.CreateSinkRequest( parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, ) - return self._create_sink( + return self._inner_api_calls['create_sink']( request, retry=retry, timeout=timeout, metadata=metadata) def update_sink(self, @@ -443,6 +455,8 @@ def update_sink(self, >>> client = logging_v2.ConfigServiceV2Client() >>> >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> + >>> # TODO: Initialize ``sink``: >>> sink = {} >>> >>> response = client.update_sink(sink_name, sink) @@ -492,7 +506,7 @@ def update_sink(self, empty updateMask will be an error. For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. If a dict is provided, it must be of the same form as the protobuf @@ -516,16 +530,23 @@ def update_sink(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'update_sink' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_sink'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_sink, + default_retry=self._method_configs['UpdateSink'].retry, + default_timeout=self._method_configs['UpdateSink'].timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.UpdateSinkRequest( sink_name=sink_name, sink=sink, unique_writer_identity=unique_writer_identity, update_mask=update_mask, ) - return self._update_sink( + return self._inner_api_calls['update_sink']( request, retry=retry, timeout=timeout, metadata=metadata) def delete_sink(self, @@ -574,11 +595,18 @@ def delete_sink(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'delete_sink' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_sink'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_sink, + default_retry=self._method_configs['DeleteSink'].retry, + default_timeout=self._method_configs['DeleteSink'].timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name, ) - self._delete_sink( + self._inner_api_calls['delete_sink']( request, retry=retry, timeout=timeout, metadata=metadata) def list_exclusions(self, @@ -597,13 +625,15 @@ def list_exclusions(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_exclusions(parent): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_exclusions(parent, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... 
# process element @@ -645,9 +675,17 @@ def list_exclusions(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'list_exclusions' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_exclusions'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_exclusions, + default_retry=self._method_configs['ListExclusions'].retry, + default_timeout=self._method_configs['ListExclusions']. + timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.ListExclusionsRequest( parent=parent, page_size=page_size, @@ -655,7 +693,7 @@ def list_exclusions(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_exclusions, + self._inner_api_calls['list_exclusions'], retry=retry, timeout=timeout, metadata=metadata), @@ -713,11 +751,19 @@ def get_exclusion(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'get_exclusion' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_exclusion'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_exclusion, + default_retry=self._method_configs['GetExclusion'].retry, + default_timeout=self._method_configs['GetExclusion']. + timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.GetExclusionRequest(name=name, ) - return self._get_exclusion( + return self._inner_api_calls['get_exclusion']( request, retry=retry, timeout=timeout, metadata=metadata) def create_exclusion(self, @@ -737,6 +783,8 @@ def create_exclusion(self, >>> client = logging_v2.ConfigServiceV2Client() >>> >>> parent = client.project_path('[PROJECT]') + >>> + >>> # TODO: Initialize ``exclusion``: >>> exclusion = {} >>> >>> response = client.create_exclusion(parent, exclusion) @@ -775,14 +823,23 @@ def create_exclusion(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'create_exclusion' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_exclusion'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_exclusion, + default_retry=self._method_configs['CreateExclusion']. + retry, + default_timeout=self._method_configs['CreateExclusion']. + timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.CreateExclusionRequest( parent=parent, exclusion=exclusion, ) - return self._create_exclusion( + return self._inner_api_calls['create_exclusion']( request, retry=retry, timeout=timeout, metadata=metadata) def update_exclusion(self, @@ -801,7 +858,11 @@ def update_exclusion(self, >>> client = logging_v2.ConfigServiceV2Client() >>> >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> + >>> # TODO: Initialize ``exclusion``: >>> exclusion = {} + >>> + >>> # TODO: Initialize ``update_mask``: >>> update_mask = {} >>> >>> response = client.update_exclusion(name, exclusion, update_mask) @@ -849,15 +910,24 @@ def update_exclusion(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'update_exclusion' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_exclusion'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_exclusion, + default_retry=self._method_configs['UpdateExclusion']. + retry, + default_timeout=self._method_configs['UpdateExclusion']. + timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.UpdateExclusionRequest( name=name, exclusion=exclusion, update_mask=update_mask, ) - return self._update_exclusion( + return self._inner_api_calls['update_exclusion']( request, retry=retry, timeout=timeout, metadata=metadata) def delete_exclusion(self, @@ -904,9 +974,18 @@ def delete_exclusion(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'delete_exclusion' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_exclusion'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_exclusion, + default_retry=self._method_configs['DeleteExclusion']. + retry, + default_timeout=self._method_configs['DeleteExclusion']. + timeout, + client_info=self._client_info, + ) + request = logging_config_pb2.DeleteExclusionRequest(name=name, ) - self._delete_exclusion( + self._inner_api_calls['delete_exclusion']( request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py index bc8363c6f3f2..4dfcb0f02c83 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py @@ -8,21 +8,21 @@ "retry_params": { "default": { "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.2, - "max_retry_delay_millis": 1000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.5, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 90000 + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 }, "write_sink": { "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.2, - "max_retry_delay_millis": 1000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.5, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 120000 + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 } }, "methods": { @@ -43,7 +43,7 @@ }, "UpdateSink": { "timeout_millis": 120000, - "retry_codes_name": "non_idempotent", + "retry_codes_name": "idempotent", "retry_params_name": "default" }, "DeleteSink": { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index 47212e5bc29f..f448430659b1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,8 +15,65 @@ # limitations under the License. """Wrappers for protocol buffer enum types.""" +import enum + + +class LaunchStage(enum.IntEnum): + """ + The launch stage as defined by [Google Cloud Platform + Launch Stages](http://cloud.google.com/terms/launch-stages). + + Attributes: + LAUNCH_STAGE_UNSPECIFIED (int): Do not use this default value. + EARLY_ACCESS (int): Early Access features are limited to a closed group of testers. To use + these features, you must sign up in advance and sign a Trusted Tester + agreement (which includes confidentiality provisions). These features may + be unstable, changed in backward-incompatible ways, and are not + guaranteed to be released. + ALPHA (int): Alpha is a limited availability test for releases before they are cleared + for widespread use. By Alpha, all significant design issues are resolved + and we are in the process of verifying functionality. Alpha customers + need to apply for access, agree to applicable terms, and have their + projects whitelisted. Alpha releases don’t have to be feature complete, + no SLAs are provided, and there are no technical support obligations, but + they will be far enough along that customers can actually use them in + test environments or for limited-use tests -- just like they would in + normal production cases. + BETA (int): Beta is the point at which we are ready to open a release for any + customer to use. There are no SLA or technical support obligations in a + Beta release. Products will be complete from a feature perspective, but + may have some open outstanding issues. Beta releases are suitable for + limited production use cases. + GA (int): GA features are open to all developers and are considered stable and + fully qualified for production use. + DEPRECATED (int): Deprecated features are scheduled to be shut down and removed. For more + information, see the “Deprecation Policy” section of our [Terms of + Service](https://cloud.google.com/terms/) + and the [Google Cloud Platform Subject to the Deprecation + Policy](https://cloud.google.com/terms/deprecation) documentation. + """ + LAUNCH_STAGE_UNSPECIFIED = 0 + EARLY_ACCESS = 1 + ALPHA = 2 + BETA = 3 + GA = 4 + DEPRECATED = 5 + -class LogSeverity(object): +class NullValue(enum.IntEnum): + """ + ``NullValue`` is a singleton enumeration to represent the null value for the + ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 + + +class LogSeverity(enum.IntEnum): """ The severity of the event described in a log entry, expressed as one of the standard severity levels listed below. For your reference, the levels are @@ -57,21 +116,8 @@ class LogSeverity(object): EMERGENCY = 800 -class NullValue(object): - """ - ``NullValue`` is a singleton enumeration to represent the null value for the - ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - NULL_VALUE = 0 - - class LabelDescriptor(object): - class ValueType(object): + class ValueType(enum.IntEnum): """ Value types that can be used as label values. @@ -85,25 +131,8 @@ class ValueType(object): INT64 = 2 -class LogSink(object): - class VersionFormat(object): - """ - Available log entry formats. 
Log entries can be written to Stackdriver - Logging in either format and can be exported in either format. - Version 2 is the preferred format. - - Attributes: - VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. - V2 (int): ``LogEntry`` version 2 format. - V1 (int): ``LogEntry`` version 1 format. - """ - VERSION_FORMAT_UNSPECIFIED = 0 - V2 = 1 - V1 = 2 - - class MetricDescriptor(object): - class MetricKind(object): + class MetricKind(enum.IntEnum): """ The kind of measurement. It describes how the data is reported. @@ -122,7 +151,7 @@ class MetricKind(object): DELTA = 2 CUMULATIVE = 3 - class ValueType(object): + class ValueType(enum.IntEnum): """ The value type of a metric. @@ -147,13 +176,30 @@ class ValueType(object): class LogMetric(object): - class ApiVersion(object): + class ApiVersion(enum.IntEnum): """ - Stackdriver Logging API version. + Logging API version. Attributes: - V2 (int): Stackdriver Logging API v2. - V1 (int): Stackdriver Logging API v1. + V2 (int): Logging API v2. + V1 (int): Logging API v1. """ V2 = 0 V1 = 1 + + +class LogSink(object): + class VersionFormat(enum.IntEnum): + """ + Available log entry formats. Log entries can be written to + Logging in either format and can be exported in either format. + Version 2 is the preferred format. + + Attributes: + VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. + V2 (int): ``LogEntry`` version 2 format. + V1 (int): ``LogEntry`` version 1 format. + """ + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 43d5de0d240f..f0a29707b9dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,19 +17,25 @@ import functools import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template +import grpc from google.api import monitored_resource_pb2 from google.cloud.logging_v2.gapic import enums from google.cloud.logging_v2.gapic import logging_service_v2_client_config +from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport from google.cloud.logging_v2.proto import log_entry_pb2 from google.cloud.logging_v2.proto import logging_pb2 +from google.cloud.logging_v2.proto import logging_pb2_grpc +from google.protobuf import empty_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( 'google-cloud-logging', ).version @@ -39,27 +47,30 @@ class LoggingServiceV2Client(object): SERVICE_ADDRESS = 'logging.googleapis.com:443' """The default address of the service.""" - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 
'https://www.googleapis.com/auth/logging.write', - ) - - # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary. + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. _INTERFACE_NAME = 'google.logging.v2.LoggingServiceV2' @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - 'projects/{project}', - project=project, - ) + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file @classmethod def log_path(cls, project, log): @@ -70,7 +81,16 @@ def log_path(cls, project, log): log=log, ) + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + def __init__(self, + transport=None, channel=None, credentials=None, client_config=logging_service_v2_client_config.config, @@ -78,89 +98,83 @@ def __init__(self, """Constructor. Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive + transport (Union[~.LoggingServiceV2GrpcTransport, + Callable[[~.Credentials, type], ~.LoggingServiceV2GrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): A dictionary of call options for each - method. If not specified, the default configuration is used. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). 
- if channel is not None and credentials is not None: - raise ValueError( - 'The `channel` and `credentials` arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__), ) - - # Create the channel. - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=logging_service_v2_grpc_transport. + LoggingServiceV2GrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES, ) - # Create the gRPC stubs. - self.logging_service_v2_stub = ( - logging_pb2.LoggingServiceV2Stub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC # from the client configuration. # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) - method_configs = google.api_core.gapic_v1.config.parse_method_configs( + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config['interfaces'][self._INTERFACE_NAME], ) - # Write the "inner API call" methods to the class. - # These are wrapped versions of the gRPC stub methods, with retry and - # timeout configuration applied, called by the public methods on - # this class. 
- self._delete_log = google.api_core.gapic_v1.method.wrap_method( - self.logging_service_v2_stub.DeleteLog, - default_retry=method_configs['DeleteLog'].retry, - default_timeout=method_configs['DeleteLog'].timeout, - client_info=client_info, - ) - self._write_log_entries = google.api_core.gapic_v1.method.wrap_method( - self.logging_service_v2_stub.WriteLogEntries, - default_retry=method_configs['WriteLogEntries'].retry, - default_timeout=method_configs['WriteLogEntries'].timeout, - client_info=client_info, - ) - self._list_log_entries = google.api_core.gapic_v1.method.wrap_method( - self.logging_service_v2_stub.ListLogEntries, - default_retry=method_configs['ListLogEntries'].retry, - default_timeout=method_configs['ListLogEntries'].timeout, - client_info=client_info, - ) - self._list_monitored_resource_descriptors = google.api_core.gapic_v1.method.wrap_method( - self.logging_service_v2_stub.ListMonitoredResourceDescriptors, - default_retry=method_configs[ - 'ListMonitoredResourceDescriptors'].retry, - default_timeout=method_configs['ListMonitoredResourceDescriptors'] - .timeout, - client_info=client_info, - ) - self._list_logs = google.api_core.gapic_v1.method.wrap_method( - self.logging_service_v2_stub.ListLogs, - default_retry=method_configs['ListLogs'].retry, - default_timeout=method_configs['ListLogs'].timeout, - client_info=client_info, - ) + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} # Service calls def delete_log(self, @@ -214,11 +228,18 @@ def delete_log(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'delete_log' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_log'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_log, + default_retry=self._method_configs['DeleteLog'].retry, + default_timeout=self._method_configs['DeleteLog'].timeout, + client_info=self._client_info, + ) + request = logging_pb2.DeleteLogRequest(log_name=log_name, ) - self._delete_log( + self._inner_api_calls['delete_log']( request, retry=retry, timeout=timeout, metadata=metadata) def write_log_entries(self, @@ -227,33 +248,36 @@ def write_log_entries(self, resource=None, labels=None, partial_success=None, + dry_run=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - ## Log entry resources - - Writes log entries to Stackdriver Logging. This API method is the - only way to send log entries to Stackdriver Logging. This method - is used, directly or indirectly, by the Stackdriver Logging agent - (fluentd) and all logging libraries configured to use Stackdriver - Logging. + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. 
+ A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.LoggingServiceV2Client() >>> + >>> # TODO: Initialize ``entries``: >>> entries = [] >>> >>> response = client.write_log_entries(entries) Args: - entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Stackdriver Logging. The order of log + entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, and ``labels`` fields are copied into those log entries in this list that do not include values for their corresponding - fields. For more information, see the ``LogEntry`` type. + fields. For more information, see the + ``LogEntry`` type. If the ``timestamp`` or ``insert_id`` fields are missing in log entries, then this method supplies the current time or a unique identifier, respectively. @@ -263,8 +287,9 @@ def write_log_entries(self, Log entries with timestamps that are more than the `logs retention period `_ in the past or more than - 24 hours in the future might be discarded. Discarding does not return - an error. + 24 hours in the future will not be available when calling ``entries.list``. + However, those log entries can still be exported with + `LogSinks `_. To improve throughput and to avoid exceeding the `quota limit `_ for calls to ``entries.write``, @@ -282,11 +307,17 @@ def write_log_entries(self, \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" \"folders/[FOLDER_ID]/logs/[LOG_ID]\" - ``[LOG_ID]`` must be URL-encoded. For example, - ``\"projects/my-project-id/logs/syslog\"`` or - ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. - For more information about log names, see - ``LogEntry``. + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + \"projects/my-project-id/logs/syslog\" + \"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\" + + The permission logging.logEntries.create is needed on each + project, organization, billing account, or folder that is receiving + new log entries, whether the resource is specified in + logName or in an individual log entry. resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value for ``resource``. Example: @@ -308,6 +339,9 @@ def write_log_entries(self, entry is not written, then the response status is the error associated with one of the failed entries and the response includes error details keyed by the entries' zero-based index in the ``entries.write`` method. + dry_run (bool): Optional. If true, the request should expect normal response, but the + entries won't be persisted nor exported. Useful for checking whether the + logging API endpoints are working properly before sending valuable data. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -327,17 +361,27 @@ def write_log_entries(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'write_log_entries' not in self._inner_api_calls: + self._inner_api_calls[ + 'write_log_entries'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.write_log_entries, + default_retry=self._method_configs['WriteLogEntries']. + retry, + default_timeout=self._method_configs['WriteLogEntries']. + timeout, + client_info=self._client_info, + ) + request = logging_pb2.WriteLogEntriesRequest( entries=entries, log_name=log_name, resource=resource, labels=labels, partial_success=partial_success, + dry_run=dry_run, ) - return self._write_log_entries( + return self._inner_api_calls['write_log_entries']( request, retry=retry, timeout=timeout, metadata=metadata) def list_log_entries(self, @@ -351,7 +395,7 @@ def list_log_entries(self, metadata=None): """ Lists log entries. Use this method to retrieve log entries from - Stackdriver Logging. For ways to export log entries, see + Logging. For ways to export log entries, see `Exporting Logs `_. Example: @@ -359,15 +403,18 @@ def list_log_entries(self, >>> >>> client = logging_v2.LoggingServiceV2Client() >>> + >>> # TODO: Initialize ``resource_names``: >>> resource_names = [] >>> - >>> >>> # Iterate over all results >>> for element in client.list_log_entries(resource_names): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_log_entries(resource_names, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -430,9 +477,17 @@ def list_log_entries(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'list_log_entries' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_log_entries'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_log_entries, + default_retry=self._method_configs['ListLogEntries'].retry, + default_timeout=self._method_configs['ListLogEntries']. + timeout, + client_info=self._client_info, + ) + request = logging_pb2.ListLogEntriesRequest( resource_names=resource_names, project_ids=project_ids, @@ -443,7 +498,7 @@ def list_log_entries(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_log_entries, + self._inner_api_calls['list_log_entries'], retry=retry, timeout=timeout, metadata=metadata), @@ -461,21 +516,22 @@ def list_monitored_resource_descriptors( timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Lists the descriptors for monitored resource types used by Stackdriver - Logging. + Lists the descriptors for monitored resource types used by Logging. Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.LoggingServiceV2Client() >>> - >>> >>> # Iterate over all results >>> for element in client.list_monitored_resource_descriptors(): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_monitored_resource_descriptors(options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... 
# process element @@ -509,15 +565,24 @@ def list_monitored_resource_descriptors( to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'list_monitored_resource_descriptors' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_monitored_resource_descriptors'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_monitored_resource_descriptors, + default_retry=self. + _method_configs['ListMonitoredResourceDescriptors'].retry, + default_timeout=self._method_configs[ + 'ListMonitoredResourceDescriptors'].timeout, + client_info=self._client_info, + ) + request = logging_pb2.ListMonitoredResourceDescriptorsRequest( page_size=page_size, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_monitored_resource_descriptors, + self._inner_api_calls['list_monitored_resource_descriptors'], retry=retry, timeout=timeout, metadata=metadata), @@ -545,13 +610,15 @@ def list_logs(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_logs(parent): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_logs(parent, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -593,9 +660,16 @@ def list_logs(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. 
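+ # The wrapped callable is built once per RPC name and cached in
+ # self._inner_api_calls, so the retry and timeout defaults parsed
+ # into self._method_configs apply to every subsequent call.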
+ if 'list_logs' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_logs'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_logs, + default_retry=self._method_configs['ListLogs'].retry, + default_timeout=self._method_configs['ListLogs'].timeout, + client_info=self._client_info, + ) + request = logging_pb2.ListLogsRequest( parent=parent, page_size=page_size, @@ -603,7 +677,7 @@ def list_logs(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_logs, + self._inner_api_calls['list_logs'], retry=retry, timeout=timeout, metadata=metadata), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py index d70c2ef6a65a..26fca76eaef1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py @@ -8,21 +8,21 @@ "retry_params": { "default": { "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.2, - "max_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.5, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 90000 + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 }, "list": { "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.2, - "max_retry_delay_millis": 1000, - "initial_rpc_timeout_millis": 2000, - "rpc_timeout_multiplier": 1.5, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 20000 + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 } }, "methods": { @@ -44,7 +44,7 @@ "ListLogEntries": { "timeout_millis": 10000, "retry_codes_name": "idempotent", - "retry_params_name": "list" + "retry_params_name": "default" }, "ListMonitoredResourceDescriptors": { "timeout_millis": 60000, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 823588e27cfd..6dde895a5e3e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,21 +17,29 @@ import functools import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template +import grpc from google.api import monitored_resource_pb2 from google.cloud.logging_v2.gapic import enums from google.cloud.logging_v2.gapic import metrics_service_v2_client_config +from google.cloud.logging_v2.gapic.transports import metrics_service_v2_grpc_transport from google.cloud.logging_v2.proto import log_entry_pb2 from google.cloud.logging_v2.proto import logging_config_pb2 +from 
google.cloud.logging_v2.proto import logging_config_pb2_grpc from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc from google.cloud.logging_v2.proto import logging_pb2 +from google.cloud.logging_v2.proto import logging_pb2_grpc +from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( @@ -42,20 +52,31 @@ class MetricsServiceV2Client(object): SERVICE_ADDRESS = 'logging.googleapis.com:443' """The default address of the service.""" - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ) - - # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary. + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. _INTERFACE_NAME = 'google.logging.v2.MetricsServiceV2' + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" @@ -74,6 +95,7 @@ def metric_path(cls, project, metric): ) def __init__(self, + transport=None, channel=None, credentials=None, client_config=metrics_service_v2_client_config.config, @@ -81,87 +103,83 @@ def __init__(self, """Constructor. Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive + transport (Union[~.MetricsServiceV2GrpcTransport, + Callable[[~.Credentials, type], ~.MetricsServiceV2GrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): A dictionary of call options for each - method. If not specified, the default configuration is used. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. 
+ client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - 'The `channel` and `credentials` arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__), ) - - # Create the channel. - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=metrics_service_v2_grpc_transport. + MetricsServiceV2GrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES, ) - # Create the gRPC stubs. - self.metrics_service_v2_stub = ( - logging_metrics_pb2.MetricsServiceV2Stub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC # from the client configuration. # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) - method_configs = google.api_core.gapic_v1.config.parse_method_configs( + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config['interfaces'][self._INTERFACE_NAME], ) - # Write the "inner API call" methods to the class. - # These are wrapped versions of the gRPC stub methods, with retry and - # timeout configuration applied, called by the public methods on - # this class. 
- self._list_log_metrics = google.api_core.gapic_v1.method.wrap_method( - self.metrics_service_v2_stub.ListLogMetrics, - default_retry=method_configs['ListLogMetrics'].retry, - default_timeout=method_configs['ListLogMetrics'].timeout, - client_info=client_info, - ) - self._get_log_metric = google.api_core.gapic_v1.method.wrap_method( - self.metrics_service_v2_stub.GetLogMetric, - default_retry=method_configs['GetLogMetric'].retry, - default_timeout=method_configs['GetLogMetric'].timeout, - client_info=client_info, - ) - self._create_log_metric = google.api_core.gapic_v1.method.wrap_method( - self.metrics_service_v2_stub.CreateLogMetric, - default_retry=method_configs['CreateLogMetric'].retry, - default_timeout=method_configs['CreateLogMetric'].timeout, - client_info=client_info, - ) - self._update_log_metric = google.api_core.gapic_v1.method.wrap_method( - self.metrics_service_v2_stub.UpdateLogMetric, - default_retry=method_configs['UpdateLogMetric'].retry, - default_timeout=method_configs['UpdateLogMetric'].timeout, - client_info=client_info, - ) - self._delete_log_metric = google.api_core.gapic_v1.method.wrap_method( - self.metrics_service_v2_stub.DeleteLogMetric, - default_retry=method_configs['DeleteLogMetric'].retry, - default_timeout=method_configs['DeleteLogMetric'].timeout, - client_info=client_info, - ) + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} # Service calls def list_log_metrics(self, @@ -180,13 +198,15 @@ def list_log_metrics(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_log_metrics(parent): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_log_metrics(parent, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -225,9 +245,17 @@ def list_log_metrics(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'list_log_metrics' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_log_metrics'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_log_metrics, + default_retry=self._method_configs['ListLogMetrics'].retry, + default_timeout=self._method_configs['ListLogMetrics']. + timeout, + client_info=self._client_info, + ) + request = logging_metrics_pb2.ListLogMetricsRequest( parent=parent, page_size=page_size, @@ -235,7 +263,7 @@ def list_log_metrics(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_log_metrics, + self._inner_api_calls['list_log_metrics'], retry=retry, timeout=timeout, metadata=metadata), @@ -288,12 +316,20 @@ def get_log_metric(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. 
+ if 'get_log_metric' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_log_metric'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_log_metric, + default_retry=self._method_configs['GetLogMetric'].retry, + default_timeout=self._method_configs['GetLogMetric']. + timeout, + client_info=self._client_info, + ) + request = logging_metrics_pb2.GetLogMetricRequest( metric_name=metric_name, ) - return self._get_log_metric( + return self._inner_api_calls['get_log_metric']( request, retry=retry, timeout=timeout, metadata=metadata) def create_log_metric(self, @@ -311,6 +347,8 @@ def create_log_metric(self, >>> client = logging_v2.MetricsServiceV2Client() >>> >>> parent = client.project_path('[PROJECT]') + >>> + >>> # TODO: Initialize ``metric``: >>> metric = {} >>> >>> response = client.create_log_metric(parent, metric) @@ -346,14 +384,23 @@ def create_log_metric(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'create_log_metric' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_log_metric'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_log_metric, + default_retry=self._method_configs['CreateLogMetric']. + retry, + default_timeout=self._method_configs['CreateLogMetric']. + timeout, + client_info=self._client_info, + ) + request = logging_metrics_pb2.CreateLogMetricRequest( parent=parent, metric=metric, ) - return self._create_log_metric( + return self._inner_api_calls['create_log_metric']( request, retry=retry, timeout=timeout, metadata=metadata) def update_log_metric(self, @@ -371,6 +418,8 @@ def update_log_metric(self, >>> client = logging_v2.MetricsServiceV2Client() >>> >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> + >>> # TODO: Initialize ``metric``: >>> metric = {} >>> >>> response = client.update_log_metric(metric_name, metric) @@ -407,14 +456,23 @@ def update_log_metric(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'update_log_metric' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_log_metric'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_log_metric, + default_retry=self._method_configs['UpdateLogMetric']. + retry, + default_timeout=self._method_configs['UpdateLogMetric']. + timeout, + client_info=self._client_info, + ) + request = logging_metrics_pb2.UpdateLogMetricRequest( metric_name=metric_name, metric=metric, ) - return self._update_log_metric( + return self._inner_api_calls['update_log_metric']( request, retry=retry, timeout=timeout, metadata=metadata) def delete_log_metric(self, @@ -456,10 +514,19 @@ def delete_log_metric(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - if metadata is None: - metadata = [] - metadata = list(metadata) + # Wrap the transport method to add retry and timeout logic. + if 'delete_log_metric' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_log_metric'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_log_metric, + default_retry=self._method_configs['DeleteLogMetric']. + retry, + default_timeout=self._method_configs['DeleteLogMetric']. 
+ timeout, + client_info=self._client_info, + ) + request = logging_metrics_pb2.DeleteLogMetricRequest( metric_name=metric_name, ) - self._delete_log_metric( + self._inner_api_calls['delete_log_metric']( request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py index 9ff717dd0213..57377ba1e29d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py @@ -8,12 +8,12 @@ "retry_params": { "default": { "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.2, - "max_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.5, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 90000 + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 } }, "methods": { @@ -34,7 +34,7 @@ }, "UpdateLogMetric": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", + "retry_codes_name": "idempotent", "retry_params_name": "default" }, "DeleteLogMetric": { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py new file mode 100644 index 000000000000..be5b43eda2fa --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.grpc_helpers + +from google.cloud.logging_v2.proto import logging_config_pb2_grpc + + +class ConfigServiceV2GrpcTransport(object): + """gRPC transport class providing stubs for + google.logging.v2 ConfigServiceV2 API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. 
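+ # The broad cloud-platform scope subsumes the logging-specific
+ # scopes; the narrower ones are listed so that credentials limited
+ # to logging-only access still work with this transport.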
+ _OAUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + def __init__(self, + channel=None, + credentials=None, + address='logging.googleapis.com:443'): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments are mutually ' + 'exclusive.', ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + ) + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + 'config_service_v2_stub': + logging_config_pb2_grpc.ConfigServiceV2Stub(channel), + } + + @classmethod + def create_channel(cls, + address='logging.googleapis.com:443', + credentials=None): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, + credentials=credentials, + scopes=cls._OAUTH_SCOPES, + ) + + @property + def list_sinks(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists sinks. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].ListSinks + + @property + def get_sink(self): + """Return the gRPC stub for {$apiMethod.name}. + + Gets a sink. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].GetSink + + @property + def create_sink(self): + """Return the gRPC stub for {$apiMethod.name}. + + Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + ``writer_identity`` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].CreateSink + + @property + def update_sink(self): + """Return the gRPC stub for {$apiMethod.name}. + + Updates a sink. 
This method replaces the following fields in the existing + sink with values from the new sink: ``destination``, and ``filter``. + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].UpdateSink + + @property + def delete_sink(self): + """Return the gRPC stub for {$apiMethod.name}. + + Deletes a sink. If the sink has a unique ``writer_identity``, then that + service account is also deleted. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].DeleteSink + + @property + def list_exclusions(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists all the exclusions in a parent resource. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].ListExclusions + + @property + def get_exclusion(self): + """Return the gRPC stub for {$apiMethod.name}. + + Gets the description of an exclusion. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].GetExclusion + + @property + def create_exclusion(self): + """Return the gRPC stub for {$apiMethod.name}. + + Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].CreateExclusion + + @property + def update_exclusion(self): + """Return the gRPC stub for {$apiMethod.name}. + + Changes one or more properties of an existing exclusion. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].UpdateExclusion + + @property + def delete_exclusion(self): + """Return the gRPC stub for {$apiMethod.name}. + + Deletes an exclusion. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['config_service_v2_stub'].DeleteExclusion diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py new file mode 100644 index 000000000000..19ac7878636d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.grpc_helpers + +from google.cloud.logging_v2.proto import logging_pb2_grpc + + +class LoggingServiceV2GrpcTransport(object): + """gRPC transport class providing stubs for + google.logging.v2 LoggingServiceV2 API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + def __init__(self, + channel=None, + credentials=None, + address='logging.googleapis.com:443'): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments are mutually ' + 'exclusive.', ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + ) + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + 'logging_service_v2_stub': + logging_pb2_grpc.LoggingServiceV2Stub(channel), + } + + @classmethod + def create_channel(cls, + address='logging.googleapis.com:443', + credentials=None): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, + credentials=credentials, + scopes=cls._OAUTH_SCOPES, + ) + + @property + def delete_log(self): + """Return the gRPC stub for {$apiMethod.name}. + + Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
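An illustrative sketch (assuming a constructed ``transport`` and a placeholder log name) of invoking the raw stub directly with a request proto:

    >>> from google.cloud.logging_v2.proto import logging_pb2
    >>> request = logging_pb2.DeleteLogRequest(
    ...     log_name='projects/[PROJECT]/logs/[LOG_ID]')
    >>> transport.delete_log(request)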
+ """ + return self._stubs['logging_service_v2_stub'].DeleteLog + + @property + def write_log_entries(self): + """Return the gRPC stub for {$apiMethod.name}. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. + A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['logging_service_v2_stub'].WriteLogEntries + + @property + def list_log_entries(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists log entries. Use this method to retrieve log entries from + Logging. For ways to export log entries, see + `Exporting Logs `_. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['logging_service_v2_stub'].ListLogEntries + + @property + def list_monitored_resource_descriptors(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists the descriptors for monitored resource types used by Logging. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs[ + 'logging_service_v2_stub'].ListMonitoredResourceDescriptors + + @property + def list_logs(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['logging_service_v2_stub'].ListLogs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py new file mode 100644 index 000000000000..b87218e67645 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.grpc_helpers + +from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc + + +class MetricsServiceV2GrpcTransport(object): + """gRPC transport class providing stubs for + google.logging.v2 MetricsServiceV2 API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. 
+ _OAUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + def __init__(self, + channel=None, + credentials=None, + address='logging.googleapis.com:443'): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments are mutually ' + 'exclusive.', ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + ) + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + 'metrics_service_v2_stub': + logging_metrics_pb2_grpc.MetricsServiceV2Stub(channel), + } + + @classmethod + def create_channel(cls, + address='logging.googleapis.com:443', + credentials=None): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, + credentials=credentials, + scopes=cls._OAUTH_SCOPES, + ) + + @property + def list_log_metrics(self): + """Return the gRPC stub for {$apiMethod.name}. + + Lists logs-based metrics. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['metrics_service_v2_stub'].ListLogMetrics + + @property + def get_log_metric(self): + """Return the gRPC stub for {$apiMethod.name}. + + Gets a logs-based metric. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['metrics_service_v2_stub'].GetLogMetric + + @property + def create_log_metric(self): + """Return the gRPC stub for {$apiMethod.name}. + + Creates a logs-based metric. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['metrics_service_v2_stub'].CreateLogMetric + + @property + def update_log_metric(self): + """Return the gRPC stub for {$apiMethod.name}. + + Creates or updates a logs-based metric. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
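A short sketch (metric name and filter are illustrative) of invoking the raw stub with a request proto, mirroring the client-level ``update_log_metric``:

    >>> from google.cloud.logging_v2.proto import logging_metrics_pb2
    >>> metric = logging_metrics_pb2.LogMetric(
    ...     name='[METRIC]', filter='severity>=ERROR')
    >>> request = logging_metrics_pb2.UpdateLogMetricRequest(
    ...     metric_name='projects/[PROJECT]/metrics/[METRIC]',
    ...     metric=metric)
    >>> transport.update_log_metric(request)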
+ """ + return self._stubs['metrics_service_v2_stub'].UpdateLogMetric + + @property + def delete_log_metric(self): + """Return the gRPC stub for {$apiMethod.name}. + + Deletes a logs-based metric. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs['metrics_service_v2_stub'].DeleteLogMetric diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 88c8f6954fac..0b5f10f863cc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/log_entry.proto @@ -26,10 +27,9 @@ name='google/cloud/logging_v2/proto/log_entry.proto', package='google.logging.v2', syntax='proto3', - serialized_pb=_b('\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x05\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload\"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08\"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + serialized_pb=_b('\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 
\x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload\"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08\"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -47,14 +47,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.logging.v2.LogEntry.LabelsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -67,8 +67,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=948, - serialized_end=993, + serialized_start=1028, + serialized_end=1073, ) _LOGENTRY = _descriptor.Descriptor( @@ -84,105 +84,119 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='resource', full_name='google.logging.v2.LogEntry.resource', index=1, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='proto_payload', 
full_name='google.logging.v2.LogEntry.proto_payload', index=2, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='text_payload', full_name='google.logging.v2.LogEntry.text_payload', index=3, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='json_payload', full_name='google.logging.v2.LogEntry.json_payload', index=4, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='timestamp', full_name='google.logging.v2.LogEntry.timestamp', index=5, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='receive_timestamp', full_name='google.logging.v2.LogEntry.receive_timestamp', index=6, number=24, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='severity', full_name='google.logging.v2.LogEntry.severity', index=7, number=10, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='insert_id', full_name='google.logging.v2.LogEntry.insert_id', index=8, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='http_request', full_name='google.logging.v2.LogEntry.http_request', index=9, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='labels', full_name='google.logging.v2.LogEntry.labels', index=10, number=11, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='operation', full_name='google.logging.v2.LogEntry.operation', index=11, + name='metadata', full_name='google.logging.v2.LogEntry.metadata', index=11, + number=25, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='operation', full_name='google.logging.v2.LogEntry.operation', index=12, number=15, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='trace', full_name='google.logging.v2.LogEntry.trace', index=12, + name='trace', full_name='google.logging.v2.LogEntry.trace', index=13, number=22, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='span_id', full_name='google.logging.v2.LogEntry.span_id', index=13, + name='span_id', full_name='google.logging.v2.LogEntry.span_id', index=14, number=27, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='trace_sampled', full_name='google.logging.v2.LogEntry.trace_sampled', index=15, + number=30, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='source_location', full_name='google.logging.v2.LogEntry.source_location', index=14, + name='source_location', full_name='google.logging.v2.LogEntry.source_location', index=16, number=23, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -199,7 +213,7 @@ index=0, containing_type=None, fields=[]), ], serialized_start=306, - serialized_end=1004, + serialized_end=1084, ) @@ -216,28 +230,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='producer', full_name='google.logging.v2.LogEntryOperation.producer', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='first', full_name='google.logging.v2.LogEntryOperation.first', index=2, number=3, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='last', full_name='google.logging.v2.LogEntryOperation.last', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -250,8 +264,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1006, - serialized_end=1084, + serialized_start=1086, + 
serialized_end=1164, ) @@ -268,21 +282,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='line', full_name='google.logging.v2.LogEntrySourceLocation.line', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='function', full_name='google.logging.v2.LogEntrySourceLocation.function', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -295,8 +309,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1086, - serialized_end=1156, + serialized_start=1166, + serialized_end=1236, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY @@ -308,6 +322,7 @@ _LOGENTRY.fields_by_name['severity'].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY _LOGENTRY.fields_by_name['http_request'].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST _LOGENTRY.fields_by_name['labels'].message_type = _LOGENTRY_LABELSENTRY +_LOGENTRY.fields_by_name['metadata'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA _LOGENTRY.fields_by_name['operation'].message_type = _LOGENTRYOPERATION _LOGENTRY.fields_by_name['source_location'].message_type = _LOGENTRYSOURCELOCATION _LOGENTRY.oneofs_by_name['payload'].fields.append( @@ -322,6 +337,7 @@ DESCRIPTOR.message_types_by_name['LogEntry'] = _LOGENTRY DESCRIPTOR.message_types_by_name['LogEntryOperation'] = _LOGENTRYOPERATION DESCRIPTOR.message_types_by_name['LogEntrySourceLocation'] = _LOGENTRYSOURCELOCATION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) LogEntry = _reflection.GeneratedProtocolMessageType('LogEntry', (_message.Message,), dict( @@ -360,8 +376,8 @@ for a log name with a leading slash will never return any results. resource: - Required. The monitored resource associated with this log - entry. Example: a log entry that reports a database error + Required. The primary monitored resource associated with this + log entry. Example: a log entry that reports a database error would be associated with the monitored resource designating the particular database that reported the error. payload: @@ -381,33 +397,39 @@ Optional. The time the event described by the log entry occurred. This time is used to compute the log entry's age and to enforce the logs retention period. If this field is omitted - in a new log entry, then Stackdriver Logging assigns it the - current time. Incoming log entries should have timestamps - that are no more than the `logs retention period - `__ in the past, and no more than 24 - hours in the future. See the ``entries.write`` API method for - more information. + in a new log entry, then Logging assigns it the current time. + Timestamps have nanosecond accuracy, but trailing zeros in the + fractional seconds might be omitted when the timestamp is + displayed. Incoming log entries should have timestamps that + are no more than the `logs retention period + `__ in the past, and no more than 24 hours in + the future. 
Log entries outside those time boundaries will not + be available when calling ``entries.list``, but those log + entries can still be exported with `LogSinks + `__. receive_timestamp: - Output only. The time the log entry was received by - Stackdriver Logging. + Output only. The time the log entry was received by Logging. severity: Optional. The severity of the log entry. The default value is ``LogSeverity.DEFAULT``. insert_id: Optional. A unique identifier for the log entry. If you - provide a value, then Stackdriver Logging considers other log - entries in the same project, with the same ``timestamp``, and - with the same ``insert_id`` to be duplicates which can be - removed. If omitted in new log entries, then Stackdriver - Logging assigns its own unique identifier. The ``insert_id`` - is also used to order log entries that have the same - ``timestamp`` value. + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which can be removed. If + omitted in new log entries, then Logging assigns its own + unique identifier. The ``insert_id`` is also used to order log + entries that have the same ``timestamp`` value. http_request: Optional. Information about the HTTP request associated with this log entry, if applicable. labels: Optional. A set of user-defined (key, value) data that provides additional information about the log entry. + metadata: + Output only. Additional metadata about the monitored resource. + Only ``k8s_container``, ``k8s_pod``, and ``k8s_node`` + MonitoredResources have this field populated. operation: Optional. Information about an operation associated with the log entry, if applicable. @@ -418,11 +440,19 @@ ``//tracing.googleapis.com``. Example: ``projects/my- projectid/traces/06796866738c859f2f19b7cfb3214824`` span_id: - Optional. Id of the span within the trace associated with the - log entry. e.g. "0000000000000042" For Stackdriver trace - spans, this is the same format that the Stackdriver trace API - uses. The ID is a 16-character hexadecimal encoding of an - 8-byte array. + Optional. The span ID within the trace associated with the log + entry. For Trace spans, this is the same format that the Trace + API v2 uses: a 16-character hexadecimal encoding of an 8-byte + array, such as "000000000000004a". + trace_sampled: + Optional. The sampling decision of the trace associated with + the log entry. True means that the trace resource name in the + ``trace`` field was sampled for storage in a trace backend. + False means that the trace was not sampled for storage when + this log entry was written, or the sampling decision was + unknown at the time. A non-sampled ``trace`` value is still + useful as a request correlation identifier. The default is + False. source_location: Optional. Source code location information associated with the log entry, if any. @@ -495,14 +525,4 @@ DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) _LOGENTRY_LABELSENTRY.has_options = True _LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. 
- import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities -except ImportError: - pass # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 9fa39a546e85..17a3ec78985c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -24,10 +24,9 @@ name='google/cloud/logging_v2/proto/logging_config.proto', package='google.logging.v2', syntax='proto3', - serialized_pb=_b('\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd7\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12G\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormat\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12.\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x80\n\n\x0f\x43onfigServiceV2\x12v\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v2/{parent=*/*}/sinks\x12m\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{sink_name=*/*/sinks/*}\x12t\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"$\x82\xd3\xe4\x93\x02\x1e\"\x16/v2/{parent=*/*}/sinks:\x04sink\x12y\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink\")\x82\xd3\xe4\x93\x02#\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sink\x12o\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{sink_name=*/*/sinks/*}\x12\x8a\x01\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{parent=*/*}/exclusions\x12|\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{name=*/*/exclusions/*}\x12\x8d\x01\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(\"\x1b/v2/{parent=*/*}/exclusions:\texclusion\x12\x8d\x01\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(2\x1b/v2/{name=*/*/exclusions/*}:\texclusion\x12y\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{name=*/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + serialized_pb=_b('\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 
\x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse\"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12\"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12\"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01\"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%\"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*\"\"/v2/{parent=organizations/*}/sinks:\x04sinkZ$\"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,\"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a\"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12\"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExcl
usion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12\"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/\"\"/v2/{parent=projects/*}/exclusions:\texclusionZ4\"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ.\"!/v2/{parent=folders/*}/exclusions:\texclusionZ6\")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -52,8 +51,8 @@ ], containing_type=None, options=None, - serialized_start=480, - serialized_end=543, + serialized_start=492, + serialized_end=555, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -71,56 +70,56 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='destination', full_name='google.logging.v2.LogSink.destination', index=1, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='filter', full_name='google.logging.v2.LogSink.filter', index=2, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='output_version_format', full_name='google.logging.v2.LogSink.output_version_format', index=3, number=6, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), 
_b('\030\001')), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='writer_identity', full_name='google.logging.v2.LogSink.writer_identity', index=4, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='include_children', full_name='google.logging.v2.LogSink.include_children', index=5, number=9, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='start_time', full_name='google.logging.v2.LogSink.start_time', index=6, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='end_time', full_name='google.logging.v2.LogSink.end_time', index=7, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), ], extensions=[ ], @@ -135,7 +134,7 @@ oneofs=[ ], serialized_start=200, - serialized_end=543, + serialized_end=555, ) @@ -152,21 +151,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListSinksRequest.page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.logging.v2.ListSinksRequest.page_size', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -179,8 +178,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=545, - serialized_end=618, + serialized_start=557, + serialized_end=630, ) @@ -197,14 +196,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListSinksResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -217,8 +216,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=620, - serialized_end=707, + serialized_start=632, + 
serialized_end=719, ) @@ -235,7 +234,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -248,8 +247,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=709, - serialized_end=744, + serialized_start=721, + serialized_end=756, ) @@ -266,21 +265,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='sink', full_name='google.logging.v2.CreateSinkRequest.sink', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unique_writer_identity', full_name='google.logging.v2.CreateSinkRequest.unique_writer_identity', index=2, number=3, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -293,8 +292,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=746, - serialized_end=855, + serialized_start=758, + serialized_end=867, ) @@ -311,28 +310,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='sink', full_name='google.logging.v2.UpdateSinkRequest.sink', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unique_writer_identity', full_name='google.logging.v2.UpdateSinkRequest.unique_writer_identity', index=2, number=3, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_mask', full_name='google.logging.v2.UpdateSinkRequest.update_mask', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -345,8 +344,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=858, - serialized_end=1019, + serialized_start=870, + serialized_end=1031, ) @@ -363,7 +362,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -376,8 +375,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1021, - serialized_end=1059, + serialized_start=1033, + serialized_end=1071, ) @@ -394,28 +393,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='description', full_name='google.logging.v2.LogExclusion.description', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='filter', full_name='google.logging.v2.LogExclusion.filter', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='disabled', full_name='google.logging.v2.LogExclusion.disabled', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -428,8 +427,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1061, - serialized_end=1144, + serialized_start=1073, + serialized_end=1156, ) @@ -446,21 +445,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListExclusionsRequest.page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.logging.v2.ListExclusionsRequest.page_size', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -473,8 +472,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1146, - serialized_end=1224, + serialized_start=1158, + serialized_end=1236, ) @@ -491,14 +490,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListExclusionsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -511,8 +510,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1226, - serialized_end=1328, + serialized_start=1238, + serialized_end=1340, ) @@ -529,7 +528,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -542,8 +541,8 @@ extension_ranges=[], oneofs=[ ], - 
serialized_start=1330, - serialized_end=1365, + serialized_start=1342, + serialized_end=1377, ) @@ -560,14 +559,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='exclusion', full_name='google.logging.v2.CreateExclusionRequest.exclusion', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -580,8 +579,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1367, - serialized_end=1459, + serialized_start=1379, + serialized_end=1471, ) @@ -598,21 +597,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='exclusion', full_name='google.logging.v2.UpdateExclusionRequest.exclusion', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_mask', full_name='google.logging.v2.UpdateExclusionRequest.update_mask', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -625,8 +624,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1462, - serialized_end=1601, + serialized_start=1474, + serialized_end=1613, ) @@ -643,7 +642,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -656,8 +655,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1603, - serialized_end=1641, + serialized_start=1615, + serialized_end=1653, ) _LOGSINK.fields_by_name['output_version_format'].enum_type = _LOGSINK_VERSIONFORMAT @@ -686,6 +685,7 @@ DESCRIPTOR.message_types_by_name['CreateExclusionRequest'] = _CREATEEXCLUSIONREQUEST DESCRIPTOR.message_types_by_name['UpdateExclusionRequest'] = _UPDATEEXCLUSIONREQUEST DESCRIPTOR.message_types_by_name['DeleteExclusionRequest'] = _DELETEEXCLUSIONREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) LogSink = _reflection.GeneratedProtocolMessageType('LogSink', (_message.Message,), dict( DESCRIPTOR = _LOGSINK, @@ -727,8 +727,8 @@ cannot be changed. writer_identity: Output only. An IAM identity—a service account or group—under - which Stackdriver Logging writes the exported log entries to - the sink's destination. This field is set by `sinks.create + which Logging writes the exported log entries to the sink's + destination. This field is set by `sinks.create `__ and `sinks.update `__, based on the setting of @@ -857,14 +857,14 @@ ``writer_identity`` in the new sink. 
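[Editor's sketch, not part of the patch] The ``unique_writer_identity`` behavior documented here can be exercised directly with the regenerated messages. A minimal sketch under the assumption that ``logging_config_pb2`` from this patch is importable; all names are placeholders:

    from google.cloud.logging_v2.proto import logging_config_pb2

    request = logging_config_pb2.CreateSinkRequest(
        parent='projects/my-project',  # placeholder parent resource
        sink=logging_config_pb2.LogSink(
            name='my-sink',
            destination='storage.googleapis.com/my-bucket',
            filter='severity>=ERROR',
        ),
        # Ask the service for a per-sink service account; it is returned in
        # the created sink's writer_identity field, as documented above.
        unique_writer_identity=True,
    )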
If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is the same group or - service account used by Stackdriver Logging before the - addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. - If this field is set to true, or if the sink is owned by a - non-project resource such as an organization, then the value - of ``writer_identity`` will be a unique service account used - only for exports from the new sink. For more information, see - ``writer_identity`` in [LogSink][google.logging.v2.LogSink]. + service account used by Logging before the addition of writer + identities to this API. The sink's destination must be in the + same project as the sink itself. If this field is set to + true, or if the sink is owned by a non-project resource such + as an organization, then the value of ``writer_identity`` will + be a unique service account used only for exports from the new + sink. For more information, see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. """, # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) )) @@ -912,8 +912,8 @@ some point in the future, behavior will be removed and specifying an empty updateMask will be an error. For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol- - buffers/docs/reference/google.protobuf#fieldmask Example: + https://developers.google.com/protocol-buffers/docs/reference/ + google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. """, # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) @@ -945,12 +945,12 @@ DESCRIPTOR = _LOGEXCLUSION, __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' , - __doc__ = """Specifies a set of log entries that are not to be stored in Stackdriver - Logging. If your project receives a large volume of logs, you might be - able to use exclusions to reduce your chargeable logs. Exclusions are - processed after log sinks, so you can export log entries before they are - excluded. Audit log entries and log entries from Amazon Web Services are - never excluded. + __doc__ = """Specifies a set of log entries that are not to be stored in Logging. If + your project receives a large volume of logs, you might be able to use + exclusions to reduce your chargeable logs. Exclusions are processed + after log sinks, so you can export log entries before they are excluded. + Audit log entries and log entries from Amazon Web Services are never + excluded. Attributes: @@ -968,15 +968,14 @@ `__, you can exclude less than 100% of the matching log entries. For example, the following filter matches 99% of low-severity log - entries from load balancers: :: - "resource.type=http_load_balancer severity`__ to change the value - of this field. + `exclusions.patch `__ to change the value of this field. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) )) @@ -1135,442 +1134,115 @@ DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. 
- import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities - - - class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListSinks = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', - request_serializer=ListSinksRequest.SerializeToString, - response_deserializer=ListSinksResponse.FromString, - ) - self.GetSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', - request_serializer=GetSinkRequest.SerializeToString, - response_deserializer=LogSink.FromString, - ) - self.CreateSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', - request_serializer=CreateSinkRequest.SerializeToString, - response_deserializer=LogSink.FromString, - ) - self.UpdateSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', - request_serializer=UpdateSinkRequest.SerializeToString, - response_deserializer=LogSink.FromString, - ) - self.DeleteSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', - request_serializer=DeleteSinkRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListExclusions = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', - request_serializer=ListExclusionsRequest.SerializeToString, - response_deserializer=ListExclusionsResponse.FromString, - ) - self.GetExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', - request_serializer=GetExclusionRequest.SerializeToString, - response_deserializer=LogExclusion.FromString, - ) - self.CreateExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', - request_serializer=CreateExclusionRequest.SerializeToString, - response_deserializer=LogExclusion.FromString, - ) - self.UpdateExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', - request_serializer=UpdateExclusionRequest.SerializeToString, - response_deserializer=LogExclusion.FromString, - ) - self.DeleteExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', - request_serializer=DeleteExclusionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - - class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. - """ - - def ListSinks(self, request, context): - """Lists sinks. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetSink(self, request, context): - """Gets a sink. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: `destination`, and `filter`. - The updated sink might also have a new `writer_identity`; see the - `unique_writer_identity` field. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteSink(self, request, context): - """Deletes a sink. If the sink has a unique `writer_identity`, then that - service account is also deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetExclusion(self, request, context): - """Gets the description of an exclusion. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateExclusion(self, request, context): - """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteExclusion(self, request, context): - """Deletes an exclusion. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_ConfigServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'ListSinks': grpc.unary_unary_rpc_method_handler( - servicer.ListSinks, - request_deserializer=ListSinksRequest.FromString, - response_serializer=ListSinksResponse.SerializeToString, - ), - 'GetSink': grpc.unary_unary_rpc_method_handler( - servicer.GetSink, - request_deserializer=GetSinkRequest.FromString, - response_serializer=LogSink.SerializeToString, - ), - 'CreateSink': grpc.unary_unary_rpc_method_handler( - servicer.CreateSink, - request_deserializer=CreateSinkRequest.FromString, - response_serializer=LogSink.SerializeToString, - ), - 'UpdateSink': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSink, - request_deserializer=UpdateSinkRequest.FromString, - response_serializer=LogSink.SerializeToString, - ), - 'DeleteSink': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSink, - request_deserializer=DeleteSinkRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ListExclusions': grpc.unary_unary_rpc_method_handler( - servicer.ListExclusions, - request_deserializer=ListExclusionsRequest.FromString, - response_serializer=ListExclusionsResponse.SerializeToString, - ), - 'GetExclusion': grpc.unary_unary_rpc_method_handler( - servicer.GetExclusion, - request_deserializer=GetExclusionRequest.FromString, - response_serializer=LogExclusion.SerializeToString, - ), - 'CreateExclusion': grpc.unary_unary_rpc_method_handler( - servicer.CreateExclusion, - request_deserializer=CreateExclusionRequest.FromString, - response_serializer=LogExclusion.SerializeToString, - ), - 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( - servicer.UpdateExclusion, - request_deserializer=UpdateExclusionRequest.FromString, - response_serializer=LogExclusion.SerializeToString, - ), - 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( - servicer.DeleteExclusion, - request_deserializer=DeleteExclusionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.ConfigServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaConfigServiceV2Servicer(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. - """ - def ListSinks(self, request, context): - """Lists sinks. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetSink(self, request, context): - """Gets a sink. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateSink(self, request, context): - """Updates a sink. 
This method replaces the following fields in the existing - sink with values from the new sink: `destination`, and `filter`. - The updated sink might also have a new `writer_identity`; see the - `unique_writer_identity` field. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteSink(self, request, context): - """Deletes a sink. If the sink has a unique `writer_identity`, then that - service account is also deleted. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetExclusion(self, request, context): - """Gets the description of an exclusion. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def CreateExclusion(self, request, context): - """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteExclusion(self, request, context): - """Deletes an exclusion. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaConfigServiceV2Stub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. - """ - def ListSinks(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists sinks. - """ - raise NotImplementedError() - ListSinks.future = None - def GetSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets a sink. - """ - raise NotImplementedError() - GetSink.future = None - def CreateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. - """ - raise NotImplementedError() - CreateSink.future = None - def UpdateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: `destination`, and `filter`. - The updated sink might also have a new `writer_identity`; see the - `unique_writer_identity` field. - """ - raise NotImplementedError() - UpdateSink.future = None - def DeleteSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes a sink. If the sink has a unique `writer_identity`, then that - service account is also deleted. - """ - raise NotImplementedError() - DeleteSink.future = None - def ListExclusions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists all the exclusions in a parent resource. 
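[Editor's sketch, not part of the patch] A minimal sketch of the exclusion messages these methods operate on, assuming the regenerated ``logging_config_pb2`` module; the filter reconstructs the ``sample()`` example referenced in the ``LogExclusion`` documentation above (drop 99% of low-severity entries), and all other values are placeholders:

    from google.cloud.logging_v2.proto import logging_config_pb2

    request = logging_config_pb2.CreateExclusionRequest(
        parent='projects/my-project',  # placeholder; a resource may hold up to 10 exclusions
        exclusion=logging_config_pb2.LogExclusion(
            name='exclude-low-severity',
            description='Drop 99% of low-severity load balancer entries',
            filter='resource.type=http_load_balancer severity<ERROR sample(insertId, 0.99)',
        ),
    )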
- """ - raise NotImplementedError() - ListExclusions.future = None - def GetExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets the description of an exclusion. - """ - raise NotImplementedError() - GetExclusion.future = None - def CreateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - """ - raise NotImplementedError() - CreateExclusion.future = None - def UpdateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Changes one or more properties of an existing exclusion. - """ - raise NotImplementedError() - UpdateExclusion.future = None - def DeleteExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes an exclusion. - """ - raise NotImplementedError() - DeleteExclusion.future = None - - - def beta_create_ConfigServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.FromString, - ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.FromString, - } - response_serializers = { - ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): LogSink.SerializeToString, - } - method_implementations = { - ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): face_utilities.unary_unary_inline(servicer.CreateExclusion), - ('google.logging.v2.ConfigServiceV2', 'CreateSink'): 
face_utilities.unary_unary_inline(servicer.CreateSink), - ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): face_utilities.unary_unary_inline(servicer.DeleteExclusion), - ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): face_utilities.unary_unary_inline(servicer.DeleteSink), - ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): face_utilities.unary_unary_inline(servicer.GetExclusion), - ('google.logging.v2.ConfigServiceV2', 'GetSink'): face_utilities.unary_unary_inline(servicer.GetSink), - ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): face_utilities.unary_unary_inline(servicer.ListExclusions), - ('google.logging.v2.ConfigServiceV2', 'ListSinks'): face_utilities.unary_unary_inline(servicer.ListSinks), - ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): face_utilities.unary_unary_inline(servicer.UpdateExclusion), - ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): face_utilities.unary_unary_inline(servicer.UpdateSink), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_ConfigServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. +_LOGSINK.fields_by_name['output_version_format'].has_options = True +_LOGSINK.fields_by_name['output_version_format']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_LOGSINK.fields_by_name['start_time'].has_options = True +_LOGSINK.fields_by_name['start_time']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_LOGSINK.fields_by_name['end_time'].has_options = True +_LOGSINK.fields_by_name['end_time']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) + +_CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( + name='ConfigServiceV2', + full_name='google.logging.v2.ConfigServiceV2', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1656, + serialized_end=4876, + methods=[ + _descriptor.MethodDescriptor( + name='ListSinks', + full_name='google.logging.v2.ConfigServiceV2.ListSinks', + index=0, + containing_service=None, + input_type=_LISTSINKSREQUEST, + output_type=_LISTSINKSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022\"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks')), + ), + _descriptor.MethodDescriptor( + name='GetSink', + full_name='google.logging.v2.ConfigServiceV2.GetSink', + index=1, + containing_service=None, + input_type=_GETSINKREQUEST, + output_type=_LOGSINK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022\'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateSink', + full_name='google.logging.v2.ConfigServiceV2.CreateSink', + index=2, + containing_service=None, + input_type=_CREATESINKREQUEST, + output_type=_LOGSINK, + 
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\305\001\"\026/v2/{parent=*/*}/sinks:\004sinkZ%\"\035/v2/{parent=projects/*}/sinks:\004sinkZ*\"\"/v2/{parent=organizations/*}/sinks:\004sinkZ$\"\034/v2/{parent=folders/*}/sinks:\004sinkZ,\"$/v2/{parent=billingAccounts/*}/sinks:\004sink')), + ), + _descriptor.MethodDescriptor( + name='UpdateSink', + full_name='google.logging.v2.ConfigServiceV2.UpdateSink', + index=3, + containing_service=None, + input_type=_UPDATESINKREQUEST, + output_type=_LOGSINK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032\'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink')), + ), + _descriptor.MethodDescriptor( + name='DeleteSink', + full_name='google.logging.v2.ConfigServiceV2.DeleteSink', + index=4, + containing_service=None, + input_type=_DELETESINKREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}')), + ), + _descriptor.MethodDescriptor( + name='ListExclusions', + full_name='google.logging.v2.ConfigServiceV2.ListExclusions', + index=5, + containing_service=None, + input_type=_LISTEXCLUSIONSREQUEST, + output_type=_LISTEXCLUSIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022\'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions')), + ), + _descriptor.MethodDescriptor( + name='GetExclusion', + full_name='google.logging.v2.ConfigServiceV2.GetExclusion', + index=6, + containing_service=None, + input_type=_GETEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022\'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateExclusion', + full_name='google.logging.v2.ConfigServiceV2.CreateExclusion', + index=7, + containing_service=None, + input_type=_CREATEEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\367\001\"\033/v2/{parent=*/*}/exclusions:\texclusionZ/\"\"/v2/{parent=projects/*}/exclusions:\texclusionZ4\"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ.\"!/v2/{parent=folders/*}/exclusions:\texclusionZ6\")/v2/{parent=billingAccounts/*}/exclusions:\texclusion')), + ), + _descriptor.MethodDescriptor( + name='UpdateExclusion', + full_name='google.logging.v2.ConfigServiceV2.UpdateExclusion', + index=8, + containing_service=None, + 
input_type=_UPDATEEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion')), + ), + _descriptor.MethodDescriptor( + name='DeleteExclusion', + full_name='google.logging.v2.ConfigServiceV2.DeleteExclusion', + index=9, + containing_service=None, + input_type=_DELETEEXCLUSIONREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}')), + ), +]) +_sym_db.RegisterServiceDescriptor(_CONFIGSERVICEV2) + +DESCRIPTOR.services_by_name['ConfigServiceV2'] = _CONFIGSERVICEV2 - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.SerializeToString, - ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.SerializeToString, - } - response_deserializers = { - ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.FromString, - ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.FromString, - ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.FromString, - ('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.FromString, - ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.FromString, - ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.FromString, - ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.FromString, - ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): LogSink.FromString, - } - cardinalities = { - 'CreateExclusion': cardinality.Cardinality.UNARY_UNARY, - 'CreateSink': cardinality.Cardinality.UNARY_UNARY, - 'DeleteExclusion': cardinality.Cardinality.UNARY_UNARY, - 'DeleteSink': cardinality.Cardinality.UNARY_UNARY, - 'GetExclusion': cardinality.Cardinality.UNARY_UNARY, - 'GetSink': 
cardinality.Cardinality.UNARY_UNARY, - 'ListExclusions': cardinality.Cardinality.UNARY_UNARY, - 'ListSinks': cardinality.Cardinality.UNARY_UNARY, - 'UpdateExclusion': cardinality.Cardinality.UNARY_UNARY, - 'UpdateSink': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.logging.v2.ConfigServiceV2', cardinalities, options=stub_options) -except ImportError: - pass # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index 4f218e7d43b6..87fc8a1a6088 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -1,13 +1,13 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -import google.cloud.logging_v2.proto.logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2 -import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. + """Service for configuring sinks used to export log entries out of + Logging. """ def __init__(self, channel): @@ -69,8 +69,8 @@ def __init__(self, channel): class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries outside of - Stackdriver Logging. + """Service for configuring sinks used to export log entries out of + Logging. """ def ListSinks(self, request, context): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 76a68b0c83fe..0be74ac88f55 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
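[editor's note] The hand-rolled beta stubs deleted above are superseded by the generated classes in logging_config_pb2_grpc.py, whose imports also switch to the "from ... import ... as ..." form shown in this hunk. A minimal sketch of driving the GA-style stub, assuming a placeholder endpoint and project id (a real call against the Logging API would need an authenticated secure channel, not an insecure one):

    import grpc

    from google.cloud.logging_v2.proto import logging_config_pb2
    from google.cloud.logging_v2.proto import logging_config_pb2_grpc

    # Placeholder endpoint; a production client would use grpc.secure_channel
    # with Google credentials instead.
    channel = grpc.insecure_channel('localhost:50051')
    stub = logging_config_pb2_grpc.ConfigServiceV2Stub(channel)

    # 'projects/my-project' is a hypothetical parent resource.
    request = logging_config_pb2.ListSinksRequest(parent='projects/my-project')
    for sink in stub.ListSinks(request).sinks:
        print(sink.name, sink.destination)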
# source: google/cloud/logging_v2/proto/logging_metrics.proto @@ -18,16 +19,16 @@ from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='google/cloud/logging_v2/proto/logging_metrics.proto', package='google.logging.v2', syntax='proto3', - serialized_pb=_b('\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\"\xad\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12\x38\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersion\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01\"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"/\x82\xd3\xe4\x93\x02)\"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + 
serialized_pb=_b('\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01\"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"/\x82\xd3\xe4\x93\x02)\"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_distribution__pb2.DESCRIPTOR,google_dot_api_dot_metric__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_distribution__pb2.DESCRIPTOR,google_dot_api_dot_metric__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) @@ -48,8 +49,8 @@ ], containing_type=None, options=None, - serialized_start=625, - serialized_end=653, + serialized_start=662, + serialized_end=690, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) @@ -67,14 +68,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -87,8 +88,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=569, - serialized_end=623, + serialized_start=606, + serialized_end=660, ) _LOGMETRIC = _descriptor.Descriptor( @@ -104,56 +105,56 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='description', full_name='google.logging.v2.LogMetric.description', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='filter', full_name='google.logging.v2.LogMetric.filter', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='metric_descriptor', full_name='google.logging.v2.LogMetric.metric_descriptor', index=3, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value_extractor', full_name='google.logging.v2.LogMetric.value_extractor', index=4, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='label_extractors', full_name='google.logging.v2.LogMetric.label_extractors', index=5, number=7, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bucket_options', full_name='google.logging.v2.LogMetric.bucket_options', index=6, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='version', full_name='google.logging.v2.LogMetric.version', index=7, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), ], extensions=[ ], @@ -167,8 +168,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=224, - serialized_end=653, + serialized_start=257, + serialized_end=690, ) @@ -185,21 +186,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListLogMetricsRequest.page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.logging.v2.ListLogMetricsRequest.page_size', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -212,8 +213,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=655, - serialized_end=733, + serialized_start=692, + serialized_end=770, ) @@ -230,14 +231,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListLogMetricsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -250,8 +251,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=735, - serialized_end=831, + serialized_start=772, + serialized_end=868, ) @@ -268,7 +269,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -281,8 +282,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=833, - serialized_end=875, + serialized_start=870, + serialized_end=912, ) @@ -299,14 +300,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='metric', full_name='google.logging.v2.CreateLogMetricRequest.metric', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - 
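[editor's note] The _b('\030\001') blob newly attached to the version field above is just a serialized FieldOptions message; decoding it shows the field is being marked deprecated. A small self-contained check (octal \030\001 is bytes 0x18 0x01):

    from google.protobuf import descriptor_pb2

    # 0x18 is the key for field 3 (FieldOptions.deprecated, varint wire type)
    # and 0x01 is the value, i.e. FieldOptions(deprecated=True).
    opts = descriptor_pb2.FieldOptions()
    opts.MergeFromString(b'\x18\x01')
    assert opts.deprecated  # True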
options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -319,8 +320,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=877, - serialized_end=963, + serialized_start=914, + serialized_end=1000, ) @@ -337,14 +338,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='metric', full_name='google.logging.v2.UpdateLogMetricRequest.metric', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -357,8 +358,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=965, - serialized_end=1056, + serialized_start=1002, + serialized_end=1093, ) @@ -375,7 +376,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -388,8 +389,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1058, - serialized_end=1103, + serialized_start=1095, + serialized_end=1140, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC @@ -408,6 +409,7 @@ DESCRIPTOR.message_types_by_name['CreateLogMetricRequest'] = _CREATELOGMETRICREQUEST DESCRIPTOR.message_types_by_name['UpdateLogMetricRequest'] = _UPDATELOGMETRICREQUEST DESCRIPTOR.message_types_by_name['DeleteLogMetricRequest'] = _DELETELOGMETRICREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) LogMetric = _reflection.GeneratedProtocolMessageType('LogMetric', (_message.Message,), dict( @@ -645,251 +647,66 @@ DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) _LOGMETRIC_LABELEXTRACTORSENTRY.has_options = True _LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities - - - class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListLogMetrics = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', - request_serializer=ListLogMetricsRequest.SerializeToString, - response_deserializer=ListLogMetricsResponse.FromString, - ) - self.GetLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', - request_serializer=GetLogMetricRequest.SerializeToString, - response_deserializer=LogMetric.FromString, - ) - self.CreateLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', - request_serializer=CreateLogMetricRequest.SerializeToString, - response_deserializer=LogMetric.FromString, - ) - self.UpdateLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', - request_serializer=UpdateLogMetricRequest.SerializeToString, - response_deserializer=LogMetric.FromString, - ) - self.DeleteLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', - request_serializer=DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - - class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. - """ - - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=ListLogMetricsRequest.FromString, - response_serializer=ListLogMetricsResponse.SerializeToString, - ), - 'GetLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=GetLogMetricRequest.FromString, - response_serializer=LogMetric.SerializeToString, - ), - 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - request_deserializer=CreateLogMetricRequest.FromString, - response_serializer=LogMetric.SerializeToString, - ), - 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=UpdateLogMetricRequest.FromString, - response_serializer=LogMetric.SerializeToString, - ), - 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.MetricsServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaMetricsServiceV2Servicer(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for configuring logs-based metrics. - """ - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaMetricsServiceV2Stub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for configuring logs-based metrics. - """ - def ListLogMetrics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists logs-based metrics. - """ - raise NotImplementedError() - ListLogMetrics.future = None - def GetLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets a logs-based metric. - """ - raise NotImplementedError() - GetLogMetric.future = None - def CreateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates a logs-based metric. 
- """ - raise NotImplementedError() - CreateLogMetric.future = None - def UpdateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates or updates a logs-based metric. - """ - raise NotImplementedError() - UpdateLogMetric.future = None - def DeleteLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes a logs-based metric. - """ - raise NotImplementedError() - DeleteLogMetric.future = None - - - def beta_create_MetricsServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.FromString, - ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.FromString, - ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.FromString, - ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.FromString, - ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.FromString, - } - response_serializers = { - ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.SerializeToString, - } - method_implementations = { - ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): face_utilities.unary_unary_inline(servicer.CreateLogMetric), - ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): face_utilities.unary_unary_inline(servicer.DeleteLogMetric), - ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): face_utilities.unary_unary_inline(servicer.GetLogMetric), - ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): face_utilities.unary_unary_inline(servicer.ListLogMetrics), - ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): face_utilities.unary_unary_inline(servicer.UpdateLogMetric), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_MetricsServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.SerializeToString, - ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.SerializeToString, - } - response_deserializers = { - ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.FromString, - ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.FromString, - ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.FromString, - ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.FromString, - } - cardinalities = { - 'CreateLogMetric': cardinality.Cardinality.UNARY_UNARY, - 'DeleteLogMetric': cardinality.Cardinality.UNARY_UNARY, - 'GetLogMetric': cardinality.Cardinality.UNARY_UNARY, - 'ListLogMetrics': cardinality.Cardinality.UNARY_UNARY, - 'UpdateLogMetric': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.logging.v2.MetricsServiceV2', cardinalities, options=stub_options) -except ImportError: - pass +_LOGMETRIC.fields_by_name['version'].has_options = True +_LOGMETRIC.fields_by_name['version']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) + +_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( + name='MetricsServiceV2', + full_name='google.logging.v2.MetricsServiceV2', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1143, + serialized_end=1867, + methods=[ + _descriptor.MethodDescriptor( + name='ListLogMetrics', + full_name='google.logging.v2.MetricsServiceV2.ListLogMetrics', + index=0, + containing_service=None, + input_type=_LISTLOGMETRICSREQUEST, + output_type=_LISTLOGMETRICSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics')), + ), + _descriptor.MethodDescriptor( + name='GetLogMetric', + full_name='google.logging.v2.MetricsServiceV2.GetLogMetric', + index=1, + containing_service=None, + input_type=_GETLOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateLogMetric', + full_name='google.logging.v2.MetricsServiceV2.CreateLogMetric', + index=2, + containing_service=None, + input_type=_CREATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002)\"\037/v2/{parent=projects/*}/metrics:\006metric')), + ), + _descriptor.MethodDescriptor( + name='UpdateLogMetric', + full_name='google.logging.v2.MetricsServiceV2.UpdateLogMetric', + index=3, + containing_service=None, + 
input_type=_UPDATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric')), + ), + _descriptor.MethodDescriptor( + name='DeleteLogMetric', + full_name='google.logging.v2.MetricsServiceV2.DeleteLogMetric', + index=4, + containing_service=None, + input_type=_DELETELOGMETRICREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}')), + ), +]) +_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) + +DESCRIPTOR.services_by_name['MetricsServiceV2'] = _METRICSSERVICEV2 + # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py index b464e0b4b3a4..f5a7b50f3383 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -1,8 +1,8 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -import google.cloud.logging_v2.proto.logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2 -import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class MetricsServiceV2Stub(object): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 9665bcdee9f2..126ec3a53d6a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
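[editor's note] With the ServiceDescriptor registered via _sym_db.RegisterServiceDescriptor and exposed through services_by_name, the regenerated module can be introspected at runtime, including the google.api.http bindings packed into the serialized MethodOptions blobs above. A sketch of walking them, assuming only that the package's generated modules import cleanly:

    from google.api import annotations_pb2
    from google.cloud.logging_v2.proto import logging_metrics_pb2

    svc = logging_metrics_pb2.DESCRIPTOR.services_by_name['MetricsServiceV2']
    for method in svc.methods:
        print(method.name, method.input_type.full_name,
              '->', method.output_type.full_name)
        # Each MethodOptions blob above carries a google.api.http extension.
        rule = method.GetOptions().Extensions[annotations_pb2.http]
        verb = rule.WhichOneof('pattern')  # 'get', 'post', 'put' or 'delete'
        print('   ', verb.upper(), getattr(rule, verb))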
# source: google/cloud/logging_v2/proto/logging.proto @@ -26,10 +27,9 @@ name='google/cloud/logging_v2/proto/logging.proto', package='google.logging.v2', syntax='proto3', - serialized_pb=_b('\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\"\x98\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x19\n\x17WriteLogEntriesResponse\"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01\"\x8d\x01\n\x15ListLogEntriesRequest\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd5\x05\n\x10LoggingServiceV2\x12w\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v2beta1/{log_name=projects/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse\"(\x82\xd3\xe4\x93\x02\"\x12 
/v2/monitoredResourceDescriptors\x12r\n\x08ListLogs\x12\".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v2/{parent=*/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + serialized_pb=_b('\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x19\n\x17WriteLogEntriesResponse\"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01\"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty\"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v2/entries:list:\x01*\x12\xc5\x01\n 
ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12\".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse\"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -47,7 +47,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -78,14 +78,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -98,8 +98,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=573, - serialized_end=618, + serialized_start=590, + serialized_end=635, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( @@ -115,35 +115,42 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='resource', full_name='google.logging.v2.WriteLogEntriesRequest.resource', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='labels', full_name='google.logging.v2.WriteLogEntriesRequest.labels', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='entries', full_name='google.logging.v2.WriteLogEntriesRequest.entries', index=3, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, 
file=DESCRIPTOR), _descriptor.FieldDescriptor( name='partial_success', full_name='google.logging.v2.WriteLogEntriesRequest.partial_success', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dry_run', full_name='google.logging.v2.WriteLogEntriesRequest.dry_run', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -157,7 +164,7 @@ oneofs=[ ], serialized_start=338, - serialized_end=618, + serialized_end=635, ) @@ -180,8 +187,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=620, - serialized_end=645, + serialized_start=637, + serialized_end=662, ) @@ -198,14 +205,14 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -218,8 +225,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=775, - serialized_end=848, + serialized_start=792, + serialized_end=865, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( @@ -235,7 +242,7 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -248,8 +255,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=648, - serialized_end=848, + serialized_start=665, + serialized_end=865, ) @@ -266,42 +273,42 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='resource_names', full_name='google.logging.v2.ListLogEntriesRequest.resource_names', index=1, number=8, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='filter', full_name='google.logging.v2.ListLogEntriesRequest.filter', index=2, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='order_by', full_name='google.logging.v2.ListLogEntriesRequest.order_by', index=3, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, 
file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.logging.v2.ListLogEntriesRequest.page_size', index=4, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListLogEntriesRequest.page_token', index=5, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -314,8 +321,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=851, - serialized_end=992, + serialized_start=868, + serialized_end=1013, ) @@ -332,14 +339,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListLogEntriesResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -352,8 +359,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=994, - serialized_end=1089, + serialized_start=1015, + serialized_end=1110, ) @@ -370,14 +377,14 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -390,8 +397,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1091, - serialized_end=1171, + serialized_start=1112, + serialized_end=1192, ) @@ -408,14 +415,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -428,8 +435,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1174, - serialized_end=1312, + serialized_start=1195, + serialized_end=1333, ) @@ -446,21 +453,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', 
full_name='google.logging.v2.ListLogsRequest.page_size', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.logging.v2.ListLogsRequest.page_token', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -473,8 +480,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1314, - serialized_end=1386, + serialized_start=1335, + serialized_end=1407, ) @@ -491,14 +498,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.logging.v2.ListLogsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -511,8 +518,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1388, - serialized_end=1450, + serialized_start=1409, + serialized_end=1471, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST @@ -534,6 +541,7 @@ DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsResponse'] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE DESCRIPTOR.message_types_by_name['ListLogsRequest'] = _LISTLOGSREQUEST DESCRIPTOR.message_types_by_name['ListLogsResponse'] = _LISTLOGSRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) DeleteLogRequest = _reflection.GeneratedProtocolMessageType('DeleteLogRequest', (_message.Message,), dict( DESCRIPTOR = _DELETELOGREQUEST, @@ -580,10 +588,13 @@ "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example, ``"projects/my-project-id/logs/syslog"`` - or ``"organizations/1234567890/logs/cloudresourcemanager.googl - eapis.com%2Factivity"``. For more information about log names, - see [LogEntry][google.logging.v2.LogEntry]. + encoded. For example: :: "projects/my-project- + id/logs/syslog" "organizations/1234567890/logs/cloudresour + cemanager.googleapis.com%2Factivity" The permission + logging.logEntries.create is needed on each project, + organization, billing account, or folder that is receiving new + log entries, whether the resource is specified in logName or + in an individual log entry. resource: Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value @@ -598,13 +609,12 @@ parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. entries: - Required. The log entries to send to Stackdriver Logging. The - order of log entries in this list does not matter. Values - supplied in this method's ``log_name``, ``resource``, and - ``labels`` fields are copied into those log entries in this - list that do not include values for their corresponding - fields. 
For more information, see the - [LogEntry][google.logging.v2.LogEntry] type. If the + Required. The log entries to send to Logging. The order of log + entries in this list does not matter. Values supplied in this + method's ``log_name``, ``resource``, and ``labels`` fields are + copied into those log entries in this list that do not include + values for their corresponding fields. For more information, + see the [LogEntry][google.logging.v2.LogEntry] type. If the ``timestamp`` or ``insert_id`` fields are missing in log entries, then this method supplies the current time or a unique identifier, respectively. The supplied values are @@ -613,12 +623,14 @@ before the entries later in the list. See the ``entries.list`` method. Log entries with timestamps that are more than the `logs retention period `__ in the past - or more than 24 hours in the future might be discarded. - Discarding does not return an error. To improve throughput - and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to - include several log entries in this list, rather than calling - this method for each individual log entry. + or more than 24 hours in the future will not be available when + calling ``entries.list``. However, those log entries can still + be exported with `LogSinks `__. To improve throughput and to avoid exceeding the + `quota limit `__ for calls to + ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for each + individual log entry. partial_success: Optional. Whether valid entries should be written even if some other entries fail due to INVALID\_ARGUMENT or @@ -627,6 +639,11 @@ failed entries and the response includes error details keyed by the entries' zero-based index in the ``entries.write`` method. + dry_run: + Optional. If true, the request should expect normal response, + but the entries won't be persisted nor exported. Useful for + checking whether the logging API endpoints are working + properly before sending valuable data. """, # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) )) @@ -857,290 +874,66 @@ _WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.has_options = True _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities - - - class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
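[editor's note] The new dry_run flag documented in this hunk pairs naturally with partial_success for smoke-testing a pipeline. A minimal sketch of a validation-only write, with a hypothetical log name and the stub wiring from logging_pb2_grpc assumed:

    from google.cloud.logging_v2.proto import log_entry_pb2, logging_pb2

    # dry_run=True asks the API to validate and acknowledge the request
    # without persisting or exporting the entries.
    request = logging_pb2.WriteLogEntriesRequest(
        log_name='projects/my-project/logs/my-log',  # hypothetical log
        entries=[log_entry_pb2.LogEntry(text_payload='smoke test')],
        partial_success=True,
        dry_run=True,
    )
    # response = stub.WriteLogEntries(request)  # given a LoggingServiceV2Stub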
- """ - self.DeleteLog = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', - request_serializer=DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.WriteLogEntries = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', - request_serializer=WriteLogEntriesRequest.SerializeToString, - response_deserializer=WriteLogEntriesResponse.FromString, - ) - self.ListLogEntries = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', - request_serializer=ListLogEntriesRequest.SerializeToString, - response_deserializer=ListLogEntriesResponse.FromString, - ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.ListLogs = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', - request_serializer=ListLogsRequest.SerializeToString, - response_deserializer=ListLogsResponse.FromString, - ) - - - class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ - - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def WriteLogEntries(self, request, context): - """## Log entry resources - - Writes log entries to Stackdriver Logging. This API method is the - only way to send log entries to Stackdriver Logging. This method - is used, directly or indirectly, by the Stackdriver Logging agent - (fluentd) and all logging libraries configured to use Stackdriver - Logging. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries from - Stackdriver Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Stackdriver - Logging. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') +_LISTLOGENTRIESREQUEST.fields_by_name['project_ids'].has_options = True +_LISTLOGENTRIESREQUEST.fields_by_name['project_ids']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'DeleteLog': grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=WriteLogEntriesRequest.FromString, - response_serializer=WriteLogEntriesResponse.SerializeToString, - ), - 'ListLogEntries': grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=ListLogEntriesRequest.FromString, - response_serializer=ListLogEntriesResponse.SerializeToString, - ), - 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - 'ListLogs': grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=ListLogsRequest.FromString, - response_serializer=ListLogsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.LoggingServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaLoggingServiceV2Servicer(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for ingesting and querying logs. - """ - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def WriteLogEntries(self, request, context): - """## Log entry resources - - Writes log entries to Stackdriver Logging. This API method is the - only way to send log entries to Stackdriver Logging. This method - is used, directly or indirectly, by the Stackdriver Logging agent - (fluentd) and all logging libraries configured to use Stackdriver - Logging. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries from - Stackdriver Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Stackdriver - Logging. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaLoggingServiceV2Stub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """Service for ingesting and querying logs. - """ - def DeleteLog(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. - """ - raise NotImplementedError() - DeleteLog.future = None - def WriteLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """## Log entry resources - - Writes log entries to Stackdriver Logging. This API method is the - only way to send log entries to Stackdriver Logging. This method - is used, directly or indirectly, by the Stackdriver Logging agent - (fluentd) and all logging libraries configured to use Stackdriver - Logging. - """ - raise NotImplementedError() - WriteLogEntries.future = None - def ListLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists log entries. Use this method to retrieve log entries from - Stackdriver Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). - """ - raise NotImplementedError() - ListLogEntries.future = None - def ListMonitoredResourceDescriptors(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists the descriptors for monitored resource types used by Stackdriver - Logging. - """ - raise NotImplementedError() - ListMonitoredResourceDescriptors.future = None - def ListLogs(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ - raise NotImplementedError() - ListLogs.future = None - - - def beta_create_LoggingServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.FromString, - ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.FromString, - } - response_serializers = { - ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.SerializeToString, - } - method_implementations = { - ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): face_utilities.unary_unary_inline(servicer.DeleteLog), - ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): face_utilities.unary_unary_inline(servicer.ListLogEntries), - ('google.logging.v2.LoggingServiceV2', 'ListLogs'): face_utilities.unary_unary_inline(servicer.ListLogs), - ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): face_utilities.unary_unary_inline(servicer.ListMonitoredResourceDescriptors), - ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): face_utilities.unary_unary_inline(servicer.WriteLogEntries), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_LoggingServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. 
+_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( + name='LoggingServiceV2', + full_name='google.logging.v2.LoggingServiceV2', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1474, + serialized_end=2458, + methods=[ + _descriptor.MethodDescriptor( + name='DeleteLog', + full_name='google.logging.v2.LoggingServiceV2.DeleteLog', + index=0, + containing_service=None, + input_type=_DELETELOGREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}')), + ), + _descriptor.MethodDescriptor( + name='WriteLogEntries', + full_name='google.logging.v2.LoggingServiceV2.WriteLogEntries', + index=1, + containing_service=None, + input_type=_WRITELOGENTRIESREQUEST, + output_type=_WRITELOGENTRIESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\026\"\021/v2/entries:write:\001*')), + ), + _descriptor.MethodDescriptor( + name='ListLogEntries', + full_name='google.logging.v2.LoggingServiceV2.ListLogEntries', + index=2, + containing_service=None, + input_type=_LISTLOGENTRIESREQUEST, + output_type=_LISTLOGENTRIESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\025\"\020/v2/entries:list:\001*')), + ), + _descriptor.MethodDescriptor( + name='ListMonitoredResourceDescriptors', + full_name='google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors', + index=3, + containing_service=None, + input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\"\022 /v2/monitoredResourceDescriptors')), + ), + _descriptor.MethodDescriptor( + name='ListLogs', + full_name='google.logging.v2.LoggingServiceV2.ListLogs', + index=4, + containing_service=None, + input_type=_LISTLOGSREQUEST, + output_type=_LISTLOGSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs')), + ), +]) +_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) + +DESCRIPTOR.services_by_name['LoggingServiceV2'] = _LOGGINGSERVICEV2 - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.SerializeToString, - ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.SerializeToString, - } - response_deserializers = { - ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.FromString, - ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.FromString, - ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.FromString, - } - cardinalities = { - 'DeleteLog': cardinality.Cardinality.UNARY_UNARY, - 'ListLogEntries': cardinality.Cardinality.UNARY_UNARY, - 'ListLogs': cardinality.Cardinality.UNARY_UNARY, - 'ListMonitoredResourceDescriptors': cardinality.Cardinality.UNARY_UNARY, - 'WriteLogEntries': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.logging.v2.LoggingServiceV2', cardinalities, options=stub_options) -except ImportError: - pass # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index fc16e12e140f..8f5d9e646e5f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -1,8 +1,8 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -import google.cloud.logging_v2.proto.logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2 -import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.logging_v2.proto import logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class LoggingServiceV2Stub(object): @@ -57,13 +57,13 @@ def DeleteLog(self, request, context): raise NotImplementedError('Method not implemented!') def WriteLogEntries(self, request, context): - """## Log entry resources - - Writes log entries to Stackdriver Logging. This API method is the - only way to send log entries to Stackdriver Logging. This method - is used, directly or indirectly, by the Stackdriver Logging agent - (fluentd) and all logging libraries configured to use Stackdriver - Logging. + """Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. 
+ A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -71,7 +71,7 @@ def WriteLogEntries(self, request, context): def ListLogEntries(self, request, context): """Lists log entries. Use this method to retrieve log entries from - Stackdriver Logging. For ways to export log entries, see + Logging. For ways to export log entries, see [Exporting Logs](/logging/docs/export). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -79,8 +79,7 @@ def ListLogEntries(self, request, context): raise NotImplementedError('Method not implemented!') def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Stackdriver - Logging. + """Lists the descriptors for monitored resource types used by Logging. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py new file mode 100644 index 000000000000..a7971946b67b --- /dev/null +++ b/packages/google-cloud-logging/synth.py @@ -0,0 +1,39 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize generated parts of this library.""" +import synthtool as s +from synthtool import gcp + +gapic = gcp.GAPICGenerator() + + +#---------------------------------------------------------------------------- +# Generate logging client +#---------------------------------------------------------------------------- +library = gapic.py_library( + 'logging', + 'v2', + config_path='/google/logging/artman_logging.yaml', + artman_output_name='logging-v2') + +s.move(library / 'google/cloud/logging_v2/proto') +s.move(library / 'google/cloud/logging_v2/gapic') + +# Issues exist where python files should define the source encoding +# https://github.com/googleapis/gapic-generator/issues/2097 +s.replace( + 'google/**/proto/*_pb2.py', + r"(^.*$\n)*", + r"# -*- coding: utf-8 -*-\n\g<0>") From 9b001c421810b20bce32d19791246769e1cda4f4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Nov 2018 11:45:15 -0500 Subject: [PATCH 199/855] Fix client_info bug, update docstrings via synth. (#6435) Closes #6357. 
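NOTE: The client_info bug fixed here is visible in the diff below: when
``client_info`` was ``None``, the old code grabbed the shared
``DEFAULT_CLIENT_INFO`` singleton and mutated its ``gapic_version``, so one
client's version stamp leaked into every other client in the process that
used the default. A minimal sketch of the corrected pattern, using only the
public ``google.api_core.gapic_v1.client_info.ClientInfo`` API; the version
string here is a placeholder (the generated clients derive the real value
from the installed package):

    import google.api_core.gapic_v1.client_info

    _GAPIC_LIBRARY_VERSION = '1.0.0'  # placeholder, not the real version

    def _resolve_client_info(client_info=None):
        if client_info is None:
            # Fixed behavior: build a fresh ClientInfo rather than mutating
            # the shared DEFAULT_CLIENT_INFO that other clients may use.
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION)
        else:
            # A caller-supplied ClientInfo is still stamped with this
            # library's GAPIC version.
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        return client_info

The same three-line replacement is applied to each of the generated clients
in the diff that follows.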
--- .../gapic/config_service_v2_client.py | 214 +++++++++--------- .../google/cloud/logging_v2/gapic/enums.py | 53 ++--- .../gapic/logging_service_v2_client.py | 194 ++++++++-------- .../gapic/metrics_service_v2_client.py | 31 +-- .../config_service_v2_grpc_transport.py | 16 +- .../logging_service_v2_grpc_transport.py | 6 +- 6 files changed, 263 insertions(+), 251 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 169087562dbd..c252e69bb860 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -173,9 +173,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -217,7 +218,7 @@ def list_sinks(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_sinks(parent, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_sinks(parent).pages: ... for element in page: ... # process element ... pass @@ -227,10 +228,10 @@ def list_sinks(self, :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -308,12 +309,12 @@ def get_sink(self, :: - \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" - \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" - \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + Example: ``"projects/my-project-id/sinks/my-sink-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -355,10 +356,10 @@ def create_sink(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - ``writer_identity`` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. + Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the + sink's ``writer_identity`` is not permitted to write to the destination. + A sink can export log entries only from the resource owning the sink. 
Example: >>> from google.cloud import logging_v2 @@ -367,7 +368,7 @@ def create_sink(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> # TODO: Initialize ``sink``: + >>> # TODO: Initialize `sink`: >>> sink = {} >>> >>> response = client.create_sink(parent, sink) @@ -377,27 +378,29 @@ def create_sink(self, :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier + that is not already in use. - Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier that - is not already in use. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as ``writer_identity`` - in the new sink. If this value is omitted or set to false, and if the - sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. The sink's destination must be - in the same project as the sink itself. + unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is omitted or set to + false, and if the sink's parent is a project, then the value returned as + ``writer_identity`` is the same group or service account used by Logging + before the addition of writer identities to this API. The sink's + destination must be in the same project as the sink itself. If this field is set to true, or if the sink is owned by a non-project - resource such as an organization, then the value of ``writer_identity`` will - be a unique service account used only for exports from the new sink. For - more information, see ``writer_identity`` in ``LogSink``. + resource such as an organization, then the value of ``writer_identity`` + will be a unique service account used only for exports from the new + sink. For more information, see ``writer_identity`` in ``LogSink``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -444,10 +447,10 @@ def update_sink(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: ``destination``, and ``filter``. - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, and + ``filter``. The updated sink might also have a new ``writer_identity``; + see the ``unique_writer_identity`` field. 
Example: >>> from google.cloud import logging_v2 @@ -456,7 +459,7 @@ def update_sink(self, >>> >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') >>> - >>> # TODO: Initialize ``sink``: + >>> # TODO: Initialize `sink`: >>> sink = {} >>> >>> response = client.update_sink(sink_name, sink) @@ -467,41 +470,35 @@ def update_sink(self, :: - \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" - \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" - \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that + appears as part of ``sink_name``. - Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that appears - as part of ``sink_name``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogSink` unique_writer_identity (bool): Optional. See - `sinks.create `_ - for a description of this field. When updating a sink, the effect of this - field on the value of ``writer_identity`` in the updated sink depends on both - the old and new values of this field: - - + If the old and new values of this field are both false or both true, - :: - - then there is no change to the sink's `writer_identity`. - + If the old value is false and the new value is true, then - :: - - `writer_identity` is changed to a unique service account. - + It is an error if the old value is true and the new value is - :: - - set to false or defaulted to false. - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need - an update. A sink field will be overwritten if, and only if, it is - in the update mask. ``name`` and output only fields cannot be updated. + `sinks.create `__ + for a description of this field. When updating a sink, the effect of + this field on the value of ``writer_identity`` in the updated sink + depends on both the old and new values of this field: + + - If the old and new values of this field are both false or both true, + then there is no change to the sink's ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service account. + - It is an error if the old value is true and the new value is set to + false or defaulted to false. + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need an + update. A sink field will be overwritten if, and only if, it is in the + update mask. ``name`` and output only fields cannot be updated. An empty updateMask is temporarily treated as using the following mask - for backwards compatibility purposes: - destination,filter,includeChildren + for backwards compatibility purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an error. 
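NOTE: A minimal usage sketch (not part of the diff) of ``update_sink`` with
the ``update_mask`` semantics documented above; the project, sink identifier,
and filter are placeholders, and the partial ``sink`` dict assumes only the
masked field needs to be populated:

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    sink_name = client.sink_path('my-project-id', 'my-sink-id')

    # Only the fields named in update_mask are overwritten; ``name`` and
    # output-only fields cannot be updated.
    sink = {'filter': 'severity>=ERROR'}
    update_mask = {'paths': ['filter']}  # same form as google.protobuf.FieldMask

    updated_sink = client.update_sink(sink_name, sink, update_mask=update_mask)

Passing ``update_mask`` explicitly sidesteps the temporary
backwards-compatibility default of ``destination,filter,includeChildren``
described above, which is slated to become an error.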
@@ -509,6 +506,7 @@ def update_sink(self, https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -573,12 +571,12 @@ def delete_sink(self, :: - \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" - \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" - \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + Example: ``"projects/my-project-id/sinks/my-sink-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -634,7 +632,7 @@ def list_exclusions(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_exclusions(parent, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_exclusions(parent).pages: ... for element in page: ... # process element ... pass @@ -644,10 +642,10 @@ def list_exclusions(self, :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -726,12 +724,12 @@ def get_exclusion(self, :: - \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" - \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" - \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -784,7 +782,7 @@ def create_exclusion(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> # TODO: Initialize ``exclusion``: + >>> # TODO: Initialize `exclusion`: >>> exclusion = {} >>> >>> response = client.create_exclusion(parent, exclusion) @@ -794,14 +792,16 @@ def create_exclusion(self, :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion + name that is not already used in the parent resource. 
- Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion name - that is not already used in the parent resource. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogExclusion` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -859,10 +859,10 @@ def update_exclusion(self, >>> >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') >>> - >>> # TODO: Initialize ``exclusion``: + >>> # TODO: Initialize `exclusion`: >>> exclusion = {} >>> - >>> # TODO: Initialize ``update_mask``: + >>> # TODO: Initialize `update_mask`: >>> update_mask = {} >>> >>> response = client.update_exclusion(name, exclusion, update_mask) @@ -872,14 +872,15 @@ def update_exclusion(self, :: - \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" - \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" - \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields + specified in ``update_mask`` are relevant. - Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields specified - in ``update_mask`` are relevant. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogExclusion` update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A nonempty list of fields to change in the existing exclusion. @@ -888,7 +889,8 @@ def update_exclusion(self, ``update_mask`` are not changed and are ignored in the request. For example, to change the filter and description of an exclusion, - specify an ``update_mask`` of ``\"filter,description\"``. + specify an ``update_mask`` of ``"filter,description"``. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -952,12 +954,12 @@ def delete_exclusion(self, :: - \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" - \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" - \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index f448430659b1..e47d4def171e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -20,8 +20,8 @@ class LaunchStage(enum.IntEnum): """ - The launch stage as defined by [Google Cloud Platform - Launch Stages](http://cloud.google.com/terms/launch-stages). + The launch stage as defined by `Google Cloud Platform Launch + Stages `__. Attributes: LAUNCH_STAGE_UNSPECIFIED (int): Do not use this default value. @@ -47,10 +47,10 @@ class LaunchStage(enum.IntEnum): GA (int): GA features are open to all developers and are considered stable and fully qualified for production use. DEPRECATED (int): Deprecated features are scheduled to be shut down and removed. For more - information, see the “Deprecation Policy” section of our [Terms of - Service](https://cloud.google.com/terms/) - and the [Google Cloud Platform Subject to the Deprecation - Policy](https://cloud.google.com/terms/deprecation) documentation. + information, see the “Deprecation Policy” section of our `Terms of + Service `__ and the `Google Cloud + Platform Subject to the Deprecation + Policy `__ documentation. """ LAUNCH_STAGE_UNSPECIFIED = 0 EARLY_ACCESS = 1 @@ -62,10 +62,10 @@ class LaunchStage(enum.IntEnum): class NullValue(enum.IntEnum): """ - ``NullValue`` is a singleton enumeration to represent the null value for the - ``Value`` type union. + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. - The JSON representation for ``NullValue`` is JSON ``null``. + The JSON representation for ``NullValue`` is JSON ``null``. Attributes: NULL_VALUE (int): Null value. @@ -75,23 +75,24 @@ class NullValue(enum.IntEnum): class LogSeverity(enum.IntEnum): """ - The severity of the event described in a log entry, expressed as one of the - standard severity levels listed below. For your reference, the levels are - assigned the listed numeric values. The effect of using numeric values other - than those listed is undefined. + The severity of the event described in a log entry, expressed as one of + the standard severity levels listed below. For your reference, the + levels are assigned the listed numeric values. The effect of using + numeric values other than those listed is undefined. - You can filter for log entries by severity. For example, the following - filter expression will match log entries with severities ``INFO``, ``NOTICE``, - and ``WARNING``: + You can filter for log entries by severity. For example, the following + filter expression will match log entries with severities ``INFO``, + ``NOTICE``, and ``WARNING``: :: - severity > DEBUG AND severity <= WARNING + severity > DEBUG AND severity <= WARNING - If you are writing log entries, you should map other severity encodings to - one of these standard levels. For example, you might map all of Java's FINE, - FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can preserve the - original severity level in the log entry payload if you wish. + If you are writing log entries, you should map other severity encodings + to one of these standard levels. For example, you might map all of + Java's FINE, FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can + preserve the original severity level in the log entry payload if you + wish. 
Attributes: DEFAULT (int): (0) The log entry has no assigned severity level. @@ -157,13 +158,13 @@ class ValueType(enum.IntEnum): Attributes: VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. - BOOL (int): The value is a boolean. - This value type can be used only if the metric kind is ``GAUGE``. + BOOL (int): The value is a boolean. This value type can be used only if the metric + kind is ``GAUGE``. INT64 (int): The value is a signed 64-bit integer. DOUBLE (int): The value is a double precision floating point number. - STRING (int): The value is a text string. - This value type can be used only if the metric kind is ``GAUGE``. - DISTRIBUTION (int): The value is a ````Distribution````. + STRING (int): The value is a text string. This value type can be used only if the + metric kind is ``GAUGE``. + DISTRIBUTION (int): The value is a ``Distribution``. MONEY (int): The value is money. """ VALUE_TYPE_UNSPECIFIED = 0 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index f0a29707b9dc..2bf9ec6733c8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -158,9 +158,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -202,16 +203,15 @@ def delete_log(self, :: - \"projects/[PROJECT_ID]/logs/[LOG_ID]\" - \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" - \"folders/[FOLDER_ID]/logs/[LOG_ID]\" + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL-encoded. For example, - ``\"projects/my-project-id/logs/syslog\"``, - ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. - For more information about log names, see - ``LogEntry``. + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see ``LogEntry``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -266,79 +266,83 @@ def write_log_entries(self, >>> >>> client = logging_v2.LoggingServiceV2Client() >>> - >>> # TODO: Initialize ``entries``: + >>> # TODO: Initialize `entries`: >>> entries = [] >>> >>> response = client.write_log_entries(entries) Args: - entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log - entries in this list does not matter. Values supplied in this method's - ``log_name``, ``resource``, and ``labels`` fields are copied into those log - entries in this list that do not include values for their corresponding - fields. For more information, see the - ``LogEntry`` type. 
- - If the ``timestamp`` or ``insert_id`` fields are missing in log entries, then - this method supplies the current time or a unique identifier, respectively. - The supplied values are chosen so that, among the log entries that did not - supply their own values, the entries earlier in the list will sort before - the entries later in the list. See the ``entries.list`` method. - - Log entries with timestamps that are more than the - `logs retention period `_ in the past or more than - 24 hours in the future will not be available when calling ``entries.list``. - However, those log entries can still be exported with - `LogSinks `_. - - To improve throughput and to avoid exceeding the - `quota limit `_ for calls to ``entries.write``, - you should try to include several log entries in this list, - rather than calling this method for each individual log entry. + entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log entries + in this list does not matter. Values supplied in this method's + ``log_name``, ``resource``, and ``labels`` fields are copied into those + log entries in this list that do not include values for their + corresponding fields. For more information, see the ``LogEntry`` type. + + If the ``timestamp`` or ``insert_id`` fields are missing in log entries, + then this method supplies the current time or a unique identifier, + respectively. The supplied values are chosen so that, among the log + entries that did not supply their own values, the entries earlier in the + list will sort before the entries later in the list. See the + ``entries.list`` method. + + Log entries with timestamps that are more than the `logs retention + period `__ in the past or + more than 24 hours in the future will not be available when calling + ``entries.list``. However, those log entries can still be exported with + `LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ for calls to + ``entries.write``, you should try to include several log entries in this + list, rather than calling this method for each individual log entry. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogEntry` - log_name (str): Optional. A default log resource name that is assigned to all log entries - in ``entries`` that do not specify a value for ``log_name``: + log_name (str): Optional. A default log resource name that is assigned to all log + entries in ``entries`` that do not specify a value for ``log_name``: :: - \"projects/[PROJECT_ID]/logs/[LOG_ID]\" - \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" - \"folders/[FOLDER_ID]/logs/[LOG_ID]\" + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL-encoded. For example: :: - \"projects/my-project-id/logs/syslog\" - \"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\" + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - The permission logging.logEntries.create is needed on each - project, organization, billing account, or folder that is receiving - new log entries, whether the resource is specified in - logName or in an individual log entry. 
- resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all log - entries in ``entries`` that do not specify a value for ``resource``. Example: + The permission logging.logEntries.create is needed on each project, + organization, billing account, or folder that is receiving new log + entries, whether the resource is specified in logName or in an + individual log entry. + resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all + log entries in ``entries`` that do not specify a value for ``resource``. + Example: :: - { \"type\": \"gce_instance\", - \"labels\": { - \"zone\": \"us-central1-a\", \"instance_id\": \"00000000000000000000\" }} + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See ``LogEntry``. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.MonitoredResource` - labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all log - entries in ``entries``. If a log entry already has a label with the same key - as a label in this parameter, then the log entry's label is not changed. - See ``LogEntry``. + labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all + log entries in ``entries``. If a log entry already has a label with the + same key as a label in this parameter, then the log entry's label is not + changed. See ``LogEntry``. partial_success (bool): Optional. Whether valid entries should be written even if some other - entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any - entry is not written, then the response status is the error associated - with one of the failed entries and the response includes error details - keyed by the entries' zero-based index in the ``entries.write`` method. + entries fail due to INVALID\_ARGUMENT or PERMISSION\_DENIED errors. If + any entry is not written, then the response status is the error + associated with one of the failed entries and the response includes + error details keyed by the entries' zero-based index in the + ``entries.write`` method. dry_run (bool): Optional. If true, the request should expect normal response, but the entries won't be persisted nor exported. Useful for checking whether the logging API endpoints are working properly before sending valuable data. @@ -394,16 +398,16 @@ def list_log_entries(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Lists log entries. Use this method to retrieve log entries from - Logging. For ways to export log entries, see - `Exporting Logs `_. + Lists log entries. Use this method to retrieve log entries from Logging. + For ways to export log entries, see `Exporting + Logs `__. Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.LoggingServiceV2Client() >>> - >>> # TODO: Initialize ``resource_names``: + >>> # TODO: Initialize `resource_names`: >>> resource_names = [] >>> >>> # Iterate over all results @@ -415,41 +419,43 @@ def list_log_entries(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_log_entries(resource_names, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_log_entries(resource_names).pages: ... for element in page: ... # process element ... 
pass Args: - resource_names (list[str]): Required. Names of one or more parent resources from which to - retrieve log entries: + resource_names (list[str]): Required. Names of one or more parent resources from which to retrieve + log entries: :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` field are added to this list. - project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project identifiers - or project numbers from which to retrieve log entries. Example: - ``\"my-project-1A\"``. If present, these project identifiers are converted to - resource name format and added to the list of resources in + project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project + identifiers or project numbers from which to retrieve log entries. + Example: ``"my-project-1A"``. If present, these project identifiers are + converted to resource name format and added to the list of resources in ``resource_names``. - filter_ (str): Optional. A filter that chooses which log entries to return. See [Advanced - Logs Filters](/logging/docs/view/advanced_filters). Only log entries that - match the filter are returned. An empty filter matches all log entries in - the resources listed in ``resource_names``. Referencing a parent resource - that is not listed in ``resource_names`` will cause the filter to return no - results. - The maximum length of the filter is 20000 characters. - order_by (str): Optional. How the results should be sorted. Presently, the only permitted - values are ``\"timestamp asc\"`` (default) and ``\"timestamp desc\"``. The first - option returns entries in order of increasing values of - ``LogEntry.timestamp`` (oldest first), and the second option returns entries - in order of decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` values. + filter_ (str): Optional. A filter that chooses which log entries to return. See + `Advanced Logs + Filters `__. + Only log entries that match the filter are returned. An empty filter + matches all log entries in the resources listed in ``resource_names``. + Referencing a parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum length of the + filter is 20000 characters. + order_by (str): Optional. How the results should be sorted. Presently, the only + permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in order of + increasing values of ``LogEntry.timestamp`` (oldest first), and the + second option returns entries in order of decreasing timestamps (newest + first). Entries with equal timestamps are returned in order of their + ``insert_id`` values. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -532,7 +538,7 @@ def list_monitored_resource_descriptors( >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_monitored_resource_descriptors(options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_monitored_resource_descriptors().pages: ... for element in page: ... 
# process element ... pass @@ -619,7 +625,7 @@ def list_logs(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_logs(parent, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_logs(parent).pages: ... for element in page: ... # process element ... pass @@ -629,10 +635,10 @@ def list_logs(self, :: - \"projects/[PROJECT_ID]\" - \"organizations/[ORGANIZATION_ID]\" - \"billingAccounts/[BILLING_ACCOUNT_ID]\" - \"folders/[FOLDER_ID]\" + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 6dde895a5e3e..581b2ef16c17 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -163,9 +163,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -207,7 +208,7 @@ def list_log_metrics(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_log_metrics(parent, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_log_metrics(parent).pages: ... for element in page: ... # process element ... pass @@ -217,7 +218,7 @@ def list_log_metrics(self, :: - \"projects/[PROJECT_ID]\" + "projects/[PROJECT_ID]" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -296,7 +297,7 @@ def get_log_metric(self, :: - \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -348,7 +349,7 @@ def create_log_metric(self, >>> >>> parent = client.project_path('[PROJECT]') >>> - >>> # TODO: Initialize ``metric``: + >>> # TODO: Initialize `metric`: >>> metric = {} >>> >>> response = client.create_log_metric(parent, metric) @@ -358,11 +359,12 @@ def create_log_metric(self, :: - \"projects/[PROJECT_ID]\" + "projects/[PROJECT_ID]" The new metric must be provided in the request. metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The new logs-based metric, which must not have an identifier that already exists. 
+ If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogMetric` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -419,7 +421,7 @@ def update_log_metric(self, >>> >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') >>> - >>> # TODO: Initialize ``metric``: + >>> # TODO: Initialize `metric`: >>> metric = {} >>> >>> response = client.update_log_metric(metric_name, metric) @@ -429,12 +431,13 @@ def update_log_metric(self, :: - \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - The updated metric must be provided in the request and it's - ``name`` field must be the same as ``[METRIC_ID]`` If the metric - does not exist in ``[PROJECT_ID]``, then a new metric is created. + The updated metric must be provided in the request and its ``name`` + field must be the same as ``[METRIC_ID]``. If the metric does not exist + in ``[PROJECT_ID]``, then a new metric is created. metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The updated metric. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogMetric` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -497,7 +500,7 @@ def delete_log_metric(self, :: - \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index be5b43eda2fa..e3e0b0de0ac8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -128,10 +128,10 @@ def get_sink(self): def create_sink(self): """Return the gRPC stub for {$apiMethod.name}. - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - ``writer_identity`` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. + Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the + sink's ``writer_identity`` is not permitted to write to the destination. + A sink can export log entries only from the resource owning the sink. Returns: Callable: A callable which accepts the appropriate @@ -144,10 +144,10 @@ def create_sink(self): def update_sink(self): """Return the gRPC stub for {$apiMethod.name}. - Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: ``destination``, and ``filter``. - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, and + ``filter``. The updated sink might also have a new ``writer_identity``; + see the ``unique_writer_identity`` field.
Returns: Callable: A callable which accepts the appropriate diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 19ac7878636d..8824a74e6e2f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -137,9 +137,9 @@ def write_log_entries(self): def list_log_entries(self): """Return the gRPC stub for {$apiMethod.name}. - Lists log entries. Use this method to retrieve log entries from - Logging. For ways to export log entries, see - `Exporting Logs `_. + Lists log entries. Use this method to retrieve log entries from Logging. + For ways to export log entries, see `Exporting + Logs `__. Returns: Callable: A callable which accepts the appropriate From 80d0027e62a18fedb4293db627ae3ccc328302a8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Nov 2018 14:03:35 -0500 Subject: [PATCH 200/855] Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. (#6391) Closes #6390. --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index fd996b380ff9..85eeaf96889e 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,8 +29,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-cloud-core<0.29dev,>=0.28.0', - 'google-api-core[grpc]<2.0.0dev,>=1.0.0', + 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', + 'google-cloud-core >= 0.28.0, < 0.29dev', ] extras = { } From 3fff3810b3f2fa1bbc75e3c19fc0408906197864 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 21 Nov 2018 11:14:59 -0500 Subject: [PATCH 201/855] Logging: pick up fixes to GAPIC generator. (#6631) Includes fixes from these PRs: - googleapis/gapic-generator#2407 - googleapis/gapic-generator#2396 Includes changes to generated tests. Closes #6474. --- .../gapic/config_service_v2_client.py | 17 ++- .../gapic/logging_service_v2_client.py | 17 ++- .../gapic/metrics_service_v2_client.py | 17 ++- .../config_service_v2_grpc_transport.py | 11 ++ .../logging_service_v2_grpc_transport.py | 11 ++ .../metrics_service_v2_grpc_transport.py | 11 ++ packages/google-cloud-logging/synth.py | 1 + .../v2/test_config_service_v2_client_v2.py | 103 ++++++++++++++---- .../v2/test_logging_service_v2_client_v2.py | 53 +++++++-- .../v2/test_metrics_service_v2_client_v2.py | 53 +++++++-- 10 files changed, 239 insertions(+), 55 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index c252e69bb860..b38c432ad046 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -108,7 +108,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=config_service_v2_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -141,13 +141,20 @@ def __init__(self, your own client library. 
""" # Raise deprecation warnings for things we want to go away. - if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = config_service_v2_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. # The transport is responsible for handling serialization and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 2bf9ec6733c8..2df0c2578f25 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -93,7 +93,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=logging_service_v2_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -126,13 +126,20 @@ def __init__(self, your own client library. """ # Raise deprecation warnings for things we want to go away. - if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = logging_service_v2_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. # The transport is responsible for handling serialization and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 581b2ef16c17..2a11e1ed84a7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -98,7 +98,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=metrics_service_v2_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -131,13 +131,20 @@ def __init__(self, your own client library. """ # Raise deprecation warnings for things we want to go away. - if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = metrics_service_v2_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. 
# The transport is responsible for handling serialization and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index e3e0b0de0ac8..224048411980 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -68,6 +68,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -98,6 +100,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + @property def list_sinks(self): """Return the gRPC stub for {$apiMethod.name}. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 8824a74e6e2f..4393bfb57f48 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -68,6 +68,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -98,6 +100,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + @property def delete_log(self): """Return the gRPC stub for {$apiMethod.name}. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index b87218e67645..4a4b809b892d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -68,6 +68,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -98,6 +100,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + @property def list_log_metrics(self): """Return the gRPC stub for {$apiMethod.name}. 
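Taken together, the regenerated clients above change three things callers can observe: ``client_config`` now defaults to ``None`` and is resolved lazily, the deprecation warnings for ``client_config`` and ``channel`` carry ``stacklevel=2`` so they point at the caller rather than at library internals, and every gRPC transport exposes its channel through a new ``channel`` property. A minimal sketch of how this surfaces to users, assuming application default credentials are available; ``my-project`` is a hypothetical project ID:

    import warnings

    from google.cloud import logging_v2
    from google.cloud.logging_v2.gapic import logging_service_v2_client_config

    client = logging_v2.LoggingServiceV2Client()

    # Transports now expose the underlying grpc.Channel directly.
    print(client.transport.channel)

    # List methods are paged via the returned iterator's `.pages` property,
    # replacing the old CallOptions(page_token=INITIAL_PAGE) idiom.
    pager = client.list_log_entries(["projects/my-project"], page_size=10)
    for page in pager.pages:
        for entry in page:
            print(entry.log_name)

    # Passing the deprecated `client_config` argument still works but warns,
    # and stacklevel=2 attributes the warning to this call site.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        logging_v2.LoggingServiceV2Client(
            client_config=logging_service_v2_client_config.config
        )
    assert any(
        issubclass(w.category, PendingDeprecationWarning) for w in caught
    )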
diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index a7971946b67b..2d3b53271fd6 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -30,6 +30,7 @@ s.move(library / 'google/cloud/logging_v2/proto') s.move(library / 'google/cloud/logging_v2/gapic') +s.move(library / 'tests/unit/gapic/v2') # Issues exist where python files should define the source encoding # https://github.com/googleapis/gapic-generator/issues/2097 diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py index a6893944cff4..70b375cf3dd6 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +15,7 @@ # limitations under the License. """Unit tests.""" +import mock import pytest from google.cloud import logging_v2 @@ -75,7 +78,10 @@ def test_list_sinks(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -93,7 +99,10 @@ def test_list_sinks(self): def test_list_sinks_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -120,7 +129,10 @@ def test_get_sink(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -137,7 +149,10 @@ def test_get_sink(self): def test_get_sink_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -163,7 +178,10 @@ def test_create_sink(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -181,7 +199,10 @@ def test_create_sink(self): def test_create_sink_exception(self): # Mock the API response channel = 
ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -208,7 +229,10 @@ def test_update_sink(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -226,7 +250,10 @@ def test_update_sink(self): def test_update_sink_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -237,7 +264,10 @@ def test_update_sink_exception(self): def test_delete_sink(self): channel = ChannelStub() - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -253,7 +283,10 @@ def test_delete_sink(self): def test_delete_sink_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request sink_name = client.sink_path('[PROJECT]', '[SINK]') @@ -275,7 +308,10 @@ def test_list_exclusions(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -294,7 +330,10 @@ def test_list_exclusions(self): def test_list_exclusions_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -320,7 +359,10 @@ def test_get_exclusion(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') @@ -336,7 +378,10 @@ def 
test_get_exclusion(self): def test_get_exclusion_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') @@ -361,7 +406,10 @@ def test_create_exclusion(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -379,7 +427,10 @@ def test_create_exclusion(self): def test_create_exclusion_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -405,7 +456,10 @@ def test_update_exclusion(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') @@ -424,7 +478,10 @@ def test_update_exclusion(self): def test_update_exclusion_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') @@ -436,7 +493,10 @@ def test_update_exclusion_exception(self): def test_delete_exclusion(self): channel = ChannelStub() - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup Request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') @@ -451,7 +511,10 @@ def test_delete_exclusion(self): def test_delete_exclusion_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.ConfigServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() # Setup request name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py index 7d942ec9e67f..548955147ebf 100644 --- 
a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +15,7 @@ # limitations under the License. """Unit tests.""" +import mock import pytest from google.api import monitored_resource_pb2 @@ -64,7 +67,10 @@ class CustomException(Exception): class TestLoggingServiceV2Client(object): def test_delete_log(self): channel = ChannelStub() - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup Request log_name = client.log_path('[PROJECT]', '[LOG]') @@ -79,7 +85,10 @@ def test_delete_log(self): def test_delete_log_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup request log_name = client.log_path('[PROJECT]', '[LOG]') @@ -95,7 +104,10 @@ def test_write_log_entries(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup Request entries = [] @@ -111,7 +123,10 @@ def test_write_log_entries(self): def test_write_log_entries_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup request entries = [] @@ -133,7 +148,10 @@ def test_list_log_entries(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup Request resource_names = [] @@ -152,7 +170,10 @@ def test_list_log_entries(self): def test_list_log_entries_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup request resource_names = [] @@ -175,7 +196,10 @@ def test_list_monitored_resource_descriptors(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() paged_list_response = 
client.list_monitored_resource_descriptors() resources = list(paged_list_response) @@ -191,7 +215,10 @@ def test_list_monitored_resource_descriptors(self): def test_list_monitored_resource_descriptors_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() paged_list_response = client.list_monitored_resource_descriptors() with pytest.raises(CustomException): @@ -210,7 +237,10 @@ def test_list_logs(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -228,7 +258,10 @@ def test_list_logs(self): def test_list_logs_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.LoggingServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.LoggingServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py index 2efc90c24a1d..c5c16551c67c 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +15,7 @@ # limitations under the License. 
"""Unit tests.""" +import mock import pytest from google.cloud import logging_v2 @@ -74,7 +77,10 @@ def test_list_log_metrics(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -93,7 +99,10 @@ def test_list_log_metrics(self): def test_list_log_metrics_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -118,7 +127,10 @@ def test_get_log_metric(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup Request metric_name = client.metric_path('[PROJECT]', '[METRIC]') @@ -135,7 +147,10 @@ def test_get_log_metric(self): def test_get_log_metric_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup request metric_name = client.metric_path('[PROJECT]', '[METRIC]') @@ -159,7 +174,10 @@ def test_create_log_metric(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup Request parent = client.project_path('[PROJECT]') @@ -177,7 +195,10 @@ def test_create_log_metric(self): def test_create_log_metric_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup request parent = client.project_path('[PROJECT]') @@ -202,7 +223,10 @@ def test_update_log_metric(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup Request metric_name = client.metric_path('[PROJECT]', '[METRIC]') @@ -220,7 +244,10 @@ def test_update_log_metric(self): def test_update_log_metric_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup request metric_name = client.metric_path('[PROJECT]', '[METRIC]') @@ -231,7 +258,10 @@ def test_update_log_metric_exception(self): def test_delete_log_metric(self): channel = ChannelStub() - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup Request metric_name = client.metric_path('[PROJECT]', '[METRIC]') @@ -247,7 +277,10 @@ def test_delete_log_metric(self): def test_delete_log_metric_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = logging_v2.MetricsServiceV2Client(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.MetricsServiceV2Client() # Setup request metric_name = client.metric_path('[PROJECT]', '[METRIC]') From bb603c800fe26dbacd52c0d8821d530963a341e0 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 28 Nov 2018 13:55:23 -0800 Subject: [PATCH 202/855] Add templates for flake8, coveragerc, noxfile, and black. (#6642) --- packages/google-cloud-logging/.coveragerc | 7 +++ packages/google-cloud-logging/.flake8 | 1 + packages/google-cloud-logging/MANIFEST.in | 3 +- packages/google-cloud-logging/noxfile.py | 75 +++++++++++++++-------- packages/google-cloud-logging/synth.py | 35 ++++++----- 5 files changed, 79 insertions(+), 42 deletions(-) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index a54b99aa14b7..51fec440cebf 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -9,3 +9,10 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */google-cloud-python/core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 1f44a90f8195..61766fa84d02 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,4 +1,5 @@ [flake8] +ignore = E203, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index fc77f8c82ff0..9cbf175afe6b 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,4 +1,5 @@ include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * -global-exclude *.pyc __pycache__ +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index dfc477e5d7dd..882d014b9f08 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -33,11 +33,54 @@ 'webob', ) +@nox.session(python="3.7") +def blacken(session): + """Run black. + + Format code to uniform standard. 
+ """ + session.install("black") + session.run( + "black", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", "black", *LOCAL_DEPS) + session.run( + "black", + "--check", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + def default(session, django_dep=('django',)): """Default unit test session. """ - + # Install all test dependencies, then install this package in-place. deps = UNIT_TEST_DEPS deps += django_dep @@ -114,34 +157,14 @@ def system(session): *session.posargs) -@nox.session(python='3.6') -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install('flake8', *LOCAL_DEPS) - session.install('.') - session.run('flake8', 'google', 'tests') - - -@nox.session(python='3.6') -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install('docutils', 'Pygments') - session.run( - 'python', 'setup.py', 'check', '--restructuredtext', '--strict') - - -@nox.session(python='3.6') +@nox.session(python="3.7") def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 2d3b53271fd6..a3e978335ac5 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -17,24 +17,29 @@ from synthtool import gcp gapic = gcp.GAPICGenerator() +common = gcp.CommonTemplates() - -#---------------------------------------------------------------------------- -# Generate logging client -#---------------------------------------------------------------------------- +# ---------------------------------------------------------------------------- +# Generate logging GAPIC layer +# ---------------------------------------------------------------------------- library = gapic.py_library( - 'logging', - 'v2', - config_path='/google/logging/artman_logging.yaml', - artman_output_name='logging-v2') + "logging", + "v2", + config_path="/google/logging/artman_logging.yaml", + artman_output_name="logging-v2", +) -s.move(library / 'google/cloud/logging_v2/proto') -s.move(library / 'google/cloud/logging_v2/gapic') -s.move(library / 'tests/unit/gapic/v2') +s.move(library / "google/cloud/logging_v2/proto") +s.move(library / "google/cloud/logging_v2/gapic") +s.move(library / "tests/unit/gapic/v2") # Issues exist where python files should define the source encoding # https://github.com/googleapis/gapic-generator/issues/2097 -s.replace( - 'google/**/proto/*_pb2.py', - r"(^.*$\n)*", - r"# -*- coding: utf-8 -*-\n\g<0>") +s.replace("google/**/proto/*_pb2.py", r"(^.*$\n)*", r"# -*- coding: utf-8 -*-\n\g<0>") + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(unit_cov_level=95, cov_level=100) +# Don't move noxfile. logging has special testing setups for django, etc +s.move(templated_files, exclude="noxfile.py") From 0b9e15131802780fa0b1a380bd7a2d73f9fcfdd0 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 11:02:59 -0800 Subject: [PATCH 203/855] Blackening Continued... 
(#6667) * blacken bigtable * blacken logging * blacken translate * blacken runtimeconfig * blacken dns --- packages/google-cloud-logging/docs/conf.py | 167 ++-- .../google-cloud-logging/docs/snippets.py | 98 +- .../google-cloud-logging/google/__init__.py | 2 + .../google/cloud/__init__.py | 2 + .../google/cloud/logging/__init__.py | 9 +- .../google/cloud/logging/_gapic.py | 116 ++- .../google/cloud/logging/_helpers.py | 12 +- .../google/cloud/logging/_http.py | 146 ++- .../google/cloud/logging/client.py | 62 +- .../google/cloud/logging/entries.py | 152 +-- .../google/cloud/logging/handlers/__init__.py | 11 +- .../google/cloud/logging/handlers/_helpers.py | 34 +- .../cloud/logging/handlers/app_engine.py | 49 +- .../google/cloud/logging/handlers/handlers.py | 32 +- .../logging/handlers/middleware/__init__.py | 2 +- .../logging/handlers/middleware/request.py | 2 +- .../logging/handlers/transports/__init__.py | 5 +- .../handlers/transports/background_thread.py | 111 +- .../cloud/logging/handlers/transports/base.py | 5 +- .../cloud/logging/handlers/transports/sync.py | 21 +- .../google/cloud/logging/logger.py | 70 +- .../google/cloud/logging/metric.py | 26 +- .../google/cloud/logging/resource.py | 13 +- .../google/cloud/logging/sink.py | 23 +- .../google/cloud/logging_v2/__init__.py | 10 +- .../gapic/config_service_v2_client.py | 459 +++++---- .../gapic/config_service_v2_client_config.py | 32 +- .../google/cloud/logging_v2/gapic/enums.py | 8 + .../gapic/logging_service_v2_client.py | 282 +++--- .../gapic/logging_service_v2_client_config.py | 24 +- .../gapic/metrics_service_v2_client.py | 263 ++--- .../gapic/metrics_service_v2_client_config.py | 18 +- .../config_service_v2_grpc_transport.py | 60 +- .../logging_service_v2_grpc_transport.py | 49 +- .../metrics_service_v2_grpc_transport.py | 50 +- .../google/cloud/logging_v2/types.py | 9 +- .../v2/test_system_logging_service_v2_v2.py | 5 +- .../tests/system/test_system.py | 232 ++--- .../v2/test_config_service_v2_client_v2.py | 230 ++--- .../v2/test_logging_service_v2_client_v2.py | 70 +- .../v2/test_metrics_service_v2_client_v2.py | 118 ++- .../unit/handlers/middleware/test_request.py | 12 +- .../tests/unit/handlers/test__helpers.py | 73 +- .../tests/unit/handlers/test_app_engine.py | 82 +- .../unit/handlers/test_container_engine.py | 25 +- .../tests/unit/handlers/test_handlers.py | 23 +- .../transports/test_background_thread.py | 180 ++-- .../unit/handlers/transports/test_base.py | 2 +- .../unit/handlers/transports/test_sync.py | 52 +- .../tests/unit/test__gapic.py | 298 +++--- .../tests/unit/test__helpers.py | 44 +- .../tests/unit/test__http.py | 576 +++++------ .../tests/unit/test_client.py | 368 ++++--- .../tests/unit/test_entries.py | 640 +++++------- .../tests/unit/test_logger.py | 945 ++++++++---------- .../tests/unit/test_metric.py | 123 ++- .../tests/unit/test_sink.py | 226 ++--- 57 files changed, 3248 insertions(+), 3510 deletions(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index adaa0afcea45..79ef19f4a583 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -18,50 +18,50 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -__version__ = '0.91.4' +__version__ = "0.91.4" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", ] # autodoc/autosummary flags -autoclass_content = 'both' -autodoc_default_flags = ['members'] +autoclass_content = "both" +autodoc_default_flags = ["members"] autosummary_generate = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'google-cloud-logging' -copyright = u'2017, Google' -author = u'Google APIs' +project = u"google-cloud-logging" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -70,7 +70,7 @@ # The full version, including alpha/beta/rc tags. release = __version__ # The short X.Y version. -version = '.'.join(release.split('.')[0:2]) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -81,37 +81,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. 
-#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -120,31 +120,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -154,78 +154,75 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. 
# Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'google-cloud-logging-doc' +htmlhelp_basename = "google-cloud-logging-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. #'preamble': '', - # Latex figure (float) alignment #'figure_align': 'htbp', } @@ -234,39 +231,51 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'google-cloud-logging.tex', - u'google-cloud-logging Documentation', author, 'manual'), + ( + master_doc, + "google-cloud-logging.tex", + u"google-cloud-logging Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'google-cloud-logging', - u'google-cloud-logging Documentation', [author], 1)] +man_pages = [ + ( + master_doc, + "google-cloud-logging", + u"google-cloud-logging Documentation", + [author], + 1, + ) +] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -274,27 +283,33 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'google-cloud-logging', u'google-cloud-logging Documentation', - author, 'google-cloud-logging', - 'GAPIC library for the {metadata.shortName} v2 service', 'APIs'), + ( + master_doc, + "google-cloud-logging", + u"google-cloud-logging Documentation", + author, + "google-cloud-logging", + "GAPIC library for the {metadata.shortName} v2 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. 
-#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - 'python': ('http://python.readthedocs.org/en/latest/', None), - 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), } # Napoleon settings diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index eabc46fa8073..1b3987e8fd61 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -49,14 +49,18 @@ def instantiate_client(_unused_client, _unused_to_delete): # [START client_create_default] from google.cloud import logging + client = logging.Client() # [END client_create_default] credentials = object() # [START client_create_explicit] from google.cloud import logging - client = logging.Client(project='my-project', credentials=credentials) + + client = logging.Client(project="my-project", credentials=credentials) # [END client_create_explicit] + + # pylint: enable=reimported,unused-variable,unused-argument @@ -70,13 +74,14 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument # [END client_list_entries_default] # [START client_list_entries_filter] - FILTER = 'logName:log_name AND textPayload:simple' + FILTER = "logName:log_name AND textPayload:simple" for entry in client.list_entries(filter_=FILTER): # API call(s) do_something_with(entry) # [END client_list_entries_filter] # [START client_list_entries_order_by] from google.cloud.logging import DESCENDING + for entry in client.list_entries(order_by=DESCENDING): # API call(s) do_something_with(entry) # [END client_list_entries_order_by] @@ -97,11 +102,12 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument # @snippet Commented because we need real project IDs to test def client_list_entries_multi_project( - client, to_delete): # pylint: disable=unused-argument + client, to_delete +): # pylint: disable=unused-argument """List entries via client across multiple projects.""" # [START client_list_entries_multi_project] - PROJECT_IDS = ['one-project', 'another-project'] + PROJECT_IDS = ["one-project", "another-project"] for entry in client.list_entries(project_ids=PROJECT_IDS): # API call(s) do_something_with(entry) # [END client_list_entries_multi_project] @@ -110,7 +116,7 @@ def client_list_entries_multi_project( @snippet def logger_usage(client, to_delete): """Logger usage.""" - LOG_NAME = 'logger_usage_%d' % (_millis()) + LOG_NAME = "logger_usage_%d" % (_millis()) # [START logger_create] logger = client.logger(LOG_NAME) @@ -122,14 +128,14 @@ def logger_usage(client, to_delete): # [END logger_log_text] # [START logger_log_struct] - logger.log_struct({ - 'message': 'My second entry', - 'weather': 'partly cloudy', - }) # API call + logger.log_struct( + {"message": "My second entry", "weather": "partly cloudy"} + ) # API call # [END logger_log_struct] # [START logger_list_entries] from google.cloud.logging import DESCENDING + for entry in logger.list_entries(order_by=DESCENDING): # API call(s) do_something_with(entry) # 
[END logger_list_entries] @@ -146,10 +152,10 @@ def _logger_delete(): @snippet def metric_crud(client, to_delete): """Metric CRUD.""" - METRIC_NAME = 'robots-%d' % (_millis(),) + METRIC_NAME = "robots-%d" % (_millis(),) DESCRIPTION = "Robots all up in your server" - FILTER = 'logName:apache-access AND textPayload:robot' - UPDATED_FILTER = 'textPayload:robot' + FILTER = "logName:apache-access AND textPayload:robot" + UPDATED_FILTER = "textPayload:robot" UPDATED_DESCRIPTION = "Danger, Will Robinson!" # [START client_list_metrics] @@ -158,8 +164,7 @@ def metric_crud(client, to_delete): # [END client_list_metrics] # [START metric_create] - metric = client.metric( - METRIC_NAME, filter_=FILTER, description=DESCRIPTION) + metric = client.metric(METRIC_NAME, filter_=FILTER, description=DESCRIPTION) assert not metric.exists() # API call metric.create() # API call assert metric.exists() # API call @@ -193,14 +198,15 @@ def _metric_delete(): def _sink_storage_setup(client): from google.cloud import storage - BUCKET_NAME = 'sink-storage-%d' % (_millis(),) + + BUCKET_NAME = "sink-storage-%d" % (_millis(),) client = storage.Client() bucket = client.bucket(BUCKET_NAME) bucket.create() # [START sink_bucket_permissions] bucket.acl.reload() # API call - logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group = bucket.acl.group("cloud-logs@google.com") logs_group.grant_owner() bucket.acl.add_entity(logs_group) bucket.acl.save() # API call @@ -214,11 +220,11 @@ def sink_storage(client, to_delete): """Sink log entries to storage.""" bucket = _sink_storage_setup(client) to_delete.append(bucket) - SINK_NAME = 'robots-storage-%d' % (_millis(),) - FILTER = 'textPayload:robot' + SINK_NAME = "robots-storage-%d" % (_millis(),) + FILTER = "textPayload:robot" # [START sink_storage_create] - DESTINATION = 'storage.googleapis.com/%s' % (bucket.name,) + DESTINATION = "storage.googleapis.com/%s" % (bucket.name,) sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) assert not sink.exists() # API call sink.create() # API call @@ -229,7 +235,8 @@ def sink_storage(client, to_delete): def _sink_bigquery_setup(client): from google.cloud import bigquery - DATASET_NAME = 'sink_bigquery_%d' % (_millis(),) + + DATASET_NAME = "sink_bigquery_%d" % (_millis(),) client = bigquery.Client() dataset = client.dataset(DATASET_NAME) dataset.create() @@ -237,9 +244,9 @@ def _sink_bigquery_setup(client): # [START sink_dataset_permissions] from google.cloud.bigquery.dataset import AccessGrant + grants = dataset.access_grants - grants.append(AccessGrant( - 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) + grants.append(AccessGrant("WRITER", "groupByEmail", "cloud-logs@google.com")) dataset.access_grants = grants dataset.update() # API call # [END sink_dataset_permissions] @@ -252,11 +259,11 @@ def sink_bigquery(client, to_delete): """Sink log entries to bigquery.""" dataset = _sink_bigquery_setup(client) to_delete.append(dataset) - SINK_NAME = 'robots-bigquery-%d' % (_millis(),) - FILTER = 'textPayload:robot' + SINK_NAME = "robots-bigquery-%d" % (_millis(),) + FILTER = "textPayload:robot" # [START sink_bigquery_create] - DESTINATION = 'bigquery.googleapis.com%s' % (dataset.path,) + DESTINATION = "bigquery.googleapis.com%s" % (dataset.path,) sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) assert not sink.exists() # API call sink.create() # API call @@ -267,14 +274,15 @@ def sink_bigquery(client, to_delete): def _sink_pubsub_setup(client): from google.cloud import pubsub - TOPIC_NAME = 
'sink-pubsub-%d' % (_millis(),) + + TOPIC_NAME = "sink-pubsub-%d" % (_millis(),) client = pubsub.Client() topic = client.topic(TOPIC_NAME) topic.create() # [START sink_topic_permissions] policy = topic.get_iam_policy() # API call - policy.owners.add(policy.group('cloud-logs@google.com')) + policy.owners.add(policy.group("cloud-logs@google.com")) topic.set_iam_policy(policy) # API call # [END sink_topic_permissions] @@ -286,12 +294,12 @@ def sink_pubsub(client, to_delete): """Sink log entries to pubsub.""" topic = _sink_pubsub_setup(client) to_delete.append(topic) - SINK_NAME = 'robots-pubsub-%d' % (_millis(),) - FILTER = 'logName:apache-access AND textPayload:robot' - UPDATED_FILTER = 'textPayload:robot' + SINK_NAME = "robots-pubsub-%d" % (_millis(),) + FILTER = "logName:apache-access AND textPayload:robot" + UPDATED_FILTER = "textPayload:robot" # [START sink_pubsub_create] - DESTINATION = 'pubsub.googleapis.com/%s' % (topic.full_name,) + DESTINATION = "pubsub.googleapis.com/%s" % (topic.full_name,) sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) assert not sink.exists() # API call sink.create() # API call @@ -328,37 +336,39 @@ def sink_pubsub(client, to_delete): def logging_handler(client): # [START create_default_handler] import logging + handler = client.get_default_handler() - cloud_logger = logging.getLogger('cloudLogger') + cloud_logger = logging.getLogger("cloudLogger") cloud_logger.setLevel(logging.INFO) cloud_logger.addHandler(handler) - cloud_logger.error('bad news') + cloud_logger.error("bad news") # [END create_default_handler] # [START create_cloud_handler] from google.cloud.logging.handlers import CloudLoggingHandler + handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger('cloudLogger') + cloud_logger = logging.getLogger("cloudLogger") cloud_logger.setLevel(logging.INFO) cloud_logger.addHandler(handler) - cloud_logger.error('bad news') + cloud_logger.error("bad news") # [END create_cloud_handler] # [START create_named_handler] - handler = CloudLoggingHandler(client, name='mycustomlog') + handler = CloudLoggingHandler(client, name="mycustomlog") # [END create_named_handler] @snippet def setup_logging(client): import logging + # [START setup_logging] client.setup_logging(log_level=logging.INFO) # [END setup_logging] # [START setup_logging_excludes] - client.setup_logging(log_level=logging.INFO, - excluded_loggers=('werkzeug',)) + client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",)) # [END setup_logging_excludes] @@ -367,8 +377,7 @@ def _line_no(func): def _find_examples(): - funcs = [obj for obj in globals().values() - if getattr(obj, '_snippet', False)] + funcs = [obj for obj in globals().values() if getattr(obj, "_snippet", False)] for func in sorted(funcs, key=_line_no): yield func @@ -379,6 +388,7 @@ def _name_and_doc(func): def _backoff_not_found(deleter): from google.cloud.exceptions import NotFound + timeouts = [1, 2, 4, 8, 16] while timeouts: try: @@ -393,16 +403,16 @@ def main(): client = Client() for example in _find_examples(): to_delete = [] - print('%-25s: %s' % _name_and_doc(example)) + print("%-25s: %s" % _name_and_doc(example)) try: example(client, to_delete) except AssertionError as failure: - print(' FAIL: %s' % (failure,)) + print(" FAIL: %s" % (failure,)) except Exception as error: # pylint: disable=broad-except - print(' ERROR: %r' % (error,)) + print(" ERROR: %r" % (error,)) for item in to_delete: _backoff_not_found(item.delete) -if __name__ == '__main__': +if __name__ == "__main__": main() 
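Note: the docs/snippets.py hunks above only reflow existing examples; the client API they exercise is unchanged. A minimal sketch of that same surface, assuming this era of google-cloud-logging is installed and Application Default Credentials are configured (the log name "example_log" is illustrative, not part of the patch):

    # Sketch only: names below ("example_log", "cloudLogger") are made up.
    import logging

    from google.cloud import logging as cloud_logging
    from google.cloud.logging import DESCENDING
    from google.cloud.logging.handlers import CloudLoggingHandler

    client = cloud_logging.Client()  # project/credentials from environment
    logger = client.logger("example_log")  # no API call until the first write

    logger.log_text("Hello, world!")  # API call
    logger.log_struct({"message": "hi", "weather": "sunny"})  # API call

    # Route stdlib logging through the handler exercised in the snippets.
    handler = CloudLoggingHandler(client, name="example_log")
    py_logger = logging.getLogger("cloudLogger")
    py_logger.setLevel(logging.INFO)
    py_logger.addHandler(handler)
    py_logger.error("bad news")  # sent via the handler's transport

    for entry in logger.list_entries(order_by=DESCENDING):  # API call(s)
        print(entry.payload)  # newest first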
diff --git a/packages/google-cloud-logging/google/__init__.py b/packages/google-cloud-logging/google/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/packages/google-cloud-logging/google/__init__.py +++ b/packages/google-cloud-logging/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/google/cloud/__init__.py b/packages/google-cloud-logging/google/cloud/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/packages/google-cloud-logging/google/cloud/__init__.py +++ b/packages/google-cloud-logging/google/cloud/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index 952a5899c280..80de6c4b6113 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -16,14 +16,15 @@ from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-logging').version + +__version__ = get_distribution("google-cloud-logging").version from google.cloud.logging.client import Client -ASCENDING = 'timestamp asc' +ASCENDING = "timestamp asc" """Query string to order by ascending timestamps.""" -DESCENDING = 'timestamp desc' +DESCENDING = "timestamp desc" """Query string to order by descending timestamps.""" -__all__ = ['__version__', 'ASCENDING', 'Client', 'DESCENDING'] +__all__ = ["__version__", "ASCENDING", "Client", "DESCENDING"] diff --git a/packages/google-cloud-logging/google/cloud/logging/_gapic.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py index 2ff0d30cf3af..00e9f5f2ee15 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gapic.py @@ -18,12 +18,13 @@ import functools from google.api_core.gapic_v1 import client_info -from google.cloud.logging_v2.gapic.config_service_v2_client import ( - ConfigServiceV2Client) +from google.cloud.logging_v2.gapic.config_service_v2_client import ConfigServiceV2Client from google.cloud.logging_v2.gapic.logging_service_v2_client import ( - LoggingServiceV2Client) + LoggingServiceV2Client, +) from google.cloud.logging_v2.gapic.metrics_service_v2_client import ( - MetricsServiceV2Client) + MetricsServiceV2Client, +) from google.cloud.logging_v2.proto.logging_config_pb2 import LogSink from google.cloud.logging_v2.proto.logging_metrics_pb2 import LogMetric from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry @@ -36,8 +37,7 @@ from google.cloud.logging.metric import Metric -_CLIENT_INFO = client_info.ClientInfo( - client_library_version=__version__) +_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) class _LoggingAPI(object): @@ -50,12 +50,14 @@ class _LoggingAPI(object): :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object.
""" + def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_entries(self, projects, filter_='', order_by='', - page_size=0, page_token=None): + def list_entries( + self, projects, filter_="", order_by="", page_size=0, page_token=None + ): """Return a page of log entry resources. :type projects: list of strings @@ -85,8 +87,12 @@ def list_entries(self, projects, filter_='', order_by='', accessible to the current API. """ page_iter = self._gapic_api.list_log_entries( - [], project_ids=projects, filter_=filter_, order_by=order_by, - page_size=page_size) + [], + project_ids=projects, + filter_=filter_, + order_by=order_by, + page_size=page_size, + ) page_iter.client = self._client page_iter.next_page_token = page_token @@ -94,12 +100,10 @@ def list_entries(self, projects, filter_='', order_by='', # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - page_iter.item_to_value = functools.partial( - _item_to_entry, loggers=loggers) + page_iter.item_to_value = functools.partial(_item_to_entry, loggers=loggers) return page_iter - def write_entries(self, entries, logger_name=None, resource=None, - labels=None): + def write_entries(self, entries, logger_name=None, resource=None, labels=None): """API call: log an entry resource via a POST request :type entries: sequence of mapping @@ -120,8 +124,12 @@ def write_entries(self, entries, logger_name=None, resource=None, partial_success = False entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] self._gapic_api.write_log_entries( - entry_pbs, log_name=logger_name, resource=resource, labels=labels, - partial_success=partial_success) + entry_pbs, + log_name=logger_name, + resource=resource, + labels=labels, + partial_success=partial_success, + ) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -132,7 +140,7 @@ def logger_delete(self, project, logger_name): :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ - path = 'projects/%s/logs/%s' % (project, logger_name) + path = "projects/%s/logs/%s" % (project, logger_name) self._gapic_api.delete_log(path) @@ -146,6 +154,7 @@ class _SinksAPI(object): :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ + def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client @@ -170,16 +179,16 @@ def list_sinks(self, project, page_size=0, page_token=None): if not None, indicates that more sinks can be retrieved with another call (pass that value as ``page_token``). """ - path = 'projects/%s' % (project,) - page_iter = self._gapic_api.list_sinks( - path, page_size=page_size) + path = "projects/%s" % (project,) + page_iter = self._gapic_api.list_sinks(path, page_size=page_size) page_iter.client = self._client page_iter.next_page_token = page_token page_iter.item_to_value = _item_to_sink return page_iter - def sink_create(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_create( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): """API call: create a sink resource. See @@ -208,13 +217,10 @@ def sink_create(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). 
""" - parent = 'projects/%s' % (project,) - sink_pb = LogSink( - name=sink_name, filter=filter_, destination=destination) + parent = "projects/%s" % (project,) + sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) created_pb = self._gapic_api.create_sink( - parent, - sink_pb, - unique_writer_identity=unique_writer_identity + parent, sink_pb, unique_writer_identity=unique_writer_identity ) return MessageToDict(created_pb) @@ -231,14 +237,15 @@ def sink_get(self, project, sink_name): :returns: The sink object returned from the API (converted from a protobuf to a dictionary). """ - path = 'projects/%s/sinks/%s' % (project, sink_name) + path = "projects/%s/sinks/%s" % (project, sink_name) sink_pb = self._gapic_api.get_sink(path) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) - def sink_update(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_update( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): """API call: update a sink resource. :type project: str @@ -264,12 +271,11 @@ def sink_update(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). """ - path = 'projects/%s/sinks/%s' % (project, sink_name) + path = "projects/%s/sinks/%s" % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) sink_pb = self._gapic_api.update_sink( - path, - sink_pb, - unique_writer_identity=unique_writer_identity) + path, sink_pb, unique_writer_identity=unique_writer_identity + ) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) @@ -283,7 +289,7 @@ def sink_delete(self, project, sink_name): :type sink_name: str :param sink_name: the name of the sink """ - path = 'projects/%s/sinks/%s' % (project, sink_name) + path = "projects/%s/sinks/%s" % (project, sink_name) self._gapic_api.delete_sink(path) @@ -298,6 +304,7 @@ class _MetricsAPI(object): :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ + def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client @@ -322,9 +329,8 @@ def list_metrics(self, project, page_size=0, page_token=None): :class:`~google.cloud.logging.metric.Metric` accessible to the current API. """ - path = 'projects/%s' % (project,) - page_iter = self._gapic_api.list_log_metrics( - path, page_size=page_size) + path = "projects/%s" % (project,) + page_iter = self._gapic_api.list_log_metrics(path, page_size=page_size) page_iter.client = self._client page_iter.next_page_token = page_token page_iter.item_to_value = _item_to_metric @@ -349,9 +355,8 @@ def metric_create(self, project, metric_name, filter_, description): :type description: str :param description: description of the metric. """ - parent = 'projects/%s' % (project,) - metric_pb = LogMetric(name=metric_name, filter=filter_, - description=description) + parent = "projects/%s" % (project,) + metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) self._gapic_api.create_log_metric(parent, metric_pb) def metric_get(self, project, metric_name): @@ -367,7 +372,7 @@ def metric_get(self, project, metric_name): :returns: The metric object returned from the API (converted from a protobuf to a dictionary). 
""" - path = 'projects/%s/metrics/%s' % (project, metric_name) + path = "projects/%s/metrics/%s" % (project, metric_name) metric_pb = self._gapic_api.get_log_metric(path) # NOTE: LogMetric message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. @@ -393,11 +398,9 @@ def metric_update(self, project, metric_name, filter_, description): :returns: The metric object returned from the API (converted from a protobuf to a dictionary). """ - path = 'projects/%s/metrics/%s' % (project, metric_name) - metric_pb = LogMetric(name=path, filter=filter_, - description=description) - metric_pb = self._gapic_api.update_log_metric( - path, metric_pb) + path = "projects/%s/metrics/%s" % (project, metric_name) + metric_pb = LogMetric(name=path, filter=filter_, description=description) + metric_pb = self._gapic_api.update_log_metric(path, metric_pb) # NOTE: LogMetric message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(metric_pb) @@ -411,7 +414,7 @@ def metric_delete(self, project, metric_name): :type metric_name: str :param metric_name: the name of the metric """ - path = 'projects/%s/metrics/%s' % (project, metric_name) + path = "projects/%s/metrics/%s" % (project, metric_name) self._gapic_api.delete_log_metric(path) @@ -434,11 +437,11 @@ def _parse_log_entry(entry_pb): try: return MessageToDict(entry_pb) except TypeError: - if entry_pb.HasField('proto_payload'): + if entry_pb.HasField("proto_payload"): proto_payload = entry_pb.proto_payload - entry_pb.ClearField('proto_payload') + entry_pb.ClearField("proto_payload") entry_mapping = MessageToDict(entry_pb) - entry_mapping['protoPayload'] = proto_payload + entry_mapping["protoPayload"] = proto_payload return entry_mapping else: raise @@ -541,7 +544,8 @@ def make_logging_api(client): :returns: A metrics API instance with the proper credentials. """ generated = LoggingServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO) + credentials=client._credentials, client_info=_CLIENT_INFO + ) return _LoggingAPI(generated, client) @@ -555,7 +559,8 @@ def make_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ generated = MetricsServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO) + credentials=client._credentials, client_info=_CLIENT_INFO + ) return _MetricsAPI(generated, client) @@ -569,5 +574,6 @@ def make_sinks_api(client): :returns: A metrics API instance with the proper credentials. 
""" generated = ConfigServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO) + credentials=client._credentials, client_info=_CLIENT_INFO + ) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index b817dfdaa96f..837028f716d7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -21,10 +21,8 @@ from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry -METADATA_URL = 'http://metadata/computeMetadata/v1/' -METADATA_HEADERS = { - 'Metadata-Flavor': 'Google' -} +METADATA_URL = "http://metadata/computeMetadata/v1/" +METADATA_HEADERS = {"Metadata-Flavor": "Google"} def entry_from_resource(resource, client, loggers): @@ -45,13 +43,13 @@ def entry_from_resource(resource, client, loggers): :rtype: :class:`~google.cloud.logging.entries._BaseEntry` :returns: The entry instance, constructed via the resource """ - if 'textPayload' in resource: + if "textPayload" in resource: return TextEntry.from_api_repr(resource, client, loggers) - if 'jsonPayload' in resource: + if "jsonPayload" in resource: return StructEntry.from_api_repr(resource, client, loggers) - if 'protoPayload' in resource: + if "protoPayload" in resource: return ProtobufEntry.from_api_repr(resource, client, loggers) return LogEntry.from_api_repr(resource, client, loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index 82207f173503..d13baa175c1e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -35,18 +35,16 @@ class Connection(_http.JSONConnection): :param client: The client that owns the current connection. """ - API_BASE_URL = 'https://logging.googleapis.com' + API_BASE_URL = "https://logging.googleapis.com" """The base of the API call URL.""" - API_VERSION = 'v2' + API_VERSION = "v2" """The version of the API, used in building the API call's URL.""" - API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + API_URL_TEMPLATE = "{api_base_url}/{api_version}{path}" """A template for the URL of a particular API call.""" - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } + _EXTRA_HEADERS = {_http.CLIENT_INFO_HEADER: _CLIENT_INFO} class _LoggingAPI(object): @@ -64,8 +62,9 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_entries(self, projects, filter_=None, order_by=None, - page_size=None, page_token=None): + def list_entries( + self, projects, filter_=None, order_by=None, page_size=None, page_token=None + ): """Return a page of log entry resources. See @@ -97,38 +96,37 @@ def list_entries(self, projects, filter_=None, order_by=None, :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. 
""" - extra_params = {'projectIds': projects} + extra_params = {"projectIds": projects} if filter_ is not None: - extra_params['filter'] = filter_ + extra_params["filter"] = filter_ if order_by is not None: - extra_params['orderBy'] = order_by + extra_params["orderBy"] = order_by if page_size is not None: - extra_params['pageSize'] = page_size + extra_params["pageSize"] = page_size - path = '/entries:list' + path = "/entries:list" # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - item_to_value = functools.partial( - _item_to_entry, loggers=loggers) + item_to_value = functools.partial(_item_to_entry, loggers=loggers) iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, item_to_value=item_to_value, - items_key='entries', + items_key="entries", page_token=page_token, - extra_params=extra_params) + extra_params=extra_params, + ) # This method uses POST to make a read-only request. - iterator._HTTP_METHOD = 'POST' + iterator._HTTP_METHOD = "POST" return iterator - def write_entries(self, entries, logger_name=None, resource=None, - labels=None): + def write_entries(self, entries, logger_name=None, resource=None, labels=None): """API call: log an entry resource via a POST request See @@ -149,18 +147,18 @@ def write_entries(self, entries, logger_name=None, resource=None, :param labels: default labels to associate with entries; individual entries may override. """ - data = {'entries': list(entries)} + data = {"entries": list(entries)} if logger_name is not None: - data['logName'] = logger_name + data["logName"] = logger_name if resource is not None: - data['resource'] = resource + data["resource"] = resource if labels is not None: - data['labels'] = labels + data["labels"] = labels - self.api_request(method='POST', path='/entries:write', data=data) + self.api_request(method="POST", path="/entries:write", data=data) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -174,8 +172,8 @@ def logger_delete(self, project, logger_name): :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ - path = '/projects/%s/logs/%s' % (project, logger_name) - self.api_request(method='DELETE', path=path) + path = "/projects/%s/logs/%s" % (project, logger_name) + self.api_request(method="DELETE", path=path) class _SinksAPI(object): @@ -187,6 +185,7 @@ class _SinksAPI(object): :type client: :class:`~google.cloud.logging.client.Client` :param client: The client used to make API requests. 
""" + def __init__(self, client): self._client = client self.api_request = client._connection.api_request @@ -217,20 +216,22 @@ def list_sinks(self, project, page_size=None, page_token=None): extra_params = {} if page_size is not None: - extra_params['pageSize'] = page_size + extra_params["pageSize"] = page_size - path = '/projects/%s/sinks' % (project,) + path = "/projects/%s/sinks" % (project,) return page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, item_to_value=_item_to_sink, - items_key='sinks', + items_key="sinks", page_token=page_token, - extra_params=extra_params) + extra_params=extra_params, + ) - def sink_create(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_create( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): """API call: create a sink resource. See @@ -258,18 +259,11 @@ def sink_create(self, project, sink_name, filter_, destination, :rtype: dict :returns: The returned (created) resource. """ - target = '/projects/%s/sinks' % (project,) - data = { - 'name': sink_name, - 'filter': filter_, - 'destination': destination, - } - query_params = {'uniqueWriterIdentity': unique_writer_identity} + target = "/projects/%s/sinks" % (project,) + data = {"name": sink_name, "filter": filter_, "destination": destination} + query_params = {"uniqueWriterIdentity": unique_writer_identity} return self.api_request( - method='POST', - path=target, - data=data, - query_params=query_params, + method="POST", path=target, data=data, query_params=query_params ) def sink_get(self, project, sink_name): @@ -287,11 +281,12 @@ def sink_get(self, project, sink_name): :rtype: dict :returns: The JSON sink object returned from the API. """ - target = '/projects/%s/sinks/%s' % (project, sink_name) - return self.api_request(method='GET', path=target) + target = "/projects/%s/sinks/%s" % (project, sink_name) + return self.api_request(method="GET", path=target) - def sink_update(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_update( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): """API call: update a sink resource. See @@ -319,15 +314,12 @@ def sink_update(self, project, sink_name, filter_, destination, :rtype: dict :returns: The returned (updated) resource. """ - target = '/projects/%s/sinks/%s' % (project, sink_name) - data = { - 'name': sink_name, - 'filter': filter_, - 'destination': destination, - } - query_params = {'uniqueWriterIdentity': unique_writer_identity} + target = "/projects/%s/sinks/%s" % (project, sink_name) + data = {"name": sink_name, "filter": filter_, "destination": destination} + query_params = {"uniqueWriterIdentity": unique_writer_identity} return self.api_request( - method='PUT', path=target, query_params=query_params, data=data) + method="PUT", path=target, query_params=query_params, data=data + ) def sink_delete(self, project, sink_name): """API call: delete a sink resource. 
@@ -341,8 +333,8 @@ def sink_delete(self, project, sink_name): :type sink_name: str :param sink_name: the name of the sink """ - target = '/projects/%s/sinks/%s' % (project, sink_name) - self.api_request(method='DELETE', path=target) + target = "/projects/%s/sinks/%s" % (project, sink_name) + self.api_request(method="DELETE", path=target) class _MetricsAPI(object): @@ -354,6 +346,7 @@ class _MetricsAPI(object): :type client: :class:`~google.cloud.logging.client.Client` :param client: The client used to make API requests. """ + def __init__(self, client): self._client = client self.api_request = client._connection.api_request @@ -384,17 +377,18 @@ def list_metrics(self, project, page_size=None, page_token=None): extra_params = {} if page_size is not None: - extra_params['pageSize'] = page_size + extra_params["pageSize"] = page_size - path = '/projects/%s/metrics' % (project,) + path = "/projects/%s/metrics" % (project,) return page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, item_to_value=_item_to_metric, - items_key='metrics', + items_key="metrics", page_token=page_token, - extra_params=extra_params) + extra_params=extra_params, + ) def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. @@ -415,13 +409,9 @@ def metric_create(self, project, metric_name, filter_, description=None): :type description: str :param description: description of the metric. """ - target = '/projects/%s/metrics' % (project,) - data = { - 'name': metric_name, - 'filter': filter_, - 'description': description, - } - self.api_request(method='POST', path=target, data=data) + target = "/projects/%s/metrics" % (project,) + data = {"name": metric_name, "filter": filter_, "description": description} + self.api_request(method="POST", path=target, data=data) def metric_get(self, project, metric_name): """API call: retrieve a metric resource. @@ -438,8 +428,8 @@ def metric_get(self, project, metric_name): :rtype: dict :returns: The JSON metric object returned from the API. """ - target = '/projects/%s/metrics/%s' % (project, metric_name) - return self.api_request(method='GET', path=target) + target = "/projects/%s/metrics/%s" % (project, metric_name) + return self.api_request(method="GET", path=target) def metric_update(self, project, metric_name, filter_, description): """API call: update a metric resource. @@ -463,13 +453,9 @@ def metric_update(self, project, metric_name, filter_, description): :rtype: dict :returns: The returned (updated) resource. """ - target = '/projects/%s/metrics/%s' % (project, metric_name) - data = { - 'name': metric_name, - 'filter': filter_, - 'description': description, - } - return self.api_request(method='PUT', path=target, data=data) + target = "/projects/%s/metrics/%s" % (project, metric_name) + data = {"name": metric_name, "filter": filter_, "description": description} + return self.api_request(method="PUT", path=target, data=data) def metric_delete(self, project, metric_name): """API call: delete a metric resource. @@ -483,8 +469,8 @@ def metric_delete(self, project, metric_name): :type metric_name: str :param metric_name: the name of the metric. 
""" - target = '/projects/%s/metrics/%s' % (project, metric_name) - self.api_request(method='DELETE', path=target) + target = "/projects/%s/metrics/%s" % (project, metric_name) + self.api_request(method="DELETE", path=target) def _item_to_entry(iterator, resource, loggers): diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 1e854664c7d3..5e493b27f757 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -46,13 +46,13 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC -_APPENGINE_FLEXIBLE_ENV_VM = 'GAE_APPENGINE_HOSTNAME' +_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME" """Environment variable set in App Engine when vm:true is set.""" -_APPENGINE_INSTANCE_ID = 'GAE_INSTANCE' +_APPENGINE_INSTANCE_ID = "GAE_INSTANCE" """Environment variable set in App Engine standard and flexible environment.""" -_GKE_CLUSTER_NAME = 'instance/attributes/cluster-name' +_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" """Attribute in metadata server when in GKE environment.""" @@ -92,16 +92,18 @@ class Client(ClientWithProject): _sinks_api = None _metrics_api = None - SCOPE = ('https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/cloud-platform') + SCOPE = ( + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/cloud-platform", + ) """The scopes required for authenticating as a Logging consumer.""" - def __init__(self, project=None, credentials=None, - _http=None, _use_grpc=None): + def __init__(self, project=None, credentials=None, _http=None, _use_grpc=None): super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) + project=project, credentials=credentials, _http=_http + ) self._connection = Connection(self) if _use_grpc is None: self._use_grpc = _USE_GRPC @@ -162,8 +164,14 @@ def logger(self, name): """ return Logger(name, client=self) - def list_entries(self, projects=None, filter_=None, order_by=None, - page_size=None, page_token=None): + def list_entries( + self, + projects=None, + filter_=None, + order_by=None, + page_size=None, + page_token=None, + ): """Return a page of log entries. See @@ -199,8 +207,12 @@ def list_entries(self, projects=None, filter_=None, order_by=None, projects = [self.project] return self.logging_api.list_entries( - projects=projects, filter_=filter_, order_by=order_by, - page_size=page_size, page_token=page_token) + projects=projects, + filter_=filter_, + order_by=order_by, + page_size=page_size, + page_token=page_token, + ) def sink(self, name, filter_=None, destination=None): """Creates a sink bound to the current client. @@ -245,10 +257,9 @@ def list_sinks(self, page_size=None, page_token=None): :class:`~google.cloud.logging.sink.Sink` accessible to the current client. """ - return self.sinks_api.list_sinks( - self.project, page_size, page_token) + return self.sinks_api.list_sinks(self.project, page_size, page_token) - def metric(self, name, filter_=None, description=''): + def metric(self, name, filter_=None, description=""): """Creates a metric bound to the current client. 
:type name: str @@ -289,8 +300,7 @@ def list_metrics(self, page_size=None, page_token=None): :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` accessible to the current client. """ - return self.metrics_api.list_metrics( - self.project, page_size, page_token) + return self.metrics_api.list_metrics(self.project, page_size, page_token) def get_default_handler(self): """Return the default logging handler based on the local environment. @@ -300,16 +310,19 @@ """ gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) - if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or - _APPENGINE_INSTANCE_ID in os.environ): + if ( + _APPENGINE_FLEXIBLE_ENV_VM in os.environ + or _APPENGINE_INSTANCE_ID in os.environ + ): return AppEngineHandler(self) elif gke_cluster_name is not None: return ContainerEngineHandler() else: return CloudLoggingHandler(self) - def setup_logging(self, log_level=logging.INFO, - excluded_loggers=EXCLUDED_LOGGER_DEFAULTS): + def setup_logging( + self, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS + ): """Attach default Stackdriver logging handler to the root logger. This method uses the default log handler, obtained by @@ -328,5 +341,4 @@ itself. """ handler = self.get_default_handler() - setup_logging(handler, log_level=log_level, - excluded_loggers=excluded_loggers) + setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index c2c33020e697..ed1c28163f60 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -28,15 +28,18 @@ from google.cloud._helpers import _datetime_to_rfc3339 -_GLOBAL_RESOURCE = Resource(type='global', labels={}) +_GLOBAL_RESOURCE = Resource(type="global", labels={}) -_LOGGER_TEMPLATE = re.compile(r""" +_LOGGER_TEMPLATE = re.compile( + r""" projects/ # static prefix (?P<project>[^/]+) # initial letter, wordchars + hyphen /logs/ # static midfix (?P<name>[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) +""", + re.VERBOSE, +) def logger_name_from_path(path): @@ -62,28 +65,28 @@ def _int_or_none(value): _LOG_ENTRY_FIELDS = ( # (name, default) - ('log_name', None), - ('labels', None), - ('insert_id', None), - ('severity', None), - ('http_request', None), - ('timestamp', None), - ('resource', _GLOBAL_RESOURCE), - ('trace', None), - ('span_id', None), - ('trace_sampled', None), - ('source_location', None), - ('operation', None), - ('logger', None), - ('payload', None), + ("log_name", None), + ("labels", None), + ("insert_id", None), + ("severity", None), + ("http_request", None), + ("timestamp", None), + ("resource", _GLOBAL_RESOURCE), + ("trace", None), + ("span_id", None), + ("trace_sampled", None), + ("source_location", None), + ("operation", None), + ("logger", None), + ("payload", None), ) _LogEntryTuple = collections.namedtuple( - 'LogEntry', (field for field, _ in _LOG_ENTRY_FIELDS)) + "LogEntry", (field for field, _ in _LOG_ENTRY_FIELDS) +) -_LogEntryTuple.__new__.__defaults__ = tuple( - default for _, default in _LOG_ENTRY_FIELDS) +_LogEntryTuple.__new__.__defaults__ = tuple(default for _, default in _LOG_ENTRY_FIELDS) _LOG_ENTRY_PARAM_DOCSTRING = """\ @@ -142,10 +145,14 @@ def _int_or_none(value): class LogEntry(_LogEntryTuple): - __doc__ = """ + __doc__ = ( + """ Log entry.
- """ + _LOG_ENTRY_PARAM_DOCSTRING + _LOG_ENTRY_SEE_ALSO_DOCSTRING + """ + + _LOG_ENTRY_PARAM_DOCSTRING + + _LOG_ENTRY_SEE_ALSO_DOCSTRING + ) received_timestamp = None @@ -176,29 +183,29 @@ def from_api_repr(cls, resource, client, loggers=None): """ if loggers is None: loggers = {} - logger_fullname = resource['logName'] + logger_fullname = resource["logName"] logger = loggers.get(logger_fullname) if logger is None: logger_name = logger_name_from_path(logger_fullname) logger = loggers[logger_fullname] = client.logger(logger_name) payload = cls._extract_payload(resource) - insert_id = resource.get('insertId') - timestamp = resource.get('timestamp') + insert_id = resource.get("insertId") + timestamp = resource.get("timestamp") if timestamp is not None: timestamp = _rfc3339_nanos_to_datetime(timestamp) - labels = resource.get('labels') - severity = resource.get('severity') - http_request = resource.get('httpRequest') - trace = resource.get('trace') - span_id = resource.get('spanId') - trace_sampled = resource.get('traceSampled') - source_location = resource.get('sourceLocation') + labels = resource.get("labels") + severity = resource.get("severity") + http_request = resource.get("httpRequest") + trace = resource.get("trace") + span_id = resource.get("spanId") + trace_sampled = resource.get("traceSampled") + source_location = resource.get("sourceLocation") if source_location is not None: - line = source_location.pop('line', None) - source_location['line'] = _int_or_none(line) - operation = resource.get('operation') + line = source_location.pop("line", None) + source_location["line"] = _int_or_none(line) + operation = resource.get("operation") - monitored_resource_dict = resource.get('resource') + monitored_resource_dict = resource.get("resource") monitored_resource = None if monitored_resource_dict is not None: monitored_resource = Resource._from_dict(monitored_resource_dict) @@ -219,7 +226,7 @@ def from_api_repr(cls, resource, client, loggers=None): logger=logger, payload=payload, ) - received = resource.get('receiveTimestamp') + received = resource.get("receiveTimestamp") if received is not None: inst.received_timestamp = _rfc3339_nanos_to_datetime(received) return inst @@ -229,94 +236,109 @@ def to_api_repr(self): """ info = {} if self.log_name is not None: - info['logName'] = self.log_name + info["logName"] = self.log_name if self.resource is not None: - info['resource'] = self.resource._to_dict() + info["resource"] = self.resource._to_dict() if self.labels is not None: - info['labels'] = self.labels + info["labels"] = self.labels if self.insert_id is not None: - info['insertId'] = self.insert_id + info["insertId"] = self.insert_id if self.severity is not None: - info['severity'] = self.severity + info["severity"] = self.severity if self.http_request is not None: - info['httpRequest'] = self.http_request + info["httpRequest"] = self.http_request if self.timestamp is not None: - info['timestamp'] = _datetime_to_rfc3339(self.timestamp) + info["timestamp"] = _datetime_to_rfc3339(self.timestamp) if self.trace is not None: - info['trace'] = self.trace + info["trace"] = self.trace if self.span_id is not None: - info['spanId'] = self.span_id + info["spanId"] = self.span_id if self.trace_sampled is not None: - info['traceSampled'] = self.trace_sampled + info["traceSampled"] = self.trace_sampled if self.source_location is not None: source_location = self.source_location.copy() - source_location['line'] = str(source_location.pop('line', 0)) - info['sourceLocation'] = source_location + 
source_location["line"] = str(source_location.pop("line", 0)) + info["sourceLocation"] = source_location if self.operation is not None: - info['operation'] = self.operation + info["operation"] = self.operation return info class TextEntry(LogEntry): - __doc__ = """ + __doc__ = ( + """ Log entry with text payload. - """ + _LOG_ENTRY_PARAM_DOCSTRING + """ + """ + + _LOG_ENTRY_PARAM_DOCSTRING + + """ :type payload: str | unicode :param payload: payload for the log entry. - """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING + """ + + _LOG_ENTRY_SEE_ALSO_DOCSTRING + ) @classmethod def _extract_payload(cls, resource): """Helper for :meth:`from_api_repr`""" - return resource['textPayload'] + return resource["textPayload"] def to_api_repr(self): """API repr (JSON format) for entry. """ info = super(TextEntry, self).to_api_repr() - info['textPayload'] = self.payload + info["textPayload"] = self.payload return info class StructEntry(LogEntry): - __doc__ = """ + __doc__ = ( + """ Log entry with JSON payload. - """ + _LOG_ENTRY_PARAM_DOCSTRING + """ + """ + + _LOG_ENTRY_PARAM_DOCSTRING + + """ :type payload: dict :param payload: payload for the log entry. - """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING + """ + + _LOG_ENTRY_SEE_ALSO_DOCSTRING + ) @classmethod def _extract_payload(cls, resource): """Helper for :meth:`from_api_repr`""" - return resource['jsonPayload'] + return resource["jsonPayload"] def to_api_repr(self): """API repr (JSON format) for entry. """ info = super(StructEntry, self).to_api_repr() - info['jsonPayload'] = self.payload + info["jsonPayload"] = self.payload return info class ProtobufEntry(LogEntry): - __doc__ = """ + __doc__ = ( + """ Log entry with protobuf message payload. - """ + _LOG_ENTRY_PARAM_DOCSTRING + """ + """ + + _LOG_ENTRY_PARAM_DOCSTRING + + """ :type payload: protobuf message :param payload: payload for the log entry. - """ + _LOG_ENTRY_SEE_ALSO_DOCSTRING + """ + + _LOG_ENTRY_SEE_ALSO_DOCSTRING + ) @classmethod def _extract_payload(cls, resource): """Helper for :meth:`from_api_repr`""" - return resource['protoPayload'] + return resource["protoPayload"] @property def payload_pb(self): @@ -332,7 +354,7 @@ def to_api_repr(self): """API repr (JSON format) for entry. 
""" info = super(ProtobufEntry, self).to_api_repr() - info['protoPayload'] = MessageToDict(self.payload) + info["protoPayload"] = MessageToDict(self.payload) return info def parse_message(self, message): diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py index 59562f67ebf0..67b96c95e907 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -15,10 +15,13 @@ """Python :mod:`logging` handlers for Google Cloud Logging.""" from google.cloud.logging.handlers.app_engine import AppEngineHandler -from google.cloud.logging.handlers.container_engine import ( - ContainerEngineHandler) +from google.cloud.logging.handlers.container_engine import ContainerEngineHandler from google.cloud.logging.handlers.handlers import CloudLoggingHandler from google.cloud.logging.handlers.handlers import setup_logging -__all__ = ['AppEngineHandler', 'CloudLoggingHandler', 'ContainerEngineHandler', - 'setup_logging'] +__all__ = [ + "AppEngineHandler", + "CloudLoggingHandler", + "ContainerEngineHandler", + "setup_logging", +] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index 03dc6bfa4f3e..d65a2690f8f7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -31,12 +31,11 @@ # in the tests. webapp2 = None -from google.cloud.logging.handlers.middleware.request import ( - _get_django_request) +from google.cloud.logging.handlers.middleware.request import _get_django_request -_DJANGO_TRACE_HEADER = 'HTTP_X_CLOUD_TRACE_CONTEXT' -_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' -_WEBAPP2_TRACE_HEADER = 'X-CLOUD-TRACE-CONTEXT' +_DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" +_FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" +_WEBAPP2_TRACE_HEADER = "X-CLOUD-TRACE-CONTEXT" def format_stackdriver_json(record, message): @@ -48,13 +47,10 @@ def format_stackdriver_json(record, message): subsecond, second = math.modf(record.created) payload = { - 'message': message, - 'timestamp': { - 'seconds': int(second), - 'nanos': int(subsecond * 1e9), - }, - 'thread': record.thread, - 'severity': record.levelname, + "message": message, + "timestamp": {"seconds": int(second), "nanos": int(subsecond * 1e9)}, + "thread": record.thread, + "severity": record.levelname, } return json.dumps(payload) @@ -74,7 +70,7 @@ def get_trace_id_from_flask(): if header is None: return None - trace_id = header.split('/', 1)[0] + trace_id = header.split("/", 1)[0] return trace_id @@ -101,7 +97,7 @@ def get_trace_id_from_webapp2(): if header is None: return None - trace_id = header.split('/', 1)[0] + trace_id = header.split("/", 1)[0] return trace_id @@ -121,7 +117,7 @@ def get_trace_id_from_django(): if header is None: return None - trace_id = header.split('/', 1)[0] + trace_id = header.split("/", 1)[0] return trace_id @@ -132,9 +128,11 @@ def get_trace_id(): :rtype: str :returns: TraceID in HTTP request headers. 
""" - checkers = (get_trace_id_from_django, - get_trace_id_from_flask, - get_trace_id_from_webapp2) + checkers = ( + get_trace_id_from_django, + get_trace_id_from_flask, + get_trace_id_from_webapp2, + ) for checker in checkers: trace_id = checker() diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py index 5f1334300e80..cf6aa304eae7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py @@ -25,14 +25,14 @@ from google.cloud.logging.handlers.transports import BackgroundThreadTransport from google.cloud.logging.resource import Resource -_DEFAULT_GAE_LOGGER_NAME = 'app' +_DEFAULT_GAE_LOGGER_NAME = "app" -_GAE_PROJECT_ENV_FLEX = 'GCLOUD_PROJECT' -_GAE_PROJECT_ENV_STANDARD = 'GOOGLE_CLOUD_PROJECT' -_GAE_SERVICE_ENV = 'GAE_SERVICE' -_GAE_VERSION_ENV = 'GAE_VERSION' +_GAE_PROJECT_ENV_FLEX = "GCLOUD_PROJECT" +_GAE_PROJECT_ENV_STANDARD = "GOOGLE_CLOUD_PROJECT" +_GAE_SERVICE_ENV = "GAE_SERVICE" +_GAE_VERSION_ENV = "GAE_VERSION" -_TRACE_ID_LABEL = 'appengine.googleapis.com/trace_id' +_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" class AppEngineHandler(logging.StreamHandler): @@ -48,18 +48,18 @@ class AppEngineHandler(logging.StreamHandler): :class:`.BackgroundThreadTransport` will be used. """ - def __init__(self, client, - name=_DEFAULT_GAE_LOGGER_NAME, - transport=BackgroundThreadTransport): + def __init__( + self, client, name=_DEFAULT_GAE_LOGGER_NAME, transport=BackgroundThreadTransport + ): super(AppEngineHandler, self).__init__() self.name = name self.client = client self.transport = transport(client, name) self.project_id = os.environ.get( - _GAE_PROJECT_ENV_FLEX, - os.environ.get(_GAE_PROJECT_ENV_STANDARD, '')) - self.module_id = os.environ.get(_GAE_SERVICE_ENV, '') - self.version_id = os.environ.get(_GAE_VERSION_ENV, '') + _GAE_PROJECT_ENV_FLEX, os.environ.get(_GAE_PROJECT_ENV_STANDARD, "") + ) + self.module_id = os.environ.get(_GAE_SERVICE_ENV, "") + self.version_id = os.environ.get(_GAE_VERSION_ENV, "") self.resource = self.get_gae_resource() def get_gae_resource(self): @@ -69,11 +69,11 @@ def get_gae_resource(self): :returns: Monitored resource for GAE. 
""" gae_resource = Resource( - type='gae_app', + type="gae_app", labels={ - 'project_id': self.project_id, - 'module_id': self.module_id, - 'version_id': self.version_id, + "project_id": self.project_id, + "module_id": self.module_id, + "version_id": self.version_id, }, ) return gae_resource @@ -107,14 +107,11 @@ def emit(self, record): """ message = super(AppEngineHandler, self).format(record) gae_labels = self.get_gae_labels() - trace_id = ('projects/%s/traces/%s' % (self.project_id, - gae_labels[_TRACE_ID_LABEL]) - if _TRACE_ID_LABEL in gae_labels - else None) + trace_id = ( + "projects/%s/traces/%s" % (self.project_id, gae_labels[_TRACE_ID_LABEL]) + if _TRACE_ID_LABEL in gae_labels + else None + ) self.transport.send( - record, - message, - resource=self.resource, - labels=gae_labels, - trace=trace_id, + record, message, resource=self.resource, labels=gae_labels, trace=trace_id ) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py index e679840fe31d..aee214a09bb6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py @@ -19,13 +19,9 @@ from google.cloud.logging.handlers.transports import BackgroundThreadTransport from google.cloud.logging.logger import _GLOBAL_RESOURCE -DEFAULT_LOGGER_NAME = 'python' +DEFAULT_LOGGER_NAME = "python" -EXCLUDED_LOGGER_DEFAULTS = ( - 'google.cloud', - 'google.auth', - 'google_auth_httplib2', -) +EXCLUDED_LOGGER_DEFAULTS = ("google.cloud", "google.auth", "google_auth_httplib2") class CloudLoggingHandler(logging.StreamHandler): @@ -81,11 +77,14 @@ class CloudLoggingHandler(logging.StreamHandler): """ - def __init__(self, client, - name=DEFAULT_LOGGER_NAME, - transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, - labels=None): + def __init__( + self, + client, + name=DEFAULT_LOGGER_NAME, + transport=BackgroundThreadTransport, + resource=_GLOBAL_RESOURCE, + labels=None, + ): super(CloudLoggingHandler, self).__init__() self.name = name self.client = client @@ -104,15 +103,12 @@ def emit(self, record): :param record: The record to be logged. 
""" message = super(CloudLoggingHandler, self).format(record) - self.transport.send( - record, - message, - resource=self.resource, - labels=self.labels) + self.transport.send(record, message, resource=self.resource, labels=self.labels) -def setup_logging(handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, - log_level=logging.INFO): +def setup_logging( + handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO +): """Attach a logging handler to the Python root logger Excludes loggers that this library itself uses to avoid diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py index 94f6feccc358..d8ba3016f724 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py @@ -14,4 +14,4 @@ from google.cloud.logging.handlers.middleware.request import RequestMiddleware -__all__ = ['RequestMiddleware'] +__all__ = ["RequestMiddleware"] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py index 3bede377ecce..33bc278fcf60 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py @@ -30,7 +30,7 @@ def _get_django_request(): :rtype: str :returns: Django request. """ - return getattr(_thread_locals, 'request', None) + return getattr(_thread_locals, "request", None) try: diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py index d07bdf9edc02..3c6cc214e5e3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py @@ -23,6 +23,7 @@ from google.cloud.logging.handlers.transports.base import Transport from google.cloud.logging.handlers.transports.sync import SyncTransport from google.cloud.logging.handlers.transports.background_thread import ( - BackgroundThreadTransport) + BackgroundThreadTransport, +) -__all__ = ['BackgroundThreadTransport', 'SyncTransport', 'Transport'] +__all__ = ["BackgroundThreadTransport", "SyncTransport", "Transport"] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 852e32dd42bb..1eb6d212af5d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -33,7 +33,7 @@ _DEFAULT_GRACE_PERIOD = 5.0 # Seconds _DEFAULT_MAX_BATCH_SIZE = 10 _DEFAULT_MAX_LATENCY = 0 # Seconds -_WORKER_THREAD_NAME = 'google.cloud.logging.Worker' +_WORKER_THREAD_NAME = "google.cloud.logging.Worker" _WORKER_TERMINATOR = object() _LOGGER = logging.getLogger(__name__) @@ -94,9 +94,13 @@ class _Worker(object): before sending them to the server. 
""" - def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD, - max_batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY): + def __init__( + self, + cloud_logger, + grace_period=_DEFAULT_GRACE_PERIOD, + max_batch_size=_DEFAULT_MAX_BATCH_SIZE, + max_latency=_DEFAULT_MAX_LATENCY, + ): self._cloud_logger = cloud_logger self._grace_period = grace_period self._max_batch_size = max_batch_size @@ -116,10 +120,9 @@ def _safely_commit_batch(self, batch): try: if total_logs > 0: batch.commit() - _LOGGER.debug('Submitted %d logs', total_logs) + _LOGGER.debug("Submitted %d logs", total_logs) except Exception: - _LOGGER.error( - 'Failed to submit %d logs.', total_logs, exc_info=True) + _LOGGER.error("Failed to submit %d logs.", total_logs, exc_info=True) def _thread_main(self): """The entry point for the worker thread. @@ -127,14 +130,16 @@ def _thread_main(self): Pulls pending log entries off the queue and writes them in batches to the Cloud Logger. """ - _LOGGER.debug('Background thread started.') + _LOGGER.debug("Background thread started.") quit_ = False while True: batch = self._cloud_logger.batch() items = _get_many( - self._queue, max_items=self._max_batch_size, - max_latency=self._max_latency) + self._queue, + max_items=self._max_batch_size, + max_latency=self._max_latency, + ) for item in items: if item is _WORKER_TERMINATOR: @@ -152,7 +157,7 @@ def _thread_main(self): if quit_: break - _LOGGER.debug('Background thread exited gracefully.') + _LOGGER.debug("Background thread exited gracefully.") def start(self): """Starts the background thread. @@ -165,8 +170,8 @@ def start(self): return self._thread = threading.Thread( - target=self._thread_main, - name=_WORKER_THREAD_NAME) + target=self._thread_main, name=_WORKER_THREAD_NAME + ) self._thread.daemon = True self._thread.start() atexit.register(self._main_thread_terminated) @@ -196,9 +201,7 @@ def stop(self, grace_period=None): self._queue.put_nowait(_WORKER_TERMINATOR) if grace_period is not None: - print( - 'Waiting up to %d seconds.' % (grace_period,), - file=sys.stderr) + print("Waiting up to %d seconds." % (grace_period,), file=sys.stderr) self._thread.join(timeout=grace_period) @@ -218,19 +221,22 @@ def _main_thread_terminated(self): if not self._queue.empty(): print( - 'Program shutting down, attempting to send %d queued log ' - 'entries to Stackdriver Logging...' % (self._queue.qsize(),), - file=sys.stderr) + "Program shutting down, attempting to send %d queued log " + "entries to Stackdriver Logging..." % (self._queue.qsize(),), + file=sys.stderr, + ) if self.stop(self._grace_period): - print('Sent all pending logs.', file=sys.stderr) + print("Sent all pending logs.", file=sys.stderr) else: print( - 'Failed to send %d pending logs.' % (self._queue.qsize(),), - file=sys.stderr) + "Failed to send %d pending logs." % (self._queue.qsize(),), + file=sys.stderr, + ) - def enqueue(self, record, message, resource=None, labels=None, - trace=None, span_id=None): + def enqueue( + self, record, message, resource=None, labels=None, trace=None, span_id=None + ): """Queues a log entry to be written by the background thread. :type record: :class:`logging.LogRecord` @@ -253,17 +259,16 @@ def enqueue(self, record, message, resource=None, labels=None, :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. 
""" - self._queue.put_nowait({ - 'info': { - 'message': message, - 'python_logger': record.name, - }, - 'severity': record.levelname, - 'resource': resource, - 'labels': labels, - 'trace': trace, - 'span_id': span_id, - }) + self._queue.put_nowait( + { + "info": {"message": message, "python_logger": record.name}, + "severity": record.levelname, + "resource": resource, + "labels": labels, + "trace": trace, + "span_id": span_id, + } + ) def flush(self): """Submit any pending log records.""" @@ -295,19 +300,27 @@ class BackgroundThreadTransport(Transport): before sending them to the server. """ - def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD, - batch_size=_DEFAULT_MAX_BATCH_SIZE, - max_latency=_DEFAULT_MAX_LATENCY): + def __init__( + self, + client, + name, + grace_period=_DEFAULT_GRACE_PERIOD, + batch_size=_DEFAULT_MAX_BATCH_SIZE, + max_latency=_DEFAULT_MAX_LATENCY, + ): self.client = client logger = self.client.logger(name) - self.worker = _Worker(logger, - grace_period=grace_period, - max_batch_size=batch_size, - max_latency=max_latency) + self.worker = _Worker( + logger, + grace_period=grace_period, + max_batch_size=batch_size, + max_latency=max_latency, + ) self.worker.start() - def send(self, record, message, resource=None, labels=None, - trace=None, span_id=None): + def send( + self, record, message, resource=None, labels=None, trace=None, span_id=None + ): """Overrides Transport.send(). :type record: :class:`logging.LogRecord` @@ -330,8 +343,14 @@ def send(self, record, message, resource=None, labels=None, :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. """ - self.worker.enqueue(record, message, resource=resource, labels=labels, - trace=trace, span_id=span_id) + self.worker.enqueue( + record, + message, + resource=resource, + labels=labels, + trace=trace, + span_id=span_id, + ) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py index 9e40cc8a2194..7e24cc0206ca 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py @@ -22,8 +22,9 @@ class Transport(object): client and name object, and must override :meth:`send`. """ - def send(self, record, message, resource=None, labels=None, - trace=None, span_id=None): + def send( + self, record, message, resource=None, labels=None, trace=None, span_id=None + ): """Transport send to be implemented by subclasses. :type record: :class:`logging.LogRecord` diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index 589b15db5f6a..861f1ab3fdf7 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -29,8 +29,9 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send(self, record, message, resource=None, labels=None, - trace=None, span_id=None): + def send( + self, record, message, resource=None, labels=None, trace=None, span_id=None + ): """Overrides transport.send(). 
:type record: :class:`logging.LogRecord` @@ -46,10 +47,12 @@ def send(self, record, message, resource=None, labels=None, :type labels: dict :param labels: (Optional) Mapping of labels for the entry. """ - info = {'message': message, 'python_logger': record.name} - self.logger.log_struct(info, - severity=record.levelname, - resource=resource, - labels=labels, - trace=trace, - span_id=span_id) + info = {"message": message, "python_logger": record.name} + self.logger.log_struct( + info, + severity=record.levelname, + resource=resource, + labels=labels, + trace=trace, + span_id=span_id, + ) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index acdb0940e4b1..104d09e37a37 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -21,23 +21,23 @@ from google.cloud.logging.resource import Resource -_GLOBAL_RESOURCE = Resource(type='global', labels={}) +_GLOBAL_RESOURCE = Resource(type="global", labels={}) _OUTBOUND_ENTRY_FIELDS = ( # (name, default) - ('type_', None), - ('log_name', None), - ('payload', None), - ('labels', None), - ('insert_id', None), - ('severity', None), - ('http_request', None), - ('timestamp', None), - ('resource', _GLOBAL_RESOURCE), - ('trace', None), - ('span_id', None), - ('trace_sampled', None), - ('source_location', None), + ("type_", None), + ("log_name", None), + ("payload", None), + ("labels", None), + ("insert_id", None), + ("severity", None), + ("http_request", None), + ("timestamp", None), + ("resource", _GLOBAL_RESOURCE), + ("trace", None), + ("span_id", None), + ("trace_sampled", None), + ("source_location", None), ) @@ -58,6 +58,7 @@ class Logger(object): :param labels: (optional) mapping of default labels for entries written via this logger. """ + def __init__(self, name, client, labels=None): self.name = name self._client = client @@ -76,12 +77,12 @@ def project(self): @property def full_name(self): """Fully-qualified name used in logging APIs""" - return 'projects/%s/logs/%s' % (self.project, self.name) + return "projects/%s/logs/%s" % (self.project, self.name) @property def path(self): """URI path for use in logging APIs""" - return '/%s' % (self.full_name,) + return "/%s" % (self.full_name,) def _require_client(self, client): """Check client or verify over-ride. @@ -118,9 +119,9 @@ def _do_log(self, client, _entry_class, payload=None, **kw): client = self._require_client(client) # Apply defaults - kw['log_name'] = kw.pop('log_name', self.full_name) - kw['labels'] = kw.pop('labels', self.labels) - kw['resource'] = kw.pop('resource', _GLOBAL_RESOURCE) + kw["log_name"] = kw.pop("log_name", self.full_name) + kw["labels"] = kw.pop("labels", self.labels) + kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE) if payload is not None: entry = _entry_class(payload=payload, **kw) @@ -221,8 +222,14 @@ def delete(self, client=None): client = self._require_client(client) client.logging_api.logger_delete(self.project, self.name) - def list_entries(self, projects=None, filter_=None, order_by=None, - page_size=None, page_token=None): + def list_entries( + self, + projects=None, + filter_=None, + order_by=None, + page_size=None, + page_token=None, + ): """Return a page of log entries. See @@ -254,14 +261,18 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :returns: Iterator of log entries accessible to the current logger. See :class:`~google.cloud.logging.entries.LogEntry`. 
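# The full_name/path properties above are plain string formatting; for a
# sample project 'my-project' and logger name 'syslog':
project, name = 'my-project', 'syslog'
full_name = 'projects/%s/logs/%s' % (project, name)
assert full_name == 'projects/my-project/logs/syslog'
assert '/%s' % (full_name,) == '/projects/my-project/logs/syslog'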
""" - log_filter = 'logName=%s' % (self.full_name,) + log_filter = "logName=%s" % (self.full_name,) if filter_ is not None: - filter_ = '%s AND %s' % (filter_, log_filter) + filter_ = "%s AND %s" % (filter_, log_filter) else: filter_ = log_filter return self.client.list_entries( - projects=projects, filter_=filter_, order_by=order_by, - page_size=page_size, page_token=page_token) + projects=projects, + filter_=filter_, + order_by=order_by, + page_size=page_size, + page_token=page_token, + ) class Batch(object): @@ -284,6 +295,7 @@ class Batch(object): if explicitly set to None. If no entries' resource are set to None, this parameter will be ignored on the server. """ + def __init__(self, logger, client, resource=None): self.logger = logger self.entries = [] @@ -353,15 +365,13 @@ def commit(self, client=None): if client is None: client = self.client - kwargs = { - 'logger_name': self.logger.full_name, - } + kwargs = {"logger_name": self.logger.full_name} if self.resource is not None: - kwargs['resource'] = self.resource._to_dict() + kwargs["resource"] = self.resource._to_dict() if self.logger.labels is not None: - kwargs['labels'] = self.logger.labels + kwargs["labels"] = self.logger.labels entries = [entry.to_api_repr() for entry in self.entries] diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging/metric.py index a85f9271c1ee..3fb91bb52f0a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging/metric.py @@ -38,7 +38,8 @@ class Metric(object): :type description: str :param description: an optional description of the metric. """ - def __init__(self, name, filter_=None, client=None, description=''): + + def __init__(self, name, filter_=None, client=None, description=""): self.name = name self._client = client self.filter_ = filter_ @@ -57,12 +58,12 @@ def project(self): @property def full_name(self): """Fully-qualified name used in metric APIs""" - return 'projects/%s/metrics/%s' % (self.project, self.name) + return "projects/%s/metrics/%s" % (self.project, self.name) @property def path(self): """URL path for the metric's APIs""" - return '/%s' % (self.full_name,) + return "/%s" % (self.full_name,) @classmethod def from_api_repr(cls, resource, client): @@ -78,11 +79,10 @@ def from_api_repr(cls, resource, client): :rtype: :class:`google.cloud.logging.metric.Metric` :returns: Metric parsed from ``resource``. """ - metric_name = resource['name'] - filter_ = resource['filter'] - description = resource.get('description', '') - return cls(metric_name, filter_, client=client, - description=description) + metric_name = resource["name"] + filter_ = resource["filter"] + description = resource.get("description", "") + return cls(metric_name, filter_, client=client, description=description) def _require_client(self, client): """Check client or verify over-ride. 
@@ -112,7 +112,8 @@ def create(self, client=None): """ client = self._require_client(client) client.metrics_api.metric_create( - self.project, self.name, self.filter_, self.description) + self.project, self.name, self.filter_, self.description + ) def exists(self, client=None): """API call: test for the existence of the metric via a GET request @@ -150,8 +151,8 @@ def reload(self, client=None): """ client = self._require_client(client) data = client.metrics_api.metric_get(self.project, self.name) - self.description = data.get('description', '') - self.filter_ = data['filter'] + self.description = data.get("description", "") + self.filter_ = data["filter"] def update(self, client=None): """API call: update metric configuration via a PUT request @@ -166,7 +167,8 @@ def update(self, client=None): """ client = self._require_client(client) client.metrics_api.metric_update( - self.project, self.name, self.filter_, self.description) + self.project, self.name, self.filter_, self.description + ) def delete(self, client=None): """API call: delete a metric via a DELETE request diff --git a/packages/google-cloud-logging/google/cloud/logging/resource.py b/packages/google-cloud-logging/google/cloud/logging/resource.py index 3f5bb4490364..dda59ca09f61 100644 --- a/packages/google-cloud-logging/google/cloud/logging/resource.py +++ b/packages/google-cloud-logging/google/cloud/logging/resource.py @@ -17,7 +17,7 @@ import collections -class Resource(collections.namedtuple('Resource', 'type labels')): +class Resource(collections.namedtuple("Resource", "type labels")): """A monitored resource identified by specifying values for all labels. :type type: str @@ -27,6 +27,7 @@ class Resource(collections.namedtuple('Resource', 'type labels')): :param labels: A mapping from label names to values for all labels enumerated in the associated :class:`ResourceDescriptor`. """ + __slots__ = () @classmethod @@ -40,10 +41,7 @@ def _from_dict(cls, info): :rtype: :class:`Resource` :returns: A resource object. """ - return cls( - type=info['type'], - labels=info.get('labels', {}), - ) + return cls(type=info["type"], labels=info.get("labels", {})) def _to_dict(self): """Build a dictionary ready to be serialized to the JSON format. @@ -52,7 +50,4 @@ def _to_dict(self): :returns: A dict representation of the object that can be written to the API. """ - return { - 'type': self.type, - 'labels': self.labels, - } + return {"type": self.type, "labels": self.labels} diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py index 2e9b7ef62e13..2a7d46fdbb81 100644 --- a/packages/google-cloud-logging/google/cloud/logging/sink.py +++ b/packages/google-cloud-logging/google/cloud/logging/sink.py @@ -39,6 +39,7 @@ class Sink(object): :param client: A client which holds credentials and project configuration for the sink (which requires a project). 
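# Round trip of the Resource (de)serialization defined above: _from_dict() and
# _to_dict() are inverses for well-formed payloads. Reproduced standalone here,
# with the same namedtuple, purely for illustration.
import collections

Resource = collections.namedtuple('Resource', 'type labels')
info = {'type': 'gce_instance', 'labels': {'zone': 'us-east1-b'}}
res = Resource(type=info['type'], labels=info.get('labels', {}))
assert {'type': res.type, 'labels': res.labels} == info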
""" + def __init__(self, name, filter_=None, destination=None, client=None): self.name = name self.filter_ = filter_ @@ -59,12 +60,12 @@ def project(self): @property def full_name(self): """Fully-qualified name used in sink APIs""" - return 'projects/%s/sinks/%s' % (self.project, self.name) + return "projects/%s/sinks/%s" % (self.project, self.name) @property def path(self): """URL path for the sink's APIs""" - return '/%s' % (self.full_name) + return "/%s" % (self.full_name) @property def writer_identity(self): @@ -73,9 +74,9 @@ def writer_identity(self): def _update_from_api_repr(self, resource): """Helper for API methods returning sink resources.""" - self.destination = resource['destination'] - self.filter_ = resource.get('filter') - self._writer_identity = resource.get('writerIdentity') + self.destination = resource["destination"] + self.filter_ = resource.get("filter") + self._writer_identity = resource.get("writerIdentity") @classmethod def from_api_repr(cls, resource, client): @@ -94,7 +95,7 @@ def from_api_repr(cls, resource, client): project from the resource does not agree with the project from the client. """ - sink_name = resource['name'] + sink_name = resource["name"] instance = cls(sink_name, client=client) instance._update_from_api_repr(resource) return instance @@ -132,7 +133,10 @@ def create(self, client=None, unique_writer_identity=False): """ client = self._require_client(client) resource = client.sinks_api.sink_create( - self.project, self.name, self.filter_, self.destination, + self.project, + self.name, + self.filter_, + self.destination, unique_writer_identity=unique_writer_identity, ) self._update_from_api_repr(resource) @@ -193,7 +197,10 @@ def update(self, client=None, unique_writer_identity=False): """ client = self._require_client(client) resource = client.sinks_api.sink_update( - self.project, self.name, self.filter_, self.destination, + self.project, + self.name, + self.filter_, + self.destination, unique_writer_identity=unique_writer_identity, ) self._update_from_api_repr(resource) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py index 536e5dd24330..964c99572fd6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -37,9 +37,9 @@ class MetricsServiceV2Client(metrics_service_v2_client.MetricsServiceV2Client): __all__ = ( - 'enums', - 'types', - 'LoggingServiceV2Client', - 'ConfigServiceV2Client', - 'MetricsServiceV2Client', + "enums", + "types", + "LoggingServiceV2Client", + "ConfigServiceV2Client", + "MetricsServiceV2Client", ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index b38c432ad046..4e5aec994f23 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -40,8 +40,7 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-logging', ).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version class ConfigServiceV2Client(object): @@ -50,12 +49,12 @@ class ConfigServiceV2Client(object): Logging. 
""" - SERVICE_ADDRESS = 'logging.googleapis.com:443' + SERVICE_ADDRESS = "logging.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.logging.v2.ConfigServiceV2' + _INTERFACE_NAME = "google.logging.v2.ConfigServiceV2" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -71,9 +70,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: ConfigServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -82,34 +80,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - 'projects/{project}', - project=project, + "projects/{project}", project=project ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - 'projects/{project}/sinks/{sink}', - project=project, - sink=sink, + "projects/{project}/sinks/{sink}", project=project, sink=sink ) @classmethod def exclusion_path(cls, project, exclusion): """Return a fully-qualified exclusion string.""" return google.api_core.path_template.expand( - 'projects/{project}/exclusions/{exclusion}', + "projects/{project}/exclusions/{exclusion}", project=project, exclusion=exclusion, ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -143,18 +140,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = config_service_v2_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -163,25 +161,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=config_service_v2_grpc_transport. - ConfigServiceV2GrpcTransport, + default_class=config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) self.transport = transport else: self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -191,7 +188,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -200,12 +198,14 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def list_sinks(self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_sinks( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists sinks. @@ -267,38 +267,41 @@ def list_sinks(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_sinks' not in self._inner_api_calls: + if "list_sinks" not in self._inner_api_calls: self._inner_api_calls[ - 'list_sinks'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_sinks, - default_retry=self._method_configs['ListSinks'].retry, - default_timeout=self._method_configs['ListSinks'].timeout, - client_info=self._client_info, - ) + "list_sinks" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_sinks, + default_retry=self._method_configs["ListSinks"].retry, + default_timeout=self._method_configs["ListSinks"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.ListSinksRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_sinks'], + self._inner_api_calls["list_sinks"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='sinks', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="sinks", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def get_sink(self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def get_sink( + self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets a sink. @@ -342,26 +345,30 @@ def get_sink(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
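# The caching idiom repeated throughout these clients, reduced to its essence:
# wrap the raw transport method once (the real wrapper,
# google.api_core.gapic_v1.method.wrap_method(), adds retry/timeout defaults),
# memoize the wrapped callable, then reuse it for every request. The wrapper
# below is a simplified stand-in.
_inner_api_calls = {}

def call_api(name, transport_method, request):
    if name not in _inner_api_calls:
        def wrapped(req):
            # Stand-in: the real wrapping applies default_retry/default_timeout.
            return transport_method(req)
        _inner_api_calls[name] = wrapped
    return _inner_api_calls[name](request)

result = call_api('echo', lambda r: r, {'sink_name': 'projects/p/sinks/s'})
assert result == {'sink_name': 'projects/p/sinks/s'}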
- if 'get_sink' not in self._inner_api_calls: + if "get_sink" not in self._inner_api_calls: self._inner_api_calls[ - 'get_sink'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_sink, - default_retry=self._method_configs['GetSink'].retry, - default_timeout=self._method_configs['GetSink'].timeout, - client_info=self._client_info, - ) + "get_sink" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_sink, + default_retry=self._method_configs["GetSink"].retry, + default_timeout=self._method_configs["GetSink"].timeout, + client_info=self._client_info, + ) - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name, ) - return self._inner_api_calls['get_sink']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def create_sink(self, - parent, - sink, - unique_writer_identity=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) + return self._inner_api_calls["get_sink"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_sink( + self, + parent, + sink, + unique_writer_identity=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the @@ -428,31 +435,33 @@ def create_sink(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_sink' not in self._inner_api_calls: + if "create_sink" not in self._inner_api_calls: self._inner_api_calls[ - 'create_sink'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_sink, - default_retry=self._method_configs['CreateSink'].retry, - default_timeout=self._method_configs['CreateSink'].timeout, - client_info=self._client_info, - ) + "create_sink" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_sink, + default_retry=self._method_configs["CreateSink"].retry, + default_timeout=self._method_configs["CreateSink"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, - sink=sink, - unique_writer_identity=unique_writer_identity, + parent=parent, sink=sink, unique_writer_identity=unique_writer_identity + ) + return self._inner_api_calls["create_sink"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['create_sink']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_sink(self, - sink_name, - sink, - unique_writer_identity=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def update_sink( + self, + sink_name, + sink, + unique_writer_identity=None, + update_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and @@ -536,14 +545,15 @@ def update_sink(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
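# What the GRPCIterator configuration in list_sinks() above amounts to: call
# the wrapped method repeatedly, yield the items_field of each response, and
# thread next_page_token back in as page_token until it comes back empty.
# A simplified stand-in over plain dicts instead of protobuf messages.
def iterate_pages(api_method, request, items_field):
    while True:
        response = api_method(request)
        for item in response[items_field]:
            yield item
        token = response.get('next_page_token')
        if not token:
            return
        request['page_token'] = token

pages = iter([{'sinks': ['a', 'b'], 'next_page_token': 't1'},
              {'sinks': ['c'], 'next_page_token': ''}])
assert list(iterate_pages(lambda req: next(pages), {}, 'sinks')) == ['a', 'b', 'c']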
- if 'update_sink' not in self._inner_api_calls: + if "update_sink" not in self._inner_api_calls: self._inner_api_calls[ - 'update_sink'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_sink, - default_retry=self._method_configs['UpdateSink'].retry, - default_timeout=self._method_configs['UpdateSink'].timeout, - client_info=self._client_info, - ) + "update_sink" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_sink, + default_retry=self._method_configs["UpdateSink"].retry, + default_timeout=self._method_configs["UpdateSink"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.UpdateSinkRequest( sink_name=sink_name, @@ -551,14 +561,17 @@ def update_sink(self, unique_writer_identity=unique_writer_identity, update_mask=update_mask, ) - return self._inner_api_calls['update_sink']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def delete_sink(self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_sink"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_sink( + self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -601,25 +614,29 @@ def delete_sink(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_sink' not in self._inner_api_calls: + if "delete_sink" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_sink'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_sink, - default_retry=self._method_configs['DeleteSink'].retry, - default_timeout=self._method_configs['DeleteSink'].timeout, - client_info=self._client_info, - ) + "delete_sink" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_sink, + default_retry=self._method_configs["DeleteSink"].retry, + default_timeout=self._method_configs["DeleteSink"].timeout, + client_info=self._client_info, + ) - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name, ) - self._inner_api_calls['delete_sink']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) + self._inner_api_calls["delete_sink"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def list_exclusions(self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_exclusions( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists all the exclusions in a parent resource. @@ -681,39 +698,41 @@ def list_exclusions(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_exclusions' not in self._inner_api_calls: + if "list_exclusions" not in self._inner_api_calls: self._inner_api_calls[ - 'list_exclusions'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_exclusions, - default_retry=self._method_configs['ListExclusions'].retry, - default_timeout=self._method_configs['ListExclusions']. 
- timeout, - client_info=self._client_info, - ) + "list_exclusions" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_exclusions, + default_retry=self._method_configs["ListExclusions"].retry, + default_timeout=self._method_configs["ListExclusions"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_exclusions'], + self._inner_api_calls["list_exclusions"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='exclusions', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="exclusions", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def get_exclusion(self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def get_exclusion( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets the description of an exclusion. @@ -757,26 +776,29 @@ def get_exclusion(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'get_exclusion' not in self._inner_api_calls: + if "get_exclusion" not in self._inner_api_calls: self._inner_api_calls[ - 'get_exclusion'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_exclusion, - default_retry=self._method_configs['GetExclusion'].retry, - default_timeout=self._method_configs['GetExclusion']. - timeout, - client_info=self._client_info, - ) + "get_exclusion" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_exclusion, + default_retry=self._method_configs["GetExclusion"].retry, + default_timeout=self._method_configs["GetExclusion"].timeout, + client_info=self._client_info, + ) - request = logging_config_pb2.GetExclusionRequest(name=name, ) - return self._inner_api_calls['get_exclusion']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = logging_config_pb2.GetExclusionRequest(name=name) + return self._inner_api_calls["get_exclusion"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def create_exclusion(self, - parent, - exclusion, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def create_exclusion( + self, + parent, + exclusion, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. @@ -831,31 +853,32 @@ def create_exclusion(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_exclusion' not in self._inner_api_calls: + if "create_exclusion" not in self._inner_api_calls: self._inner_api_calls[ - 'create_exclusion'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_exclusion, - default_retry=self._method_configs['CreateExclusion']. - retry, - default_timeout=self._method_configs['CreateExclusion']. 
- timeout, - client_info=self._client_info, - ) + "create_exclusion" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_exclusion, + default_retry=self._method_configs["CreateExclusion"].retry, + default_timeout=self._method_configs["CreateExclusion"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, - exclusion=exclusion, + parent=parent, exclusion=exclusion ) - return self._inner_api_calls['create_exclusion']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_exclusion(self, - name, - exclusion, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["create_exclusion"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_exclusion( + self, + name, + exclusion, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Changes one or more properties of an existing exclusion. @@ -920,30 +943,30 @@ def update_exclusion(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_exclusion' not in self._inner_api_calls: + if "update_exclusion" not in self._inner_api_calls: self._inner_api_calls[ - 'update_exclusion'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_exclusion, - default_retry=self._method_configs['UpdateExclusion']. - retry, - default_timeout=self._method_configs['UpdateExclusion']. - timeout, - client_info=self._client_info, - ) + "update_exclusion" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_exclusion, + default_retry=self._method_configs["UpdateExclusion"].retry, + default_timeout=self._method_configs["UpdateExclusion"].timeout, + client_info=self._client_info, + ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, - exclusion=exclusion, - update_mask=update_mask, + name=name, exclusion=exclusion, update_mask=update_mask ) - return self._inner_api_calls['update_exclusion']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def delete_exclusion(self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_exclusion"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_exclusion( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes an exclusion. @@ -984,17 +1007,17 @@ def delete_exclusion(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_exclusion' not in self._inner_api_calls: + if "delete_exclusion" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_exclusion'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_exclusion, - default_retry=self._method_configs['DeleteExclusion']. - retry, - default_timeout=self._method_configs['DeleteExclusion']. 
- timeout, - client_info=self._client_info, - ) + "delete_exclusion" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_exclusion, + default_retry=self._method_configs["DeleteExclusion"].retry, + default_timeout=self._method_configs["DeleteExclusion"].timeout, + client_info=self._client_info, + ) - request = logging_config_pb2.DeleteExclusionRequest(name=name, ) - self._inner_api_calls['delete_exclusion']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = logging_config_pb2.DeleteExclusionRequest(name=name) + self._inner_api_calls["delete_exclusion"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py index 4dfcb0f02c83..b7c00db4cd93 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py @@ -3,7 +3,7 @@ "google.logging.v2.ConfigServiceV2": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,7 +13,7 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "write_sink": { "initial_retry_delay_millis": 100, @@ -22,61 +22,61 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 - } + "total_timeout_millis": 600000, + }, }, "methods": { "ListSinks": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetSink": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "CreateSink": { "timeout_millis": 120000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "UpdateSink": { "timeout_millis": 120000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteSink": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListExclusions": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetExclusion": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "CreateExclusion": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "UpdateExclusion": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteExclusion": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index e47d4def171e..403002a8abee 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py 
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -52,6 +52,7 @@ class LaunchStage(enum.IntEnum): Platform Subject to the Deprecation Policy `__ documentation. """ + LAUNCH_STAGE_UNSPECIFIED = 0 EARLY_ACCESS = 1 ALPHA = 2 @@ -70,6 +71,7 @@ class NullValue(enum.IntEnum): Attributes: NULL_VALUE (int): Null value. """ + NULL_VALUE = 0 @@ -106,6 +108,7 @@ class LogSeverity(enum.IntEnum): ALERT (int): (700) A person must take an action immediately. EMERGENCY (int): (800) One or more systems are unusable. """ + DEFAULT = 0 DEBUG = 100 INFO = 200 @@ -127,6 +130,7 @@ class ValueType(enum.IntEnum): BOOL (int): Boolean; true or false. INT64 (int): A 64-bit signed integer. """ + STRING = 0 BOOL = 1 INT64 = 2 @@ -147,6 +151,7 @@ class MetricKind(enum.IntEnum): value to zero and sets a new start time for the following points. """ + METRIC_KIND_UNSPECIFIED = 0 GAUGE = 1 DELTA = 2 @@ -167,6 +172,7 @@ class ValueType(enum.IntEnum): DISTRIBUTION (int): The value is a ``Distribution``. MONEY (int): The value is money. """ + VALUE_TYPE_UNSPECIFIED = 0 BOOL = 1 INT64 = 2 @@ -185,6 +191,7 @@ class ApiVersion(enum.IntEnum): V2 (int): Logging API v2. V1 (int): Logging API v1. """ + V2 = 0 V1 = 1 @@ -201,6 +208,7 @@ class VersionFormat(enum.IntEnum): V2 (int): ``LogEntry`` version 2 format. V1 (int): ``LogEntry`` version 1 format. """ + VERSION_FORMAT_UNSPECIFIED = 0 V2 = 1 V1 = 2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 2df0c2578f25..692f01f290a6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -37,19 +37,18 @@ from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-logging', ).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version class LoggingServiceV2Client(object): """Service for ingesting and querying logs.""" - SERVICE_ADDRESS = 'logging.googleapis.com:443' + SERVICE_ADDRESS = "logging.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.logging.v2.LoggingServiceV2' + _INTERFACE_NAME = "google.logging.v2.LoggingServiceV2" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -65,9 +64,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: LoggingServiceV2Client: The constructed client. 
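# How the retry knobs in the client config above combine: exponential backoff
# on the retryable codes, bounded by total_timeout_millis. The 100 ms initial
# delay and 600 s budget come from the config; the 1.3 multiplier and 60 s cap
# are assumptions (those lines are elided from the hunk shown).
def backoff_schedule(initial_ms=100, multiplier=1.3, cap_ms=60000,
                     budget_ms=600000):
    elapsed, delay = 0.0, float(initial_ms)
    while elapsed < budget_ms:
        yield delay
        elapsed += delay
        delay = min(delay * multiplier, cap_ms)

first_four = []
for d in backoff_schedule():
    first_four.append(round(d, 1))
    if len(first_four) == 4:
        break
assert first_four == [100, 130.0, 169.0, 219.7]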
""" - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -76,25 +74,24 @@ def from_service_account_file(cls, filename, *args, **kwargs): def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - 'projects/{project}/logs/{log}', - project=project, - log=log, + "projects/{project}/logs/{log}", project=project, log=log ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - 'projects/{project}', - project=project, + "projects/{project}", project=project ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -128,18 +125,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = logging_service_v2_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -148,25 +146,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=logging_service_v2_grpc_transport. - LoggingServiceV2GrpcTransport, + default_class=logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) self.transport = transport else: self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -176,7 +173,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. 
# These are the actual callables which invoke the proper @@ -185,11 +183,13 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def delete_log(self, - log_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def delete_log( + self, + log_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes all the log entries in a log. The log reappears if it receives new entries. @@ -236,29 +236,33 @@ def delete_log(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_log' not in self._inner_api_calls: + if "delete_log" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_log'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log, - default_retry=self._method_configs['DeleteLog'].retry, - default_timeout=self._method_configs['DeleteLog'].timeout, - client_info=self._client_info, - ) + "delete_log" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_log, + default_retry=self._method_configs["DeleteLog"].retry, + default_timeout=self._method_configs["DeleteLog"].timeout, + client_info=self._client_info, + ) - request = logging_pb2.DeleteLogRequest(log_name=log_name, ) - self._inner_api_calls['delete_log']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def write_log_entries(self, - entries, - log_name=None, - resource=None, - labels=None, - partial_success=None, - dry_run=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + request = logging_pb2.DeleteLogRequest(log_name=log_name) + self._inner_api_calls["delete_log"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def write_log_entries( + self, + entries, + log_name=None, + resource=None, + labels=None, + partial_success=None, + dry_run=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method @@ -373,16 +377,15 @@ def write_log_entries(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'write_log_entries' not in self._inner_api_calls: + if "write_log_entries" not in self._inner_api_calls: self._inner_api_calls[ - 'write_log_entries'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.write_log_entries, - default_retry=self._method_configs['WriteLogEntries']. - retry, - default_timeout=self._method_configs['WriteLogEntries']. 
- timeout, - client_info=self._client_info, - ) + "write_log_entries" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.write_log_entries, + default_retry=self._method_configs["WriteLogEntries"].retry, + default_timeout=self._method_configs["WriteLogEntries"].timeout, + client_info=self._client_info, + ) request = logging_pb2.WriteLogEntriesRequest( entries=entries, @@ -392,18 +395,21 @@ def write_log_entries(self, partial_success=partial_success, dry_run=dry_run, ) - return self._inner_api_calls['write_log_entries']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def list_log_entries(self, - resource_names, - project_ids=None, - filter_=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["write_log_entries"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_log_entries( + self, + resource_names, + project_ids=None, + filter_=None, + order_by=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists log entries. Use this method to retrieve log entries from Logging. For ways to export log entries, see `Exporting @@ -491,15 +497,15 @@ def list_log_entries(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_log_entries' not in self._inner_api_calls: + if "list_log_entries" not in self._inner_api_calls: self._inner_api_calls[ - 'list_log_entries'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_entries, - default_retry=self._method_configs['ListLogEntries'].retry, - default_timeout=self._method_configs['ListLogEntries']. - timeout, - client_info=self._client_info, - ) + "list_log_entries" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_log_entries, + default_retry=self._method_configs["ListLogEntries"].retry, + default_timeout=self._method_configs["ListLogEntries"].timeout, + client_info=self._client_info, + ) request = logging_pb2.ListLogEntriesRequest( resource_names=resource_names, @@ -511,23 +517,25 @@ def list_log_entries(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_log_entries'], + self._inner_api_calls["list_log_entries"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='entries', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="entries", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator def list_monitored_resource_descriptors( - self, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + self, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists the descriptors for monitored resource types used by Logging. @@ -579,39 +587,46 @@ def list_monitored_resource_descriptors( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
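# Illustrative shape of one entry in the write_log_entries() request above,
# written as the JSON mapping of a LogEntry rather than the logging_pb2
# protobuf message the GAPIC layer actually sends. Sample values throughout.
entry = {
    'logName': 'projects/my-project/logs/syslog',
    'resource': {'type': 'global', 'labels': {}},
    'severity': 'INFO',
    'jsonPayload': {'message': 'hello', 'python_logger': 'root'},
}
write_kwargs = {'entries': [entry], 'partial_success': True}
assert write_kwargs['entries'][0]['logName'].startswith('projects/')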
- if 'list_monitored_resource_descriptors' not in self._inner_api_calls: + if "list_monitored_resource_descriptors" not in self._inner_api_calls: self._inner_api_calls[ - 'list_monitored_resource_descriptors'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_monitored_resource_descriptors, - default_retry=self. - _method_configs['ListMonitoredResourceDescriptors'].retry, - default_timeout=self._method_configs[ - 'ListMonitoredResourceDescriptors'].timeout, - client_info=self._client_info, - ) + "list_monitored_resource_descriptors" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_monitored_resource_descriptors, + default_retry=self._method_configs[ + "ListMonitoredResourceDescriptors" + ].retry, + default_timeout=self._method_configs[ + "ListMonitoredResourceDescriptors" + ].timeout, + client_info=self._client_info, + ) request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size, ) + page_size=page_size + ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_monitored_resource_descriptors'], + self._inner_api_calls["list_monitored_resource_descriptors"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='resource_descriptors', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="resource_descriptors", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def list_logs(self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_logs( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -674,29 +689,28 @@ def list_logs(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
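# The paged methods across these clients differ only in their items_field; the
# page-token fields are uniform. Collected from the iterator setups above:
ITEMS_FIELDS = {
    'list_sinks': 'sinks',
    'list_exclusions': 'exclusions',
    'list_log_entries': 'entries',
    'list_monitored_resource_descriptors': 'resource_descriptors',
    'list_logs': 'log_names',
}
assert all(isinstance(v, str) for v in ITEMS_FIELDS.values())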
- if 'list_logs' not in self._inner_api_calls: + if "list_logs" not in self._inner_api_calls: self._inner_api_calls[ - 'list_logs'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_logs, - default_retry=self._method_configs['ListLogs'].retry, - default_timeout=self._method_configs['ListLogs'].timeout, - client_info=self._client_info, - ) + "list_logs" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_logs, + default_retry=self._method_configs["ListLogs"].retry, + default_timeout=self._method_configs["ListLogs"].timeout, + client_info=self._client_info, + ) - request = logging_pb2.ListLogsRequest( - parent=parent, - page_size=page_size, - ) + request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_logs'], + self._inner_api_calls["list_logs"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='log_names', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="log_names", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py index 26fca76eaef1..5293e5e38015 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py @@ -3,7 +3,7 @@ "google.logging.v2.LoggingServiceV2": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,7 +13,7 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "list": { "initial_retry_delay_millis": 100, @@ -22,14 +22,14 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 - } + "total_timeout_millis": 600000, + }, }, "methods": { "DeleteLog": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "WriteLogEntries": { "timeout_millis": 60000, @@ -38,25 +38,25 @@ "bundling": { "element_count_threshold": 1000, "request_byte_threshold": 1048576, - "delay_threshold_millis": 50 - } + "delay_threshold_millis": 50, + }, }, "ListLogEntries": { "timeout_millis": 10000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListMonitoredResourceDescriptors": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListLogs": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 2a11e1ed84a7..8a8d0c1d2bd3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -42,19 +42,18 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-logging', ).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version class MetricsServiceV2Client(object): """Service for configuring logs-based metrics.""" - SERVICE_ADDRESS = 'logging.googleapis.com:443' + SERVICE_ADDRESS = "logging.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.logging.v2.MetricsServiceV2' + _INTERFACE_NAME = "google.logging.v2.MetricsServiceV2" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -70,9 +69,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: MetricsServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -81,25 +79,24 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - 'projects/{project}', - project=project, + "projects/{project}", project=project ) @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - 'projects/{project}/metrics/{metric}', - project=project, - metric=metric, + "projects/{project}/metrics/{metric}", project=project, metric=metric ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -133,18 +130,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = metrics_service_v2_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -153,25 +151,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=metrics_service_v2_grpc_transport. - MetricsServiceV2GrpcTransport, + default_class=metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) self.transport = transport else: self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -181,7 +178,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -190,12 +188,14 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def list_log_metrics(self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_log_metrics( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists logs-based metrics. @@ -254,39 +254,41 @@ def list_log_metrics(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_log_metrics' not in self._inner_api_calls: + if "list_log_metrics" not in self._inner_api_calls: self._inner_api_calls[ - 'list_log_metrics'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_metrics, - default_retry=self._method_configs['ListLogMetrics'].retry, - default_timeout=self._method_configs['ListLogMetrics']. - timeout, - client_info=self._client_info, - ) + "list_log_metrics" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_log_metrics, + default_retry=self._method_configs["ListLogMetrics"].retry, + default_timeout=self._method_configs["ListLogMetrics"].timeout, + client_info=self._client_info, + ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_log_metrics'], + self._inner_api_calls["list_log_metrics"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='metrics', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="metrics", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def get_log_metric(self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def get_log_metric( + self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets a logs-based metric. @@ -325,27 +327,29 @@ def get_log_metric(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
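# (The wrapping below, like every RPC in these generated clients, applies the
# retry/timeout defaults declared in the neighboring *_client_config.py:
# initial delay 100 ms, multiplier 1.3, per-attempt cap 60 s, 600 s total.
# The real backoff lives in google.api_core.retry and adds jitter; the short
# standalone sketch that follows only illustrates the schedule those numbers
# imply, with names local to the sketch.)

delay_ms, schedule = 100.0, []
while len(schedule) < 5:                       # first five attempts
    schedule.append(delay_ms)                  # wait this long before retrying
    delay_ms = min(delay_ms * 1.3, 60000.0)    # grow 1.3x, capped at 60 s
print([round(d, 1) for d in schedule])         # [100.0, 130.0, 169.0, 219.7, 285.6]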
- if 'get_log_metric' not in self._inner_api_calls: + if "get_log_metric" not in self._inner_api_calls: self._inner_api_calls[ - 'get_log_metric'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_log_metric, - default_retry=self._method_configs['GetLogMetric'].retry, - default_timeout=self._method_configs['GetLogMetric']. - timeout, - client_info=self._client_info, - ) + "get_log_metric" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_log_metric, + default_retry=self._method_configs["GetLogMetric"].retry, + default_timeout=self._method_configs["GetLogMetric"].timeout, + client_info=self._client_info, + ) - request = logging_metrics_pb2.GetLogMetricRequest( - metric_name=metric_name, ) - return self._inner_api_calls['get_log_metric']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def create_log_metric(self, - parent, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) + return self._inner_api_calls["get_log_metric"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_log_metric( + self, + parent, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a logs-based metric. @@ -394,30 +398,31 @@ def create_log_metric(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_log_metric' not in self._inner_api_calls: + if "create_log_metric" not in self._inner_api_calls: self._inner_api_calls[ - 'create_log_metric'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_log_metric, - default_retry=self._method_configs['CreateLogMetric']. - retry, - default_timeout=self._method_configs['CreateLogMetric']. - timeout, - client_info=self._client_info, - ) + "create_log_metric" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_log_metric, + default_retry=self._method_configs["CreateLogMetric"].retry, + default_timeout=self._method_configs["CreateLogMetric"].timeout, + client_info=self._client_info, + ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, - metric=metric, + parent=parent, metric=metric + ) + return self._inner_api_calls["create_log_metric"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['create_log_metric']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_log_metric(self, - metric_name, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def update_log_metric( + self, + metric_name, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates or updates a logs-based metric. @@ -467,29 +472,30 @@ def update_log_metric(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_log_metric' not in self._inner_api_calls: + if "update_log_metric" not in self._inner_api_calls: self._inner_api_calls[ - 'update_log_metric'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_log_metric, - default_retry=self._method_configs['UpdateLogMetric']. - retry, - default_timeout=self._method_configs['UpdateLogMetric']. 
- timeout, - client_info=self._client_info, - ) + "update_log_metric" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_log_metric, + default_retry=self._method_configs["UpdateLogMetric"].retry, + default_timeout=self._method_configs["UpdateLogMetric"].timeout, + client_info=self._client_info, + ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, - metric=metric, + metric_name=metric_name, metric=metric ) - return self._inner_api_calls['update_log_metric']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def delete_log_metric(self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_log_metric"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_log_metric( + self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes a logs-based metric. @@ -525,18 +531,17 @@ def delete_log_metric(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_log_metric' not in self._inner_api_calls: + if "delete_log_metric" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_log_metric'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log_metric, - default_retry=self._method_configs['DeleteLogMetric']. - retry, - default_timeout=self._method_configs['DeleteLogMetric']. - timeout, - client_info=self._client_info, - ) + "delete_log_metric" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_log_metric, + default_retry=self._method_configs["DeleteLogMetric"].retry, + default_timeout=self._method_configs["DeleteLogMetric"].timeout, + client_info=self._client_info, + ) - request = logging_metrics_pb2.DeleteLogMetricRequest( - metric_name=metric_name, ) - self._inner_api_calls['delete_log_metric']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name) + self._inner_api_calls["delete_log_metric"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py index 57377ba1e29d..133abec23dcf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py @@ -3,7 +3,7 @@ "google.logging.v2.MetricsServiceV2": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,36 +13,36 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, } }, "methods": { "ListLogMetrics": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetLogMetric": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "CreateLogMetric": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - 
"retry_params_name": "default" + "retry_params_name": "default", }, "UpdateLogMetric": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteLogMetric": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 224048411980..f27ed84c91a0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -27,20 +27,20 @@ class ConfigServiceV2GrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - def __init__(self, - channel=None, - credentials=None, - address='logging.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="logging.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -58,29 +58,25 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - 'config_service_v2_stub': - logging_config_pb2_grpc.ConfigServiceV2Stub(channel), + "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( + channel + ) } @classmethod - def create_channel(cls, - address='logging.googleapis.com:443', - credentials=None): + def create_channel(cls, address="logging.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -95,9 +91,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -120,7 +114,7 @@ def list_sinks(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['config_service_v2_stub'].ListSinks + return self._stubs["config_service_v2_stub"].ListSinks @property def get_sink(self): @@ -133,7 +127,7 @@ def get_sink(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].GetSink + return self._stubs["config_service_v2_stub"].GetSink @property def create_sink(self): @@ -149,7 +143,7 @@ def create_sink(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].CreateSink + return self._stubs["config_service_v2_stub"].CreateSink @property def update_sink(self): @@ -165,7 +159,7 @@ def update_sink(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].UpdateSink + return self._stubs["config_service_v2_stub"].UpdateSink @property def delete_sink(self): @@ -179,7 +173,7 @@ def delete_sink(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].DeleteSink + return self._stubs["config_service_v2_stub"].DeleteSink @property def list_exclusions(self): @@ -192,7 +186,7 @@ def list_exclusions(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].ListExclusions + return self._stubs["config_service_v2_stub"].ListExclusions @property def get_exclusion(self): @@ -205,7 +199,7 @@ def get_exclusion(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].GetExclusion + return self._stubs["config_service_v2_stub"].GetExclusion @property def create_exclusion(self): @@ -220,7 +214,7 @@ def create_exclusion(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].CreateExclusion + return self._stubs["config_service_v2_stub"].CreateExclusion @property def update_exclusion(self): @@ -233,7 +227,7 @@ def update_exclusion(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].UpdateExclusion + return self._stubs["config_service_v2_stub"].UpdateExclusion @property def delete_exclusion(self): @@ -246,4 +240,4 @@ def delete_exclusion(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['config_service_v2_stub'].DeleteExclusion + return self._stubs["config_service_v2_stub"].DeleteExclusion diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 4393bfb57f48..008d803d5415 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -27,20 +27,20 @@ class LoggingServiceV2GrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. 
_OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - def __init__(self, - channel=None, - credentials=None, - address='logging.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="logging.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -58,29 +58,23 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - 'logging_service_v2_stub': - logging_pb2_grpc.LoggingServiceV2Stub(channel), + "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel) } @classmethod - def create_channel(cls, - address='logging.googleapis.com:443', - credentials=None): + def create_channel(cls, address="logging.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -95,9 +89,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -123,7 +115,7 @@ def delete_log(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['logging_service_v2_stub'].DeleteLog + return self._stubs["logging_service_v2_stub"].DeleteLog @property def write_log_entries(self): @@ -142,7 +134,7 @@ def write_log_entries(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['logging_service_v2_stub'].WriteLogEntries + return self._stubs["logging_service_v2_stub"].WriteLogEntries @property def list_log_entries(self): @@ -157,7 +149,7 @@ def list_log_entries(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['logging_service_v2_stub'].ListLogEntries + return self._stubs["logging_service_v2_stub"].ListLogEntries @property def list_monitored_resource_descriptors(self): @@ -170,8 +162,7 @@ def list_monitored_resource_descriptors(self): deserialized request object and returns a deserialized response object. """ - return self._stubs[ - 'logging_service_v2_stub'].ListMonitoredResourceDescriptors + return self._stubs["logging_service_v2_stub"].ListMonitoredResourceDescriptors @property def list_logs(self): @@ -185,4 +176,4 @@ def list_logs(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['logging_service_v2_stub'].ListLogs + return self._stubs["logging_service_v2_stub"].ListLogs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 4a4b809b892d..6876c305e36e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -27,20 +27,20 @@ class MetricsServiceV2GrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - def __init__(self, - channel=None, - credentials=None, - address='logging.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="logging.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -58,29 +58,25 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - 'metrics_service_v2_stub': - logging_metrics_pb2_grpc.MetricsServiceV2Stub(channel), + "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( + channel + ) } @classmethod - def create_channel(cls, - address='logging.googleapis.com:443', - credentials=None): + def create_channel(cls, address="logging.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -95,9 +91,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -120,7 +114,7 @@ def list_log_metrics(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['metrics_service_v2_stub'].ListLogMetrics + return self._stubs["metrics_service_v2_stub"].ListLogMetrics @property def get_log_metric(self): @@ -133,7 +127,7 @@ def get_log_metric(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['metrics_service_v2_stub'].GetLogMetric + return self._stubs["metrics_service_v2_stub"].GetLogMetric @property def create_log_metric(self): @@ -146,7 +140,7 @@ def create_log_metric(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['metrics_service_v2_stub'].CreateLogMetric + return self._stubs["metrics_service_v2_stub"].CreateLogMetric @property def update_log_metric(self): @@ -159,7 +153,7 @@ def update_log_metric(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['metrics_service_v2_stub'].UpdateLogMetric + return self._stubs["metrics_service_v2_stub"].UpdateLogMetric @property def delete_log_metric(self): @@ -172,4 +166,4 @@ def delete_log_metric(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['metrics_service_v2_stub'].DeleteLogMetric + return self._stubs["metrics_service_v2_stub"].DeleteLogMetric diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types.py b/packages/google-cloud-logging/google/cloud/logging_v2/types.py index 25787f31017f..464edbe709dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types.py @@ -54,12 +54,7 @@ status_pb2, ] -_local_modules = [ - log_entry_pb2, - logging_config_pb2, - logging_metrics_pb2, - logging_pb2, -] +_local_modules = [log_entry_pb2, logging_config_pb2, logging_metrics_pb2, logging_pb2] names = [] @@ -70,7 +65,7 @@ for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = 'google.cloud.logging_v2.types' + message.__module__ = "google.cloud.logging_v2.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py index bf3ce70c0d92..d574de7785f7 100644 --- a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py +++ b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py @@ -27,9 +27,10 @@ def test_write_log_entries(self): _, project_id = google.auth.default() client = logging_v2.LoggingServiceV2Client() - log_name = client.log_path(project_id, 'test-{0}'.format(time.time())) + log_name = client.log_path(project_id, "test-{0}".format(time.time())) resource = {} labels = {} entries = [] response = client.write_log_entries( - entries, log_name=log_name, resource=resource, labels=labels) + entries, log_name=log_name, resource=resource, labels=labels + ) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index c028ff3b4189..63add9f26fcf 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -35,9 +35,9 @@ from test_utils.retry import RetryResult from test_utils.system import unique_resource_id -_RESOURCE_ID = unique_resource_id('-') -DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' -DEFAULT_DESCRIPTION = 'System testing' +_RESOURCE_ID = unique_resource_id("-") +DEFAULT_FILTER = "logName:syslog AND severity>=INFO" +DEFAULT_DESCRIPTION = "System testing" retry_429 = RetryErrors(TooManyRequests) @@ -66,8 +66,7 @@ def _list_entries(logger): :returns: List of all entries consumed. 
""" inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) - outer = RetryErrors( - (ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) + outer = RetryErrors((ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) return outer(logger) @@ -81,6 +80,7 @@ class Config(object): This is a mutable stand-in to allow test set-up to modify global state. """ + CLIENT = None @@ -91,11 +91,11 @@ def setUpModule(): class TestLogging(unittest.TestCase): JSON_PAYLOAD = { - 'message': 'System test: test_log_struct', - 'weather': { - 'clouds': 'party or partly', - 'temperature': 70, - 'precipitation': False, + "message": "System test: test_log_struct", + "weather": { + "clouds": "party or partly", + "temperature": 70, + "precipitation": False, }, } TYPE_FILTER = 'protoPayload.@type = "{}"' @@ -105,8 +105,7 @@ def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] def tearDown(self): - retry = RetryErrors( - (NotFound, TooManyRequests, RetryError), max_tries=9) + retry = RetryErrors((NotFound, TooManyRequests, RetryError), max_tries=9) for doomed in self.to_delete: try: retry(doomed.delete)() @@ -117,7 +116,7 @@ def tearDown(self): @staticmethod def _logger_name(prefix): - return prefix + unique_resource_id('-') + return prefix + unique_resource_id("-") def test_list_entry_with_unregistered(self): from google.protobuf import any_pb2 @@ -125,15 +124,14 @@ def test_list_entry_with_unregistered(self): from google.cloud.logging import entries pool = descriptor_pool.Default() - type_name = 'google.cloud.audit.AuditLog' + type_name = "google.cloud.audit.AuditLog" # Make sure the descriptor is not known in the registry. with self.assertRaises(KeyError): pool.FindMessageTypeByName(type_name) - type_url = 'type.googleapis.com/' + type_name + type_url = "type.googleapis.com/" + type_name filter_ = self.TYPE_FILTER.format(type_url) - entry_iter = iter( - Config.CLIENT.list_entries(page_size=1, filter_=filter_)) + entry_iter = iter(Config.CLIENT.list_entries(page_size=1, filter_=filter_)) retry = RetryErrors(TooManyRequests) protobuf_entry = retry(lambda: next(entry_iter))() @@ -145,11 +143,11 @@ def test_list_entry_with_unregistered(self): self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) else: self.assertIsNone(protobuf_entry.payload_pb) - self.assertEqual(protobuf_entry.payload_json['@type'], type_url) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) def test_log_text(self): - TEXT_PAYLOAD = 'System test: test_log_text' - logger = Config.CLIENT.logger(self._logger_name('log_text')) + TEXT_PAYLOAD = "System test: test_log_text" + logger = Config.CLIENT.logger(self._logger_name("log_text")) self.to_delete.append(logger) logger.log_text(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -159,8 +157,8 @@ def test_log_text(self): def test_log_text_with_timestamp(self): import datetime - text_payload = 'System test: test_log_text_with_timestamp' - logger = Config.CLIENT.logger(self._logger_name('log_text_ts')) + text_payload = "System test: test_log_text_with_timestamp" + logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) now = datetime.datetime.utcnow() self.to_delete.append(logger) @@ -173,16 +171,12 @@ def test_log_text_with_timestamp(self): self.assertIsInstance(entries[0].received_timestamp, datetime.datetime) def test_log_text_with_resource(self): - text_payload = 'System test: test_log_text_with_timestamp' + text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name('log_text_res')) + 
logger = Config.CLIENT.logger(self._logger_name("log_text_res")) now = datetime.datetime.utcnow() resource = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) self.to_delete.append(logger) @@ -192,26 +186,23 @@ def test_log_text_with_resource(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, text_payload) # project_id is output only so we don't want it in assertion - del entries[0].resource.labels['project_id'] + del entries[0].resource.labels["project_id"] self.assertEqual(entries[0].resource, resource) def test_log_text_w_metadata(self): - TEXT_PAYLOAD = 'System test: test_log_text' - INSERT_ID = 'INSERTID' - SEVERITY = 'INFO' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' + TEXT_PAYLOAD = "System test: test_log_text" + INSERT_ID = "INSERTID" + SEVERITY = "INFO" + METHOD = "POST" + URI = "https://api.example.com/endpoint" STATUS = 500 - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } - logger = Config.CLIENT.logger(self._logger_name('log_text_md')) + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} + logger = Config.CLIENT.logger(self._logger_name("log_text_md")) self.to_delete.append(logger) - logger.log_text(TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, - http_request=REQUEST) + logger.log_text( + TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST + ) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -222,12 +213,12 @@ def test_log_text_w_metadata(self): self.assertEqual(entry.severity, SEVERITY) request = entry.http_request - self.assertEqual(request['requestMethod'], METHOD) - self.assertEqual(request['requestUrl'], URI) - self.assertEqual(request['status'], STATUS) + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name('log_struct')) + logger = Config.CLIENT.logger(self._logger_name("log_struct")) self.to_delete.append(logger) logger.log_struct(self.JSON_PAYLOAD) @@ -237,21 +228,21 @@ def test_log_struct(self): self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_struct_w_metadata(self): - INSERT_ID = 'INSERTID' - SEVERITY = 'INFO' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' + INSERT_ID = "INSERTID" + SEVERITY = "INFO" + METHOD = "POST" + URI = "https://api.example.com/endpoint" STATUS = 500 - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } - logger = Config.CLIENT.logger(self._logger_name('log_struct_md')) + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} + logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) self.to_delete.append(logger) - logger.log_struct(self.JSON_PAYLOAD, insert_id=INSERT_ID, - severity=SEVERITY, http_request=REQUEST) + logger.log_struct( + self.JSON_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -259,14 +250,14 @@ def test_log_struct_w_metadata(self): self.assertEqual(entries[0].insert_id, INSERT_ID) self.assertEqual(entries[0].severity, SEVERITY) request = entries[0].http_request - self.assertEqual(request['requestMethod'], METHOD) - self.assertEqual(request['requestUrl'], URI) - self.assertEqual(request['status'], STATUS) 
+ self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_handler_async(self): - LOG_MESSAGE = 'It was the worst of times' + LOG_MESSAGE = "It was the worst of times" - handler_name = self._logger_name('handler_async') + handler_name = self._logger_name("handler_async") handler = CloudLoggingHandler(Config.CLIENT, name=handler_name) # only create the logger to delete, hidden otherwise logger = Config.CLIENT.logger(handler_name) @@ -277,43 +268,38 @@ def test_log_handler_async(self): cloud_logger.warn(LOG_MESSAGE) handler.flush() entries = _list_entries(logger) - expected_payload = { - 'message': LOG_MESSAGE, - 'python_logger': handler.name - } + expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_log_handler_sync(self): - LOG_MESSAGE = 'It was the best of times.' + LOG_MESSAGE = "It was the best of times." - handler_name = self._logger_name('handler_sync') - handler = CloudLoggingHandler(Config.CLIENT, - name=handler_name, - transport=SyncTransport) + handler_name = self._logger_name("handler_sync") + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) # only create the logger to delete, hidden otherwise logger = Config.CLIENT.logger(handler.name) self.to_delete.append(logger) - LOGGER_NAME = 'mylogger' + LOGGER_NAME = "mylogger" cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) cloud_logger.warn(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = { - 'message': LOG_MESSAGE, - 'python_logger': LOGGER_NAME - } + expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_log_root_handler(self): - LOG_MESSAGE = 'It was the best of times.' + LOG_MESSAGE = "It was the best of times." 
handler = CloudLoggingHandler( - Config.CLIENT, name=self._logger_name('handler_root')) + Config.CLIENT, name=self._logger_name("handler_root") + ) # only create the logger to delete, hidden otherwise logger = Config.CLIENT.logger(handler.name) self.to_delete.append(logger) @@ -322,18 +308,14 @@ def test_log_root_handler(self): logging.warn(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = { - 'message': LOG_MESSAGE, - 'python_logger': 'root' - } + expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_create_metric(self): - METRIC_NAME = 'test-create-metric%s' % (_RESOURCE_ID,) - metric = Config.CLIENT.metric( - METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) + metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) retry = RetryErrors(Conflict) @@ -343,9 +325,8 @@ def test_create_metric(self): self.assertTrue(metric.exists()) def test_list_metrics(self): - METRIC_NAME = 'test-list-metrics%s' % (_RESOURCE_ID,) - metric = Config.CLIENT.metric( - METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + METRIC_NAME = "test-list-metrics%s" % (_RESOURCE_ID,) + metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) before_metrics = list(Config.CLIENT.list_metrics()) before_names = set(before.name for before in before_metrics) @@ -361,15 +342,14 @@ def test_list_metrics(self): self.assertTrue(metric.name in after_names) def test_reload_metric(self): - METRIC_NAME = 'test-reload-metric%s' % (_RESOURCE_ID,) + METRIC_NAME = "test-reload-metric%s" % (_RESOURCE_ID,) retry = RetryErrors(Conflict) - metric = Config.CLIENT.metric( - METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) retry(metric.create)() self.to_delete.append(metric) - metric.filter_ = 'logName:other' - metric.description = 'local changes' + metric.filter_ = "logName:other" + metric.description = "local changes" metric.reload() @@ -377,12 +357,11 @@ def test_reload_metric(self): self.assertEqual(metric.description, DEFAULT_DESCRIPTION) def test_update_metric(self): - METRIC_NAME = 'test-update-metric%s' % (_RESOURCE_ID,) + METRIC_NAME = "test-update-metric%s" % (_RESOURCE_ID,) retry = RetryErrors(Conflict) - NEW_FILTER = 'logName:other' - NEW_DESCRIPTION = 'updated' - metric = Config.CLIENT.metric( - METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + NEW_FILTER = "logName:other" + NEW_DESCRIPTION = "updated" + metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) self.assertFalse(metric.exists()) retry(metric.create)() self.to_delete.append(metric) @@ -399,8 +378,9 @@ def test_update_metric(self): def _init_storage_bucket(self): from google.cloud import storage - BUCKET_NAME = 'g-c-python-testing%s' % (_RESOURCE_ID,) - BUCKET_URI = 'storage.googleapis.com/%s' % (BUCKET_NAME,) + + BUCKET_NAME = "g-c-python-testing%s" % (_RESOURCE_ID,) + BUCKET_URI = "storage.googleapis.com/%s" % (BUCKET_NAME,) # Create the destination bucket, and set up the ACL to allow # Stackdriver Logging to write into it. 
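# The sink system tests that follow all share one shape: provision a
# destination (GCS bucket, Pub/Sub topic, or BigQuery dataset), grant the
# cloud-logs@google.com group write access on it, then point a sink at the
# destination URI. A hedged sketch of the storage variant -- the helper name
# and its arguments are illustrative, but each call on `bucket` and the
# Config.CLIENT.sink(...) signature match the code in this module:

def _sketch_bucket_sink(storage_client, bucket_name, sink_name):
    bucket = storage_client.bucket(bucket_name)
    bucket.create()
    bucket.acl.reload()
    logs_group = bucket.acl.group("cloud-logs@google.com")  # logging's group
    logs_group.grant_owner()                                # allow exports
    bucket.acl.add_entity(logs_group)
    bucket.acl.save()
    uri = "storage.googleapis.com/%s" % (bucket_name,)
    return Config.CLIENT.sink(sink_name, DEFAULT_FILTER, uri)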
@@ -410,7 +390,7 @@ def _init_storage_bucket(self): retry(bucket.create)() self.to_delete.append(bucket) bucket.acl.reload() - logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group = bucket.acl.group("cloud-logs@google.com") logs_group.grant_owner() bucket.acl.add_entity(logs_group) bucket.acl.save() @@ -419,7 +399,7 @@ def _init_storage_bucket(self): def test_create_sink_storage_bucket(self): uri = self._init_storage_bucket() - SINK_NAME = 'test-create-sink-bucket%s' % (_RESOURCE_ID,) + SINK_NAME = "test-create-sink-bucket%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) @@ -433,8 +413,8 @@ def test_create_sink_storage_bucket(self): def test_create_sink_pubsub_topic(self): from google.cloud import pubsub_v1 - SINK_NAME = 'test-create-sink-topic%s' % (_RESOURCE_ID,) - TOPIC_NAME = 'logging-systest{}'.format(unique_resource_id('-')) + SINK_NAME = "test-create-sink-topic%s" % (_RESOURCE_ID,) + TOPIC_NAME = "logging-systest{}".format(unique_resource_id("-")) # Create the destination topic, and set up the IAM policy to allow # Stackdriver Logging to write into it. @@ -444,13 +424,10 @@ def test_create_sink_pubsub_topic(self): publisher.create_topic(topic_path) policy = publisher.get_iam_policy(topic_path) - policy.bindings.add( - role='roles/owner', - members=['group:cloud-logs@google.com'] - ) + policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"]) publisher.set_iam_policy(topic_path, policy) - TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic_path,) + TOPIC_URI = "pubsub.googleapis.com/%s" % (topic_path,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) @@ -464,28 +441,28 @@ def test_create_sink_pubsub_topic(self): def _init_bigquery_dataset(self): from google.cloud import bigquery from google.cloud.bigquery.dataset import AccessEntry - dataset_name = ( - 'system_testing_dataset' + _RESOURCE_ID).replace('-', '_') - dataset_uri = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( - Config.CLIENT.project, dataset_name,) + + dataset_name = ("system_testing_dataset" + _RESOURCE_ID).replace("-", "_") + dataset_uri = "bigquery.googleapis.com/projects/%s/datasets/%s" % ( + Config.CLIENT.project, + dataset_name, + ) # Create the destination dataset, and set up the ACL to allow # Stackdriver Logging to write into it. 
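# (RetryErrors, from test_utils.retry, appears throughout these tests with
# the calling convention `RetryErrors(exc_types)(func)(*args)`: it returns a
# wrapper that re-invokes func while one of the listed exception types keeps
# being raised. The class below is only a hedged approximation of that
# behavior, not the real implementation.)

import time

class _RetryErrorsSketch(object):
    def __init__(self, exceptions, max_tries=4, delay=1):
        self.exceptions = exceptions        # exception type or tuple of types
        self.max_tries = max_tries
        self.delay = delay

    def __call__(self, func):
        def wrapped(*args, **kwargs):
            for attempt in range(self.max_tries):
                try:
                    return func(*args, **kwargs)
                except self.exceptions:
                    if attempt == self.max_tries - 1:
                        raise               # out of tries; surface the error
                    time.sleep(self.delay)  # back off, then try again
        return wrapped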
retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) bigquery_client = bigquery.Client() dataset_ref = bigquery_client.dataset(dataset_name) - dataset = retry(bigquery_client.create_dataset)( - bigquery.Dataset(dataset_ref)) + dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref)) self.to_delete.append((bigquery_client, dataset)) bigquery_client.get_dataset(dataset) - access = AccessEntry( - 'WRITER', 'groupByEmail', 'cloud-logs@google.com') + access = AccessEntry("WRITER", "groupByEmail", "cloud-logs@google.com") dataset.access_entries.append(access) - bigquery_client.update_dataset(dataset, ['access_entries']) + bigquery_client.update_dataset(dataset, ["access_entries"]) return dataset_uri def test_create_sink_bigquery_dataset(self): - SINK_NAME = 'test-create-sink-dataset%s' % (_RESOURCE_ID,) + SINK_NAME = "test-create-sink-dataset%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) @@ -497,7 +474,7 @@ def test_create_sink_bigquery_dataset(self): self.assertTrue(sink.exists()) def test_list_sinks(self): - SINK_NAME = 'test-list-sinks%s' % (_RESOURCE_ID,) + SINK_NAME = "test-list-sinks%s" % (_RESOURCE_ID,) uri = self._init_storage_bucket() retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) @@ -515,15 +492,15 @@ def test_list_sinks(self): self.assertTrue(sink.name in after_names) def test_reload_sink(self): - SINK_NAME = 'test-reload-sink%s' % (_RESOURCE_ID,) + SINK_NAME = "test-reload-sink%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) self.assertFalse(sink.exists()) retry(sink.create)() self.to_delete.append(sink) - sink.filter_ = 'BOGUS FILTER' - sink.destination = 'BOGUS DESTINATION' + sink.filter_ = "BOGUS FILTER" + sink.destination = "BOGUS DESTINATION" sink.reload() @@ -531,11 +508,11 @@ def test_reload_sink(self): self.assertEqual(sink.destination, uri) def test_update_sink(self): - SINK_NAME = 'test-update-sink%s' % (_RESOURCE_ID,) + SINK_NAME = "test-update-sink%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) bucket_uri = self._init_storage_bucket() dataset_uri = self._init_bigquery_dataset() - UPDATED_FILTER = 'logName:syslog' + UPDATED_FILTER = "logName:syslog" sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, bucket_uri) self.assertFalse(sink.exists()) retry(sink.create)() @@ -550,7 +527,6 @@ def test_update_sink(self): class _DeleteWrapper(object): - def __init__(self, publisher, topic_path): self.publisher = publisher self.topic_path = topic_path diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py index 70b375cf3dd6..429dafb233ac 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -52,10 +52,7 @@ def __init__(self, responses=[]): self.responses = responses self.requests = [] - def unary_unary(self, - method, - request_serializer=None, - response_deserializer=None): + def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) @@ 
-66,25 +63,21 @@ class CustomException(Exception): class TestConfigServiceV2Client(object): def test_list_sinks(self): # Setup Expected Response - next_page_token = '' + next_page_token = "" sinks_element = {} sinks = [sinks_element] - expected_response = { - 'next_page_token': next_page_token, - 'sinks': sinks - } - expected_response = logging_config_pb2.ListSinksResponse( - **expected_response) + expected_response = {"next_page_token": next_page_token, "sinks": sinks} + expected_response = logging_config_pb2.ListSinksResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = logging_v2.ConfigServiceV2Client() # Setup Request - parent = client.project_path('[PROJECT]') + parent = client.project_path("[PROJECT]") paged_list_response = client.list_sinks(parent) resources = list(paged_list_response) @@ -99,13 +92,13 @@ def test_list_sinks(self): def test_list_sinks_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = logging_v2.ConfigServiceV2Client() # Setup request - parent = client.project_path('[PROJECT]') + parent = client.project_path("[PROJECT]") paged_list_response = client.list_sinks(parent) with pytest.raises(CustomException): @@ -113,78 +106,77 @@ def test_list_sinks_exception(self): def test_get_sink(self): # Setup Expected Response - name = 'name3373707' - destination = 'destination-1429847026' - filter_ = 'filter-1274492040' - writer_identity = 'writerIdentity775638794' + name = "name3373707" + destination = "destination-1429847026" + filter_ = "filter-1274492040" + writer_identity = "writerIdentity775638794" include_children = True expected_response = { - 'name': name, - 'destination': destination, - 'filter': filter_, - 'writer_identity': writer_identity, - 'include_children': include_children + "name": name, + "destination": destination, + "filter": filter_, + "writer_identity": writer_identity, + "include_children": include_children, } expected_response = logging_config_pb2.LogSink(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = logging_v2.ConfigServiceV2Client() # Setup Request - sink_name = client.sink_path('[PROJECT]', '[SINK]') + sink_name = client.sink_path("[PROJECT]", "[SINK]") response = client.get_sink(sink_name) assert expected_response == response assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetSinkRequest( - sink_name=sink_name) + expected_request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_get_sink_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel 
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink_name = client.sink_path("[PROJECT]", "[SINK]")

         with pytest.raises(CustomException):
             client.get_sink(sink_name)

     def test_create_sink(self):
         # Setup Expected Response
-        name = 'name3373707'
-        destination = 'destination-1429847026'
-        filter_ = 'filter-1274492040'
-        writer_identity = 'writerIdentity775638794'
+        name = "name3373707"
+        destination = "destination-1429847026"
+        filter_ = "filter-1274492040"
+        writer_identity = "writerIdentity775638794"
         include_children = True
         expected_response = {
-            'name': name,
-            'destination': destination,
-            'filter': filter_,
-            'writer_identity': writer_identity,
-            'include_children': include_children
+            "name": name,
+            "destination": destination,
+            "filter": filter_,
+            "writer_identity": writer_identity,
+            "include_children": include_children,
         }
         expected_response = logging_config_pb2.LogSink(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         sink = {}

         response = client.create_sink(parent, sink)
@@ -192,20 +184,21 @@ def test_create_sink(self):
         assert len(channel.requests) == 1
         expected_request = logging_config_pb2.CreateSinkRequest(
-            parent=parent, sink=sink)
+            parent=parent, sink=sink
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_create_sink_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         sink = {}

         with pytest.raises(CustomException):
@@ -213,29 +206,29 @@ def test_update_sink(self):
     def test_update_sink(self):
         # Setup Expected Response
-        name = 'name3373707'
-        destination = 'destination-1429847026'
-        filter_ = 'filter-1274492040'
-        writer_identity = 'writerIdentity775638794'
+        name = "name3373707"
+        destination = "destination-1429847026"
+        filter_ = "filter-1274492040"
+        writer_identity = "writerIdentity775638794"
         include_children = True
         expected_response = {
-            'name': name,
-            'destination': destination,
-            'filter': filter_,
-            'writer_identity': writer_identity,
-            'include_children': include_children
+            "name": name,
+            "destination": destination,
+            "filter": filter_,
+            "writer_identity": writer_identity,
+            "include_children": include_children,
         }
         expected_response = logging_config_pb2.LogSink(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink_name = client.sink_path("[PROJECT]", "[SINK]")
         sink = {}

         response = client.update_sink(sink_name, sink)
@@ -243,20 +236,21 @@ def test_update_sink(self):
         assert len(channel.requests) == 1
         expected_request = logging_config_pb2.UpdateSinkRequest(
-            sink_name=sink_name, sink=sink)
+            sink_name=sink_name, sink=sink
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_update_sink_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink_name = client.sink_path("[PROJECT]", "[SINK]")
         sink = {}

         with pytest.raises(CustomException):
@@ -264,57 +258,57 @@ def test_delete_sink(self):
     def test_delete_sink(self):
         channel = ChannelStub()
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink_name = client.sink_path("[PROJECT]", "[SINK]")

         client.delete_sink(sink_name)

         assert len(channel.requests) == 1
-        expected_request = logging_config_pb2.DeleteSinkRequest(
-            sink_name=sink_name)
+        expected_request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name)
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_delete_sink_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink_name = client.sink_path("[PROJECT]", "[SINK]")

         with pytest.raises(CustomException):
             client.delete_sink(sink_name)

     def test_list_exclusions(self):
         # Setup Expected Response
-        next_page_token = ''
+        next_page_token = ""
         exclusions_element = {}
         exclusions = [exclusions_element]
         expected_response = {
-            'next_page_token': next_page_token,
-            'exclusions': exclusions
+            "next_page_token": next_page_token,
+            "exclusions": exclusions,
         }
         expected_response = logging_config_pb2.ListExclusionsResponse(
-            **expected_response)
+            **expected_response
+        )

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_exclusions(parent)
         resources = list(paged_list_response)
@@ -323,20 +317,19 @@ def test_list_exclusions(self):
         assert expected_response.exclusions[0] == resources[0]

         assert len(channel.requests) == 1
-        expected_request = logging_config_pb2.ListExclusionsRequest(
-            parent=parent)
+        expected_request = logging_config_pb2.ListExclusionsRequest(parent=parent)
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_list_exclusions_exception(self):
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_exclusions(parent)
         with pytest.raises(CustomException):
@@ -344,28 +337,27 @@ def test_get_exclusion(self):
     def test_get_exclusion(self):
         # Setup Expected Response
-        name_2 = 'name2-1052831874'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
+        name_2 = "name2-1052831874"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
         disabled = True
         expected_response = {
-            'name': name_2,
-            'description': description,
-            'filter': filter_,
-            'disabled': disabled
+            "name": name_2,
+            "description": description,
+            "filter": filter_,
+            "disabled": disabled,
         }
-        expected_response = logging_config_pb2.LogExclusion(
-            **expected_response)
+        expected_response = logging_config_pb2.LogExclusion(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")

         response = client.get_exclusion(name)
         assert expected_response == response
@@ -378,41 +370,40 @@ def test_get_exclusion(self):
     def test_get_exclusion_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")

         with pytest.raises(CustomException):
             client.get_exclusion(name)

     def test_create_exclusion(self):
         # Setup Expected Response
-        name = 'name3373707'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
+        name = "name3373707"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
         disabled = True
         expected_response = {
-            'name': name,
-            'description': description,
-            'filter': filter_,
-            'disabled': disabled
+            "name": name,
+            "description": description,
+            "filter": filter_,
+            "disabled": disabled,
        }
-        expected_response = logging_config_pb2.LogExclusion(
-            **expected_response)
+        expected_response = logging_config_pb2.LogExclusion(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         exclusion = {}

         response = client.create_exclusion(parent, exclusion)
@@ -420,20 +411,21 @@ def test_create_exclusion(self):
         assert len(channel.requests) == 1
         expected_request = logging_config_pb2.CreateExclusionRequest(
-            parent=parent, exclusion=exclusion)
+            parent=parent, exclusion=exclusion
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_create_exclusion_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         exclusion = {}

         with pytest.raises(CustomException):
@@ -441,28 +433,27 @@ def test_update_exclusion(self):
     def test_update_exclusion(self):
         # Setup Expected Response
-        name_2 = 'name2-1052831874'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
+        name_2 = "name2-1052831874"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
         disabled = True
         expected_response = {
-            'name': name_2,
-            'description': description,
-            'filter': filter_,
-            'disabled': disabled
+            "name": name_2,
+            "description": description,
+            "filter": filter_,
+            "disabled": disabled,
         }
-        expected_response = logging_config_pb2.LogExclusion(
-            **expected_response)
+        expected_response = logging_config_pb2.LogExclusion(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")
         exclusion = {}
         update_mask = {}
@@ -471,20 +462,21 @@ def test_update_exclusion(self):
         assert len(channel.requests) == 1
         expected_request = logging_config_pb2.UpdateExclusionRequest(
-            name=name, exclusion=exclusion, update_mask=update_mask)
+            name=name, exclusion=exclusion, update_mask=update_mask
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_update_exclusion_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")
         exclusion = {}
         update_mask = {}
@@ -493,13 +485,13 @@ def test_update_exclusion_exception(self):
     def test_delete_exclusion(self):
         channel = ChannelStub()
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup Request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")

         client.delete_exclusion(name)

@@ -511,13 +503,13 @@ def test_delete_exclusion(self):
     def test_delete_exclusion_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.ConfigServiceV2Client()

         # Setup request
-        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        name = client.exclusion_path("[PROJECT]", "[EXCLUSION]")

         with pytest.raises(CustomException):
             client.delete_exclusion(name)
diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py
index 548955147ebf..538a5e8bdfcf 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py
@@ -53,10 +53,7 @@ def __init__(self, responses=[]):
         self.responses = responses
         self.requests = []

-    def unary_unary(self,
-                    method,
-                    request_serializer=None,
-                    response_deserializer=None):
+    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
         return MultiCallableStub(method, self)
@@ -67,13 +64,13 @@ class CustomException(Exception):
 class TestLoggingServiceV2Client(object):
     def test_delete_log(self):
         channel = ChannelStub()
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()

         # Setup Request
-        log_name = client.log_path('[PROJECT]', '[LOG]')
+        log_name = client.log_path("[PROJECT]", "[LOG]")

         client.delete_log(log_name)
@@ -85,13 +82,13 @@ def test_delete_log(self):
     def test_delete_log_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()

         # Setup request
-        log_name = client.log_path('[PROJECT]', '[LOG]')
+        log_name = client.log_path("[PROJECT]", "[LOG]")

         with pytest.raises(CustomException):
             client.delete_log(log_name)
@@ -99,12 +96,11 @@ def test_delete_log_exception(self):
     def test_write_log_entries(self):
         # Setup Expected Response
         expected_response = {}
-        expected_response = logging_pb2.WriteLogEntriesResponse(
-            **expected_response)
+        expected_response = logging_pb2.WriteLogEntriesResponse(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -123,7 +119,7 @@ def test_write_log_entries(self):
     def test_write_log_entries_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -136,19 +132,15 @@ def test_write_log_entries_exception(self):
     def test_list_log_entries(self):
         # Setup Expected Response
-        next_page_token = ''
+        next_page_token = ""
         entries_element = {}
         entries = [entries_element]
-        expected_response = {
-            'next_page_token': next_page_token,
-            'entries': entries
-        }
-        expected_response = logging_pb2.ListLogEntriesResponse(
-            **expected_response)
+        expected_response = {"next_page_token": next_page_token, "entries": entries}
+        expected_response = logging_pb2.ListLogEntriesResponse(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -164,13 +156,14 @@ def test_list_log_entries(self):
         assert len(channel.requests) == 1
         expected_request = logging_pb2.ListLogEntriesRequest(
-            resource_names=resource_names)
+            resource_names=resource_names
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_list_log_entries_exception(self):
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -184,19 +177,20 @@ def test_list_log_entries_exception(self):
     def test_list_monitored_resource_descriptors(self):
         # Setup Expected Response
-        next_page_token = ''
+        next_page_token = ""
         resource_descriptors_element = {}
         resource_descriptors = [resource_descriptors_element]
         expected_response = {
-            'next_page_token': next_page_token,
-            'resource_descriptors': resource_descriptors
+            "next_page_token": next_page_token,
+            "resource_descriptors": resource_descriptors,
         }
         expected_response = logging_pb2.ListMonitoredResourceDescriptorsResponse(
-            **expected_response)
+            **expected_response
+        )

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -208,14 +202,13 @@ def test_list_monitored_resource_descriptors(self):
         assert expected_response.resource_descriptors[0] == resources[0]

         assert len(channel.requests) == 1
-        expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest(
-        )
+        expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest()
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_list_monitored_resource_descriptors_exception(self):
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()
@@ -226,24 +219,21 @@ def test_list_monitored_resource_descriptors_exception(self):
     def test_list_logs(self):
         # Setup Expected Response
-        next_page_token = ''
-        log_names_element = 'logNamesElement-1079688374'
+        next_page_token = ""
+        log_names_element = "logNamesElement-1079688374"
         log_names = [log_names_element]
-        expected_response = {
-            'next_page_token': next_page_token,
-            'log_names': log_names
-        }
+        expected_response = {"next_page_token": next_page_token, "log_names": log_names}
         expected_response = logging_pb2.ListLogsResponse(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_logs(parent)
         resources = list(paged_list_response)
@@ -258,13 +248,13 @@ def test_list_logs(self):
     def test_list_logs_exception(self):
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.LoggingServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_logs(parent)
         with pytest.raises(CustomException):
diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py
index c5c16551c67c..b12e191dc7f7 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py
@@ -51,10 +51,7 @@ def __init__(self, responses=[]):
         self.responses = responses
         self.requests = []

-    def unary_unary(self,
-                    method,
-                    request_serializer=None,
-                    response_deserializer=None):
+    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
         return MultiCallableStub(method, self)
@@ -65,25 +62,23 @@ class CustomException(Exception):
 class TestMetricsServiceV2Client(object):
     def test_list_log_metrics(self):
         # Setup Expected Response
-        next_page_token = ''
+        next_page_token = ""
         metrics_element = {}
         metrics = [metrics_element]
-        expected_response = {
-            'next_page_token': next_page_token,
-            'metrics': metrics
-        }
+        expected_response = {"next_page_token": next_page_token, "metrics": metrics}
         expected_response = logging_metrics_pb2.ListLogMetricsResponse(
-            **expected_response)
+            **expected_response
+        )

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_log_metrics(parent)
         resources = list(paged_list_response)
@@ -92,20 +87,19 @@ def test_list_log_metrics(self):
         assert expected_response.metrics[0] == resources[0]

         assert len(channel.requests) == 1
-        expected_request = logging_metrics_pb2.ListLogMetricsRequest(
-            parent=parent)
+        expected_request = logging_metrics_pb2.ListLogMetricsRequest(parent=parent)
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_list_log_metrics_exception(self):
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")

         paged_list_response = client.list_log_metrics(parent)
         with pytest.raises(CustomException):
@@ -113,74 +107,75 @@ def test_get_log_metric(self):
     def test_get_log_metric(self):
         # Setup Expected Response
-        name = 'name3373707'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
-        value_extractor = 'valueExtractor2047672534'
+        name = "name3373707"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
+        value_extractor = "valueExtractor2047672534"
         expected_response = {
-            'name': name,
-            'description': description,
-            'filter': filter_,
-            'value_extractor': value_extractor
+            "name": name,
+            "description": description,
+            "filter": filter_,
+            "value_extractor": value_extractor,
         }
         expected_response = logging_metrics_pb2.LogMetric(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup Request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")

         response = client.get_log_metric(metric_name)
         assert expected_response == response

         assert len(channel.requests) == 1
         expected_request = logging_metrics_pb2.GetLogMetricRequest(
-            metric_name=metric_name)
+            metric_name=metric_name
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_get_log_metric_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")

         with pytest.raises(CustomException):
             client.get_log_metric(metric_name)

     def test_create_log_metric(self):
         # Setup Expected Response
-        name = 'name3373707'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
-        value_extractor = 'valueExtractor2047672534'
+        name = "name3373707"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
+        value_extractor = "valueExtractor2047672534"
         expected_response = {
-            'name': name,
-            'description': description,
-            'filter': filter_,
-            'value_extractor': value_extractor
+            "name": name,
+            "description": description,
+            "filter": filter_,
+            "value_extractor": value_extractor,
         }
         expected_response = logging_metrics_pb2.LogMetric(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup Request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         metric = {}

         response = client.create_log_metric(parent, metric)
@@ -188,20 +183,21 @@ def test_create_log_metric(self):
         assert len(channel.requests) == 1
         expected_request = logging_metrics_pb2.CreateLogMetricRequest(
-            parent=parent, metric=metric)
+            parent=parent, metric=metric
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_create_log_metric_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup request
-        parent = client.project_path('[PROJECT]')
+        parent = client.project_path("[PROJECT]")
         metric = {}

         with pytest.raises(CustomException):
@@ -209,27 +205,27 @@ def test_update_log_metric(self):
     def test_update_log_metric(self):
         # Setup Expected Response
-        name = 'name3373707'
-        description = 'description-1724546052'
-        filter_ = 'filter-1274492040'
-        value_extractor = 'valueExtractor2047672534'
+        name = "name3373707"
+        description = "description-1724546052"
+        filter_ = "filter-1274492040"
+        value_extractor = "valueExtractor2047672534"
         expected_response = {
-            'name': name,
-            'description': description,
-            'filter': filter_,
-            'value_extractor': value_extractor
+            "name": name,
+            "description": description,
+            "filter": filter_,
+            "value_extractor": value_extractor,
         }
         expected_response = logging_metrics_pb2.LogMetric(**expected_response)

         # Mock the API response
         channel = ChannelStub(responses=[expected_response])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup Request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")
         metric = {}

         response = client.update_log_metric(metric_name, metric)
@@ -237,20 +233,21 @@ def test_update_log_metric(self):
         assert len(channel.requests) == 1
         expected_request = logging_metrics_pb2.UpdateLogMetricRequest(
-            metric_name=metric_name, metric=metric)
+            metric_name=metric_name, metric=metric
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_update_log_metric_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")
         metric = {}

         with pytest.raises(CustomException):
@@ -258,32 +255,33 @@ def test_delete_log_metric(self):
     def test_delete_log_metric(self):
         channel = ChannelStub()
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup Request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")

         client.delete_log_metric(metric_name)

         assert len(channel.requests) == 1
         expected_request = logging_metrics_pb2.DeleteLogMetricRequest(
-            metric_name=metric_name)
+            metric_name=metric_name
+        )
         actual_request = channel.requests[0][1]
         assert expected_request == actual_request

     def test_delete_log_metric_exception(self):
         # Mock the API response
         channel = ChannelStub(responses=[CustomException()])
-        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
         with patch as create_channel:
             create_channel.return_value = channel
             client = logging_v2.MetricsServiceV2Client()

         # Setup request
-        metric_name = client.metric_path('[PROJECT]', '[METRIC]')
+        metric_name = client.metric_path("[PROJECT]", "[METRIC]")

         with pytest.raises(CustomException):
             client.delete_log_metric(metric_name)
diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py
index f3762aea38d5..f606da573cec 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py
@@ -18,7 +18,6 @@

 class DjangoBase(unittest.TestCase):
-
     @classmethod
     def setUpClass(cls):
         from django.conf import settings
@@ -36,7 +35,6 @@ def tearDownClass(cls):

 class TestRequestMiddleware(DjangoBase):
-
     def _get_target_class(self):
         from google.cloud.logging.handlers.middleware import request

@@ -50,7 +48,7 @@ def test_process_request(self):
         from google.cloud.logging.handlers.middleware import request

         middleware = self._make_one()
-        mock_request = RequestFactory().get('/')
+        mock_request = RequestFactory().get("/")

         middleware.process_request(mock_request)

         django_request = request._get_django_request()
@@ -58,7 +56,6 @@ def test_process_request(self):

 class Test__get_django_request(DjangoBase):
-
     @staticmethod
     def _call_fut():
         from google.cloud.logging.handlers.middleware import request
@@ -68,11 +65,12 @@ def _call_fut():
     @staticmethod
     def _make_patch(new_locals):
         return mock.patch(
-            'google.cloud.logging.handlers.middleware.request._thread_locals',
-            new=new_locals)
+            "google.cloud.logging.handlers.middleware.request._thread_locals",
+            new=new_locals,
+        )

     def test_with_request(self):
-        thread_locals = mock.Mock(spec=['request'])
+        thread_locals = mock.Mock(spec=["request"])
         with self._make_patch(thread_locals):
             django_request = self._call_fut()
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py
index b53098633d4e..b5ba26fd17a7 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py
@@ -27,7 +27,6 @@

 class Test_get_trace_id_from_flask(unittest.TestCase):
-
     @staticmethod
     def _call_fut():
         from google.cloud.logging.handlers import _helpers
@@ -40,30 +39,28 @@ def create_app():
         app = flask.Flask(__name__)

-        @app.route('/')
+        @app.route("/")
         def index():
-            return 'test flask trace'  # pragma: NO COVER
+            return "test flask trace"  # pragma: NO COVER

         return app

     def test_no_context_header(self):
         app = self.create_app()

-        with app.test_request_context(
-                path='/',
-                headers={}):
+        with app.test_request_context(path="/", headers={}):
             trace_id = self._call_fut()

         self.assertIsNone(trace_id)

     def test_valid_context_header(self):
-        flask_trace_header = 'X_CLOUD_TRACE_CONTEXT'
-        expected_trace_id = 'testtraceidflask'
-        flask_trace_id = expected_trace_id + '/testspanid'
+        flask_trace_header = "X_CLOUD_TRACE_CONTEXT"
+        expected_trace_id = "testtraceidflask"
+        flask_trace_id = expected_trace_id + "/testspanid"

         app = self.create_app()
         context = app.test_request_context(
-            path='/',
-            headers={flask_trace_header: flask_trace_id})
+            path="/", headers={flask_trace_header: flask_trace_id}
+        )

         with context:
             trace_id = self._call_fut()
@@ -76,27 +73,24 @@ def get(self):
         from google.cloud.logging.handlers import _helpers

         trace_id = _helpers.get_trace_id_from_webapp2()
-        self.response.content_type = 'application/json'
+        self.response.content_type = "application/json"
         self.response.out.write(json.dumps(trace_id))


-@unittest.skipIf(six.PY3, 'webapp2 is Python 2 only')
+@unittest.skipIf(six.PY3, "webapp2 is Python 2 only")
 class Test_get_trace_id_from_webapp2(unittest.TestCase):
-
     @staticmethod
     def create_app():
         import webapp2

-        app = webapp2.WSGIApplication([
-            ('/', _GetTraceId),
-        ])
+        app = webapp2.WSGIApplication([("/", _GetTraceId)])

         return app

     def test_no_context_header(self):
         import webob

-        req = webob.BaseRequest.blank('/')
+        req = webob.BaseRequest.blank("/")
         response = req.get_response(self.create_app())

         trace_id = json.loads(response.body)
@@ -105,13 +99,13 @@ def test_no_context_header(self):
     def test_valid_context_header(self):
         import webob

-        webapp2_trace_header = 'X-Cloud-Trace-Context'
-        expected_trace_id = 'testtraceidwebapp2'
-        webapp2_trace_id = expected_trace_id + '/testspanid'
+        webapp2_trace_header = "X-Cloud-Trace-Context"
+        expected_trace_id = "testtraceidwebapp2"
+        webapp2_trace_id = expected_trace_id + "/testspanid"

         req = webob.BaseRequest.blank(
-            '/',
-            headers={webapp2_trace_header: webapp2_trace_id})
+            "/", headers={webapp2_trace_header: webapp2_trace_id}
+        )
         response = req.get_response(self.create_app())

         trace_id = json.loads(response.body)
@@ -119,7 +113,6 @@ def test_valid_context_header(self):

 class Test_get_trace_id_from_django(unittest.TestCase):
-
     @staticmethod
     def _call_fut():
         from google.cloud.logging.handlers import _helpers
@@ -145,7 +138,7 @@ def test_no_context_header(self):
         from django.test import RequestFactory
         from google.cloud.logging.handlers.middleware import request

-        django_request = RequestFactory().get('/')
+        django_request = RequestFactory().get("/")

         middleware = request.RequestMiddleware()
         middleware.process_request(django_request)
@@ -156,13 +149,13 @@ def test_valid_context_header(self):
         from django.test import RequestFactory
         from google.cloud.logging.handlers.middleware import request

-        django_trace_header = 'HTTP_X_CLOUD_TRACE_CONTEXT'
-        expected_trace_id = 'testtraceiddjango'
-        django_trace_id = expected_trace_id + '/testspanid'
+        django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT"
+        expected_trace_id = "testtraceiddjango"
+        django_trace_id = expected_trace_id + "/testspanid"

         django_request = RequestFactory().get(
-            '/',
-            **{django_trace_header: django_trace_id})
+            "/", **{django_trace_header: django_trace_id}
+        )

         middleware = request.RequestMiddleware()
         middleware.process_request(django_request)
@@ -172,7 +165,6 @@ def test_valid_context_header(self):

 class Test_get_trace_id(unittest.TestCase):
-
     @staticmethod
     def _call_fut():
         from google.cloud.logging.handlers import _helpers
@@ -181,11 +173,13 @@ def _call_fut():
     def _helper(self, django_return, flask_return):
         django_patch = mock.patch(
-            'google.cloud.logging.handlers._helpers.get_trace_id_from_django',
-            return_value=django_return)
+            "google.cloud.logging.handlers._helpers.get_trace_id_from_django",
+            return_value=django_return,
+        )
         flask_patch = mock.patch(
-            'google.cloud.logging.handlers._helpers.get_trace_id_from_flask',
-            return_value=flask_return)
+            "google.cloud.logging.handlers._helpers.get_trace_id_from_flask",
+            return_value=flask_return,
+        )

         with django_patch as django_mock:
             with flask_patch as flask_mock:
@@ -194,16 +188,14 @@ def _helper(self, django_return, flask_return):
         return django_mock, flask_mock, trace_id

     def test_from_django(self):
-        django_mock, flask_mock, trace_id = self._helper(
-            'test-django-trace-id', None)
+        django_mock, flask_mock, trace_id = self._helper("test-django-trace-id", None)

         self.assertEqual(trace_id, django_mock.return_value)
         django_mock.assert_called_once_with()
         flask_mock.assert_not_called()

     def test_from_flask(self):
-        django_mock, flask_mock, trace_id = self._helper(
-            None, 'test-flask-trace-id')
+        django_mock, flask_mock, trace_id = self._helper(None, "test-flask-trace-id")

         self.assertEqual(trace_id, flask_mock.return_value)
         django_mock.assert_called_once_with()
@@ -211,7 +203,8 @@ def test_from_flask(self):

     def test_from_django_and_flask(self):
         django_mock, flask_mock, trace_id = self._helper(
-            'test-django-trace-id', 'test-flask-trace-id')
+            "test-django-trace-id", "test-flask-trace-id"
+        )

         # Django wins.
         self.assertEqual(trace_id, django_mock.return_value)
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py
index 641d06ca3b27..13d21d58d33f 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py
@@ -19,7 +19,7 @@

 class TestAppEngineHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     def _get_target_class(self):
         from google.cloud.logging.handlers.app_engine import AppEngineHandler
@@ -30,85 +30,86 @@ def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)

     def test_constructor(self):
-        from google.cloud.logging.handlers.app_engine import (
-            _GAE_PROJECT_ENV_FLEX)
-        from google.cloud.logging.handlers.app_engine import (
-            _GAE_PROJECT_ENV_STANDARD)
+        from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_FLEX
+        from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_STANDARD
         from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV
         from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV

-        client = mock.Mock(project=self.PROJECT, spec=['project'])
+        client = mock.Mock(project=self.PROJECT, spec=["project"])

         # Verify that project/service/version are picked up from the
         # environment.
-        with mock.patch('os.environ', new={
-            _GAE_PROJECT_ENV_STANDARD: 'test_project',
-            _GAE_SERVICE_ENV: 'test_service',
-            _GAE_VERSION_ENV: 'test_version',
-        }):
+        with mock.patch(
+            "os.environ",
+            new={
+                _GAE_PROJECT_ENV_STANDARD: "test_project",
+                _GAE_SERVICE_ENV: "test_service",
+                _GAE_VERSION_ENV: "test_version",
+            },
+        ):
             handler = self._make_one(client, transport=_Transport)

         self.assertIs(handler.client, client)
-        self.assertEqual(handler.resource.type, 'gae_app')
-        self.assertEqual(handler.resource.labels['project_id'], 'test_project')
-        self.assertEqual(handler.resource.labels['module_id'], 'test_service')
-        self.assertEqual(handler.resource.labels['version_id'], 'test_version')
+        self.assertEqual(handler.resource.type, "gae_app")
+        self.assertEqual(handler.resource.labels["project_id"], "test_project")
+        self.assertEqual(handler.resource.labels["module_id"], "test_service")
+        self.assertEqual(handler.resource.labels["version_id"], "test_version")

         # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes
         # precedence over _GAE_PROJECT_ENV_STANDARD.
-        with mock.patch('os.environ', new={
-            _GAE_PROJECT_ENV_FLEX: 'test_project_2',
-            _GAE_PROJECT_ENV_STANDARD: 'test_project_should_be_overridden',
-            _GAE_SERVICE_ENV: 'test_service_2',
-            _GAE_VERSION_ENV: 'test_version_2',
-        }):
+        with mock.patch(
+            "os.environ",
+            new={
+                _GAE_PROJECT_ENV_FLEX: "test_project_2",
+                _GAE_PROJECT_ENV_STANDARD: "test_project_should_be_overridden",
+                _GAE_SERVICE_ENV: "test_service_2",
+                _GAE_VERSION_ENV: "test_version_2",
+            },
+        ):
             handler = self._make_one(client, transport=_Transport)

         self.assertIs(handler.client, client)
-        self.assertEqual(handler.resource.type, 'gae_app')
-        self.assertEqual(
-            handler.resource.labels['project_id'], 'test_project_2')
-        self.assertEqual(
-            handler.resource.labels['module_id'], 'test_service_2')
-        self.assertEqual(
-            handler.resource.labels['version_id'], 'test_version_2')
+        self.assertEqual(handler.resource.type, "gae_app")
+        self.assertEqual(handler.resource.labels["project_id"], "test_project_2")
+        self.assertEqual(handler.resource.labels["module_id"], "test_service_2")
+        self.assertEqual(handler.resource.labels["version_id"], "test_version_2")

     def test_emit(self):
-        client = mock.Mock(project=self.PROJECT, spec=['project'])
+        client = mock.Mock(project=self.PROJECT, spec=["project"])
         handler = self._make_one(client, transport=_Transport)
         gae_resource = handler.get_gae_resource()
         gae_labels = handler.get_gae_labels()
         trace = None
-        logname = 'app'
-        message = 'hello world'
-        record = logging.LogRecord(logname, logging, None, None, message,
-                                   None, None)
+        logname = "app"
+        message = "hello world"
+        record = logging.LogRecord(logname, logging, None, None, message, None, None)
         handler.emit(record)

         self.assertIs(handler.transport.client, client)
         self.assertEqual(handler.transport.name, logname)
         self.assertEqual(
             handler.transport.send_called_with,
-            (record, message, gae_resource, gae_labels, trace))
+            (record, message, gae_resource, gae_labels, trace),
+        )

     def _get_gae_labels_helper(self, trace_id):
         get_trace_patch = mock.patch(
-            'google.cloud.logging.handlers.app_engine.get_trace_id',
-            return_value=trace_id)
+            "google.cloud.logging.handlers.app_engine.get_trace_id",
+            return_value=trace_id,
+        )

-        client = mock.Mock(project=self.PROJECT, spec=['project'])
+        client = mock.Mock(project=self.PROJECT, spec=["project"])
         # The handler actually calls ``get_gae_labels()``.
         with get_trace_patch as mock_get_trace:
             handler = self._make_one(client, transport=_Transport)
             gae_labels = handler.get_gae_labels()
-            self.assertEqual(mock_get_trace.mock_calls,
-                             [mock.call()])
+            self.assertEqual(mock_get_trace.mock_calls, [mock.call()])

         return gae_labels

     def test_get_gae_labels_with_label(self):
         from google.cloud.logging.handlers import app_engine

-        trace_id = 'test-gae-trace-id'
+        trace_id = "test-gae-trace-id"
         gae_labels = self._get_gae_labels_helper(trace_id)
         expected_labels = {app_engine._TRACE_ID_LABEL: trace_id}
         self.assertEqual(gae_labels, expected_labels)
@@ -119,7 +120,6 @@ def test_get_gae_labels_without_label(self):

 class _Transport(object):
-
     def __init__(self, client, name):
         self.client = client
         self.name = name
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
index b2b461e8bae0..cbe833146c57 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
@@ -16,11 +16,12 @@

 class TestContainerEngineHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     def _get_target_class(self):
         from google.cloud.logging.handlers.container_engine import (
-            ContainerEngineHandler)
+            ContainerEngineHandler,
+        )

         return ContainerEngineHandler
@@ -32,19 +33,17 @@ def test_format(self):
         import json

         handler = self._make_one()
-        logname = 'loggername'
-        message = 'hello world'
-        record = logging.LogRecord(logname, logging.INFO, None, None,
-                                   message, None, None)
+        logname = "loggername"
+        message = "hello world"
+        record = logging.LogRecord(
+            logname, logging.INFO, None, None, message, None, None
+        )
         record.created = 5.03
         expected_payload = {
-            'message': message,
-            'timestamp': {
-                'seconds': 5,
-                'nanos': int(.03 * 1e9)
-            },
-            'thread': record.thread,
-            'severity': record.levelname,
+            "message": message,
+            "timestamp": {"seconds": 5, "nanos": int(0.03 * 1e9)},
+            "thread": record.thread,
+            "severity": record.levelname,
         }

         payload = handler.format(record)
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
index a23a0296dced..ff738046d892 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
@@ -18,7 +18,7 @@

 class TestCloudLoggingHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     @staticmethod
     def _get_target_class():
@@ -39,20 +39,20 @@ def test_emit(self):
         client = _Client(self.PROJECT)
         handler = self._make_one(
-            client, transport=_Transport, resource=_GLOBAL_RESOURCE)
-        logname = 'loggername'
-        message = 'hello world'
-        record = logging.LogRecord(logname, logging, None, None, message,
-                                   None, None)
+            client, transport=_Transport, resource=_GLOBAL_RESOURCE
+        )
+        logname = "loggername"
+        message = "hello world"
+        record = logging.LogRecord(logname, logging, None, None, message, None, None)
         handler.emit(record)

         self.assertEqual(
             handler.transport.send_called_with,
-            (record, message, _GLOBAL_RESOURCE, None))
+            (record, message, _GLOBAL_RESOURCE, None),
+        )


 class TestSetupLogging(unittest.TestCase):
-
     def _call_fut(self, handler, excludes=None):
         from google.cloud.logging.handlers.handlers import setup_logging
@@ -69,8 +69,8 @@ def test_setup_logging(self):
         self.assertIn(handler, root_handlers)

     def test_setup_logging_excludes(self):
-        INCLUDED_LOGGER_NAME = 'includeme'
-        EXCLUDED_LOGGER_NAME = 'excludeme'
+        INCLUDED_LOGGER_NAME = "includeme"
+        EXCLUDED_LOGGER_NAME = "excludeme"

         handler = _Handler(logging.INFO)
         self._call_fut(handler, (EXCLUDED_LOGGER_NAME,))
@@ -91,7 +91,6 @@ def tearDown(self):

 class _Handler(object):
-
     def __init__(self, level):
         self.level = level
@@ -103,13 +102,11 @@ def release(self):

 class _Client(object):
-
     def __init__(self, project):
         self.project = project


 class _Transport(object):
-
     def __init__(self, client, name):
         pass
diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
index e06083d2b756..6842bc455968 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py
@@ -20,26 +20,25 @@

 class TestBackgroundThreadHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     @staticmethod
     def _get_target_class():
-        from google.cloud.logging.handlers.transports import (
-            BackgroundThreadTransport)
+        from google.cloud.logging.handlers.transports import BackgroundThreadTransport

         return BackgroundThreadTransport

     def _make_one(self, *args, **kw):
         worker_patch = mock.patch(
-            'google.cloud.logging.handlers.transports.'
-            'background_thread._Worker',
-            autospec=True)
+            "google.cloud.logging.handlers.transports." "background_thread._Worker",
+            autospec=True,
+        )
         with worker_patch as worker_mock:
             return self._get_target_class()(*args, **kw), worker_mock

     def test_constructor(self):
         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"

         transport, worker = self._make_one(client, name)
@@ -50,70 +49,70 @@ def test_send(self):
         from google.cloud.logging.logger import _GLOBAL_RESOURCE

         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"

         transport, _ = self._make_one(client, name)

-        python_logger_name = 'mylogger'
-        message = 'hello world'
+        python_logger_name = "mylogger"
+        message = "hello world"
         record = logging.LogRecord(
-            python_logger_name, logging.INFO,
-            None, None, message, None, None)
+            python_logger_name, logging.INFO, None, None, message, None, None
+        )

         transport.send(record, message, _GLOBAL_RESOURCE)

         transport.worker.enqueue.assert_called_once_with(
-            record, message, _GLOBAL_RESOURCE, None,
-            trace=None, span_id=None)
+            record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=None
+        )

     def test_trace_send(self):
         from google.cloud.logging.logger import _GLOBAL_RESOURCE

         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"

         transport, _ = self._make_one(client, name)

-        python_logger_name = 'mylogger'
-        message = 'hello world'
-        trace = 'the-project/trace/longlogTraceid'
+        python_logger_name = "mylogger"
+        message = "hello world"
+        trace = "the-project/trace/longlogTraceid"
         record = logging.LogRecord(
-            python_logger_name, logging.INFO,
-            None, None, message, None, None)
+            python_logger_name, logging.INFO, None, None, message, None, None
+        )

         transport.send(record, message, _GLOBAL_RESOURCE, trace=trace)

         transport.worker.enqueue.assert_called_once_with(
-            record, message, _GLOBAL_RESOURCE, None,
-            trace=trace, span_id=None)
+            record, message, _GLOBAL_RESOURCE, None, trace=trace, span_id=None
+        )

     def test_span_send(self):
         from google.cloud.logging.logger import _GLOBAL_RESOURCE

         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"

         transport, _ = self._make_one(client, name)

-        python_logger_name = 'mylogger'
-        message = 'hello world'
-        span_id = 'the-project/trace/longlogTraceid/span/123456789012abbacdac'
+        python_logger_name = "mylogger"
+        message = "hello world"
+        span_id = "the-project/trace/longlogTraceid/span/123456789012abbacdac"
         record = logging.LogRecord(
-            python_logger_name, logging.INFO,
-            None, None, message, None, None)
+            python_logger_name, logging.INFO, None, None, message, None, None
+        )

         transport.send(record, message, _GLOBAL_RESOURCE, span_id=span_id)

         transport.worker.enqueue.assert_called_once_with(
-            record, message, _GLOBAL_RESOURCE, None,
-            trace=None, span_id=span_id)
+            record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=span_id
+        )

     def test_flush(self):
         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"

         transport, _ = self._make_one(client, name)
@@ -123,28 +122,27 @@ def test_flush(self):
     def test_worker(self):
         client = _Client(self.PROJECT)
-        name = 'python_logger'
+        name = "python_logger"
         batch_size = 30
-        grace_period = 20.
+        grace_period = 20.0
         max_latency = 0.1
-        transport, worker = self._make_one(client,
-                                           name,
-                                           grace_period=grace_period,
-                                           batch_size=batch_size,
-                                           max_latency=max_latency)
-        worker_grace_period = worker.call_args[1]['grace_period']  # **kwargs.
-        worker_batch_size = worker.call_args[1]['max_batch_size']
-        worker_max_latency = worker.call_args[1]['max_latency']
-        self.assertEqual(worker_grace_period,
-                         grace_period)
-        self.assertEqual(worker_batch_size,
-                         batch_size)
-        self.assertEqual(worker_max_latency,
-                         max_latency)
+        transport, worker = self._make_one(
+            client,
+            name,
+            grace_period=grace_period,
+            batch_size=batch_size,
+            max_latency=max_latency,
+        )
+        worker_grace_period = worker.call_args[1]["grace_period"]  # **kwargs.
+        worker_batch_size = worker.call_args[1]["max_batch_size"]
+        worker_max_latency = worker.call_args[1]["max_latency"]
+        self.assertEqual(worker_grace_period, grace_period)
+        self.assertEqual(worker_batch_size, batch_size)
+        self.assertEqual(worker_max_latency, max_latency)


 class Test_Worker(unittest.TestCase):
-    NAME = 'python_logger'
+    NAME = "python_logger"

     @staticmethod
     def _get_target_class():
@@ -156,8 +154,8 @@ def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)

     def _start_with_thread_patch(self, worker):
-        with mock.patch('threading.Thread', new=_Thread) as thread_mock:
-            with mock.patch('atexit.register') as atexit_mock:
+        with mock.patch("threading.Thread", new=_Thread) as thread_mock:
+            with mock.patch("atexit.register") as atexit_mock:
                 worker.start()
                 return thread_mock, atexit_mock
@@ -168,8 +166,11 @@ def test_constructor(self):
         max_latency = 0.1

         worker = self._make_one(
-            logger, grace_period=grace_period, max_batch_size=max_batch_size,
-            max_latency=max_latency)
+            logger,
+            grace_period=grace_period,
+            max_batch_size=max_batch_size,
+            max_latency=max_latency,
+        )

         self.assertEqual(worker._cloud_logger, logger)
         self.assertEqual(worker._grace_period, grace_period)
@@ -189,8 +190,7 @@ def test_start(self):
         self.assertIsNotNone(worker._thread)
         self.assertTrue(worker._thread.daemon)
         self.assertEqual(worker._thread._target, worker._thread_main)
-        self.assertEqual(
-            worker._thread._name, background_thread._WORKER_THREAD_NAME)
+        self.assertEqual(worker._thread._name, background_thread._WORKER_THREAD_NAME)
         atexit_mock.assert_called_once_with(worker._main_thread_terminated)

         # Calling start again should not start a new thread.
@@ -210,8 +210,7 @@ def test_stop(self):
         worker.stop(grace_period)

         self.assertEqual(worker._queue.qsize(), 1)
-        self.assertEqual(
-            worker._queue.get(), background_thread._WORKER_TERMINATOR)
+        self.assertEqual(worker._queue.get(), background_thread._WORKER_TERMINATOR)
         self.assertFalse(worker.is_alive)
         self.assertIsNone(worker._thread)
         self.assertEqual(thread._timeout, grace_period)
@@ -244,7 +243,7 @@ def test__main_thread_terminated_non_empty_queue(self):
         worker = self._make_one(_Logger(self.NAME))

         self._start_with_thread_patch(worker)
-        worker.enqueue(mock.Mock(), '')
+        worker.enqueue(mock.Mock(), "")
         worker._main_thread_terminated()

         self.assertFalse(worker.is_alive)
@@ -254,7 +253,7 @@ def test__main_thread_terminated_did_not_join(self):
         self._start_with_thread_patch(worker)
         worker._thread._terminate_on_join = False
-        worker.enqueue(mock.Mock(), '')
+        worker.enqueue(mock.Mock(), "")
         worker._main_thread_terminated()

         self.assertFalse(worker.is_alive)
@@ -262,8 +261,8 @@ def test__main_thread_terminated_did_not_join(self):
     @staticmethod
     def _enqueue_record(worker, message):
         record = logging.LogRecord(
-            'python_logger', logging.INFO,
-            None, None, message, None, None)
+            "python_logger", logging.INFO, None, None, message, None, None
+        )
         worker.enqueue(record, message)

     def test__thread_main(self):
@@ -272,8 +271,8 @@ def test__thread_main(self):
         worker = self._make_one(_Logger(self.NAME))

         # Enqueue two records and the termination signal.
-        self._enqueue_record(worker, '1')
-        self._enqueue_record(worker, '2')
+        self._enqueue_record(worker, "1")
+        self._enqueue_record(worker, "2")
         worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)

         worker._thread_main()
@@ -289,7 +288,7 @@ def test__thread_main_error(self):
         worker._cloud_logger._batch_cls = _RaisingBatch

         # Enqueue one record and the termination signal.
-        self._enqueue_record(worker, '1')
+        self._enqueue_record(worker, "1")
         worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)

         worker._thread_main()
@@ -305,10 +304,10 @@ def test__thread_main_batches(self):
         # Enqueue three records and the termination signal. This should be
         # enough to perform two separate batches and a third loop with just
         # the exit.
-        self._enqueue_record(worker, '1')
-        self._enqueue_record(worker, '2')
-        self._enqueue_record(worker, '3')
-        self._enqueue_record(worker, '4')
+        self._enqueue_record(worker, "1")
+        self._enqueue_record(worker, "2")
+        self._enqueue_record(worker, "3")
+        self._enqueue_record(worker, "4")
         worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)

         worker._thread_main()
@@ -317,7 +316,7 @@ def test__thread_main_batches(self):
         self.assertFalse(worker._cloud_logger._batch.commit_called)
         self.assertEqual(worker._queue.qsize(), 0)

-    @mock.patch('time.time', autospec=True, return_value=1)
+    @mock.patch("time.time", autospec=True, return_value=1)
     def test__thread_main_max_latency(self, time):
         # Note: this test is a bit brittle as it assumes the operation of
         # _get_many invokes queue.get() followed by queue._get(). It fails
@@ -329,14 +328,13 @@ def test__thread_main_max_latency(self, time):
         # Use monotonically increasing time.
         time.side_effect = range(1, 6)

-        worker = self._make_one(
-            _Logger(self.NAME), max_latency=2, max_batch_size=10)
+        worker = self._make_one(_Logger(self.NAME), max_latency=2, max_batch_size=10)
         worker._queue = mock.create_autospec(queue.Queue, instance=True)

         worker._queue.get.side_effect = [
-            {'info': {'message': '1'}},  # Single record.
+            {"info": {"message": "1"}},  # Single record.
             queue.Empty(),  # Emulate a queue.get() timeout.
-            {'info': {'message': '1'}},  # Second record.
+            {"info": {"message": "1"}},  # Second record.
             background_thread._WORKER_TERMINATOR,  # Stop the thread.
             queue.Empty(),  # Emulate a queue.get() timeout.
         ]
@@ -377,13 +375,15 @@ def test__thread_main_max_latency(self, time):
         # 3 and now will be 4, and finally with timeout=0 as start will be 3
         # and now will be 5.
         #
-        worker._queue.get.assert_has_calls([
-            mock.call(),
-            mock.call(timeout=1),
-            mock.call(),
-            mock.call(timeout=1),
-            mock.call(timeout=0)
-        ])
+        worker._queue.get.assert_has_calls(
+            [
+                mock.call(),
+                mock.call(timeout=1),
+                mock.call(),
+                mock.call(timeout=1),
+                mock.call(timeout=0),
+            ]
+        )

     def test_flush(self):
         worker = self._make_one(_Logger(self.NAME))
@@ -395,7 +395,6 @@ def test_flush(self):

 class _Thread(object):
-
     def __init__(self, target, name):
         self._target = target
         self._name = name
@@ -419,22 +418,26 @@ def join(self, timeout=None):

 class _Batch(object):
-
     def __init__(self):
         self.entries = []
         self.commit_called = False
         self.commit_count = None

     def log_struct(
-            self, info, severity=logging.INFO, resource=None, labels=None,
-            trace=None, span_id=None):
+        self,
+        info,
+        severity=logging.INFO,
+        resource=None,
+        labels=None,
+        trace=None,
+        span_id=None,
+    ):
         from google.cloud.logging.logger import _GLOBAL_RESOURCE

         assert resource is None
         resource = _GLOBAL_RESOURCE
-        self.log_struct_called_with = (info, severity, resource, labels,
-                                       trace, span_id)
+        self.log_struct_called_with = (info, severity, resource, labels, trace, span_id)
         self.entries.append(info)

     def commit(self):
@@ -446,11 +449,10 @@ def commit(self):
 class _RaisingBatch(_Batch):
     def commit(self):
         self.commit_called = True
-        raise ValueError('This batch raises on commit.')
+        raise ValueError("This batch raises on commit.")


 class _Logger(object):
-
     def __init__(self, name):
         self.name = name
         self._batch_cls = _Batch
@@ -464,15 +466,13 @@ def batch(self):

 class _Client(object):
-
     def __init__(self, project, _http=None, credentials=None):
         import mock

         self.project = project
         self._http = _http
         self._credentials = credentials
-        self._connection = mock.Mock(
-            credentials=credentials, spec=['credentials'])
+        self._connection = mock.Mock(credentials=credentials, spec=["credentials"])

     def logger(self, name):  # pylint: disable=unused-argument
         self._logger = _Logger(name)
diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py
index 24e2d31ae02a..03612e115a98 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py
@@ -17,7 +17,7 @@

 class TestBaseHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     @staticmethod
     def _get_target_class():
diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py
index 6a3f5dcf7a67..ca6e9260c7a1 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py
@@ -18,7 +18,7 @@

 class TestSyncHandler(unittest.TestCase):
-    PROJECT = 'PROJECT'
+    PROJECT = "PROJECT"

     @staticmethod
     def _get_target_class():
@@ -31,31 +31,27 @@ def _make_one(self, *args, **kw):

     def test_ctor(self):
         client = _Client(self.PROJECT)
-        NAME = 'python_logger'
+        NAME = "python_logger"
         transport = self._make_one(client, NAME)
-        self.assertEqual(transport.logger.name, 'python_logger')
+        self.assertEqual(transport.logger.name, "python_logger")

     def test_send(self):
         from google.cloud.logging.logger import _GLOBAL_RESOURCE

         client = _Client(self.PROJECT)
-        stackdriver_logger_name = 'python'
-        python_logger_name = 'mylogger'
+        stackdriver_logger_name = "python"
+        python_logger_name = "mylogger"
         transport = self._make_one(client, stackdriver_logger_name)
-        message = 'hello world'
-        record = logging.LogRecord(python_logger_name, logging.INFO,
-                                   None, None, message, None, None)
+        message = "hello world"
+        record = logging.LogRecord(
+            python_logger_name, logging.INFO, None, None, message, None, None
+        )

         transport.send(record, message, _GLOBAL_RESOURCE)

-        EXPECTED_STRUCT = {
-            'message': message,
-            'python_logger': python_logger_name,
-        }
-        EXPECTED_SENT = (EXPECTED_STRUCT, 'INFO', _GLOBAL_RESOURCE, None,
-                         None, None)
-        self.assertEqual(
-            transport.logger.log_struct_called_with, EXPECTED_SENT)
+        EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name}
+        EXPECTED_SENT = (EXPECTED_STRUCT, "INFO", _GLOBAL_RESOURCE, None, None, None)
+        self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT)


 class _Logger(object):
@@ -64,15 +60,26 @@ def __init__(self, name):
         self.name = name

-    def log_struct(self, message, severity=None,
-                   resource=_GLOBAL_RESOURCE, labels=None,
-                   trace=None, span_id=None):
-        self.log_struct_called_with = (message, severity, resource, labels,
-                                       trace, span_id)
+    def log_struct(
+        self,
+        message,
+        severity=None,
+        resource=_GLOBAL_RESOURCE,
+        labels=None,
+        trace=None,
+        span_id=None,
+    ):
+        self.log_struct_called_with = (
+            message,
+            severity,
+            resource,
+            labels,
+            trace,
+            span_id,
+        )


 class _Client(object):
-
     def __init__(self, project):
         self.project = project
@@ -82,7 +89,6 @@ def logger(self, name):  # pylint: disable=unused-argument

 class _Handler(object):
-
     def __init__(self, level):
         self.level = level  # pragma: NO COVER
diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py
index c578d6d852bb..03ff0a7a14f6 100644
--- a/packages/google-cloud-logging/tests/unit/test__gapic.py
+++ b/packages/google-cloud-logging/tests/unit/test__gapic.py
@@ -30,28 +30,26 @@
 from google.cloud.logging_v2.proto import logging_metrics_pb2

-PROJECT = 'PROJECT'
-PROJECT_PATH = 'projects/%s' % (PROJECT,)
-FILTER = 'logName:syslog AND severity>=ERROR'
+PROJECT = "PROJECT"
+PROJECT_PATH = "projects/%s" % (PROJECT,)
+FILTER = "logName:syslog AND severity>=ERROR"


 class Test_LoggingAPI(object):
-    LOG_NAME = 'log_name'
-    LOG_PATH = 'projects/%s/logs/%s' % (PROJECT, LOG_NAME)
+    LOG_NAME = "log_name"
+    LOG_PATH = "projects/%s/logs/%s" % (PROJECT, LOG_NAME)

     @staticmethod
     def make_logging_api():
         channel = grpc_helpers.ChannelStub()
-        gapic_client = logging_service_v2_client.LoggingServiceV2Client(
-            channel=channel)
+        gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel)
         handwritten_client = mock.Mock()
         api = _gapic._LoggingAPI(gapic_client, handwritten_client)
         return channel, api

     def test_ctor(self):
         channel = grpc_helpers.ChannelStub()
-        gapic_client = logging_service_v2_client.LoggingServiceV2Client(
-            channel=channel)
+        gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel)
         api = _gapic._LoggingAPI(gapic_client, mock.sentinel.client)
         assert api._gapic_api is gapic_client
         assert api._client is mock.sentinel.client
@@ -60,12 +58,12 @@ def test_list_entries(self):
         channel, api = self.make_logging_api()

         log_entry_msg = log_entry_pb2.LogEntry(
-            log_name=self.LOG_PATH,
-            text_payload='text')
+            log_name=self.LOG_PATH, text_payload="text"
+        )
         channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse(
-            entries=[log_entry_msg])
-        result = api.list_entries(
-            [PROJECT], FILTER, google.cloud.logging.DESCENDING)
+            entries=[log_entry_msg]
+        )
+        result = api.list_entries([PROJECT], FILTER, google.cloud.logging.DESCENDING)

         entries = list(result)
@@ -73,7 +71,7 @@ def test_list_entries(self):
         assert len(entries) == 1
         entry = entries[0]
         assert isinstance(entry, google.cloud.logging.entries.TextEntry)
-        assert entry.payload == 'text'
+        assert entry.payload == "text"

         # Check the request
         assert len(channel.ListLogEntries.requests) == 1
@@ -85,12 +83,15 @@ def test_list_entries_with_options(self):
         channel, api = self.make_logging_api()

-        channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse(
-            entries=[])
+        channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse(entries=[])

         result = api.list_entries(
-            [PROJECT], FILTER, google.cloud.logging.ASCENDING, page_size=42,
-            page_token='token')
+            [PROJECT],
+            FILTER,
+            google.cloud.logging.ASCENDING,
+            page_size=42,
+            page_token="token",
+        )

         list(result)
@@ -101,7 +102,7 @@ def test_list_entries_with_options(self):
         assert request.filter == FILTER
         assert request.order_by == google.cloud.logging.ASCENDING
         assert request.page_size == 42
-        assert request.page_token == 'token'
+        assert request.page_token == "token"

     def test_write_entries_single(self):
         channel, api = self.make_logging_api()
@@ -109,9 +110,9 @@ def test_write_entries_single(self):
         channel.WriteLogEntries.response = empty_pb2.Empty()

         entry = {
-            'logName': self.LOG_PATH,
-            'resource': {'type': 'global'},
-            'textPayload': 'text',
+            "logName": self.LOG_PATH,
+            "resource": {"type": "global"},
+            "textPayload": "text",
         }

         api.write_entries([entry])
@@ -121,9 +122,9 @@ def test_write_entries_single(self):
         request = channel.WriteLogEntries.requests[0]
         assert request.partial_success is False
         assert len(request.entries) == 1
-        assert request.entries[0].log_name == entry['logName']
-        assert request.entries[0].resource.type == entry['resource']['type']
-        assert request.entries[0].text_payload == 'text'
+        assert request.entries[0].log_name == entry["logName"]
+        assert request.entries[0].resource.type == entry["resource"]["type"]
+        assert request.entries[0].text_payload == "text"

     def test_logger_delete(self):
         channel, api = self.make_logging_api()
@@ -138,24 +139,22 @@ def test_logger_delete(self):

 class Test_SinksAPI(object):
-    SINK_NAME = 'sink_name'
-    SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME)
-    DESTINATION_URI = 'faux.googleapis.com/destination'
-    SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com'
+    SINK_NAME = "sink_name"
+    SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME)
+    DESTINATION_URI = "faux.googleapis.com/destination"
+    SINK_WRITER_IDENTITY = "serviceAccount:project-123@example.com"

     @staticmethod
     def make_sinks_api():
         channel = grpc_helpers.ChannelStub()
-        gapic_client = config_service_v2_client.ConfigServiceV2Client(
-            channel=channel)
+        gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel)
         handwritten_client = mock.Mock()
         api = _gapic._SinksAPI(gapic_client, handwritten_client)
         return channel, api

     def test_ctor(self):
         channel = grpc_helpers.ChannelStub()
-        gapic_client = config_service_v2_client.ConfigServiceV2Client(
-            channel=channel)
+        gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel)
         api = _gapic._SinksAPI(gapic_client, mock.sentinel.client)
         assert api._gapic_api is gapic_client
         assert api._client is mock.sentinel.client
@@ -164,11 +163,11 @@ def test_list_sinks(self):
         channel, api = self.make_sinks_api()

         sink_msg = logging_config_pb2.LogSink(
-            name=self.SINK_PATH,
-            destination=self.DESTINATION_URI,
-            filter=FILTER)
+            name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER
+        )
         channel.ListSinks.response = logging_config_pb2.ListSinksResponse(
-            sinks=[sink_msg])
+            sinks=[sink_msg]
+        )

         result = api.list_sinks(PROJECT)
         sinks = list(result)
@@ -189,18 +188,17 @@ def test_list_sinks(self):
     def test_list_sinks_with_options(self):
         channel, api = self.make_sinks_api()

-        channel.ListSinks.response = logging_config_pb2.ListSinksResponse(
-            sinks=[])
+        channel.ListSinks.response = logging_config_pb2.ListSinksResponse(sinks=[])

-        result = api.list_sinks(PROJECT, page_size=42, page_token='token')
+        result = api.list_sinks(PROJECT, page_size=42, page_token="token")
         list(result)

         # Check the request
         assert len(channel.ListSinks.requests) == 1
         request = channel.ListSinks.requests[0]
-        assert request.parent == 'projects/%s' % PROJECT
+        assert request.parent == "projects/%s" % PROJECT
         assert request.page_size == 42
-        assert request.page_token == 'token'
+        assert request.page_token == "token"

     def test_sink_create(self):
         channel, api = self.make_sinks_api()
@@ -222,10 +220,10 @@ def test_sink_create(self):
         # Check response
         assert result == {
-            'name': self.SINK_NAME,
-            'filter': FILTER,
-            'destination': self.DESTINATION_URI,
-            'writerIdentity': self.SINK_WRITER_IDENTITY,
+            "name": self.SINK_NAME,
+            "filter": FILTER,
+            "destination": self.DESTINATION_URI,
+            "writerIdentity": self.SINK_WRITER_IDENTITY,
         }

         # Check request
@@ -241,17 +239,16 @@ def test_sink_get(self):
         channel, api = self.make_sinks_api()

         channel.GetSink.response = logging_config_pb2.LogSink(
-            name=self.SINK_PATH,
-            destination=self.DESTINATION_URI,
-            filter=FILTER)
+            name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER
+        )

         response = api.sink_get(PROJECT, self.SINK_NAME)

         # Check response
         assert response == {
-            'name': self.SINK_PATH,
-            'filter': FILTER,
-            'destination': self.DESTINATION_URI
+            "name": self.SINK_PATH,
+            "filter": FILTER,
+            "destination": self.DESTINATION_URI,
         }

         # Check request
@@ -274,14 +271,15 @@ def test_sink_update(self):
             self.SINK_NAME,
             FILTER,
             self.DESTINATION_URI,
-            unique_writer_identity=True)
+            unique_writer_identity=True,
+        )

         # Check response
         assert result == {
-            'name': self.SINK_NAME,
-            'filter': FILTER,
-            'destination': self.DESTINATION_URI,
-            'writerIdentity': self.SINK_WRITER_IDENTITY,
+            "name": self.SINK_NAME,
+            "filter": FILTER,
+            "destination": self.DESTINATION_URI,
+            "writerIdentity": self.SINK_WRITER_IDENTITY,
         }

         # Check request
@@ -306,23 +304,21 @@ def test_sink_delete(self):

 class Test_MetricsAPI(object):
-    METRIC_NAME = 'metric_name'
-    METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME)
-    DESCRIPTION = 'Description'
+    METRIC_NAME = "metric_name"
+    METRIC_PATH = "projects/%s/metrics/%s" % (PROJECT, METRIC_NAME)
+    DESCRIPTION = "Description"

     @staticmethod
     def make_metrics_api():
         channel = grpc_helpers.ChannelStub()
-        gapic_client = metrics_service_v2_client.MetricsServiceV2Client(
-            channel=channel)
+        gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel)
handwritten_client = mock.Mock() api = _gapic._MetricsAPI(gapic_client, handwritten_client) return channel, api def test_ctor(self): channel = grpc_helpers.ChannelStub() - gapic_client = metrics_service_v2_client.MetricsServiceV2Client( - channel=channel) + gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel) api = _gapic._MetricsAPI(gapic_client, mock.sentinel.client) assert api._gapic_api is gapic_client assert api._client is mock.sentinel.client @@ -331,12 +327,11 @@ def test_list_metrics(self): channel, api = self.make_metrics_api() sink_msg = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=FILTER) - channel.ListLogMetrics.response = ( - logging_metrics_pb2.ListLogMetricsResponse( - metrics=[sink_msg])) + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( + metrics=[sink_msg] + ) result = api.list_metrics(PROJECT) metrics = list(result) @@ -357,11 +352,11 @@ def test_list_metrics(self): def test_list_metrics_options(self): channel, api = self.make_metrics_api() - channel.ListLogMetrics.response = ( - logging_metrics_pb2.ListLogMetricsResponse( - metrics=[])) + channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( + metrics=[] + ) - result = api.list_metrics(PROJECT, page_size=42, page_token='token') + result = api.list_metrics(PROJECT, page_size=42, page_token="token") list(result) # Check the request @@ -369,15 +364,14 @@ def test_list_metrics_options(self): request = channel.ListLogMetrics.requests[0] assert request.parent == PROJECT_PATH assert request.page_size == 42 - assert request.page_token == 'token' + assert request.page_token == "token" def test_metric_create(self): channel, api = self.make_metrics_api() channel.CreateLogMetric.response = empty_pb2.Empty() - api.metric_create( - PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + api.metric_create(PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) # Check the request assert len(channel.CreateLogMetric.requests) == 1 @@ -391,17 +385,16 @@ def test_metric_get(self): channel, api = self.make_metrics_api() channel.GetLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=FILTER) + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) response = api.metric_get(PROJECT, self.METRIC_NAME) # Check the response assert response == { - 'name': self.METRIC_PATH, - 'filter': FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_PATH, + "filter": FILTER, + "description": self.DESCRIPTION, } # Check the request @@ -413,18 +406,18 @@ def test_metric_update(self): channel, api = self.make_metrics_api() channel.UpdateLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=FILTER) + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) response = api.metric_update( - PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION + ) # Check the response assert response == { - 'name': self.METRIC_PATH, - 'filter': FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_PATH, + "filter": FILTER, + "description": self.DESCRIPTION, } # Check the request @@ -448,7 +441,6 @@ def test_metric_delete(self): class Test__parse_log_entry(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from 
google.cloud.logging._gapic import _parse_log_entry @@ -458,23 +450,19 @@ def _call_fut(*args, **kwargs): def test_simple(self): from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') + entry_pb = LogEntry(log_name=u"lol-jk", text_payload=u"bah humbug") result = self._call_fut(entry_pb) - expected = { - 'logName': entry_pb.log_name, - 'textPayload': entry_pb.text_payload, - } + expected = {"logName": entry_pb.log_name, "textPayload": entry_pb.text_payload} self.assertEqual(result, expected) - @mock.patch('google.cloud.logging._gapic.MessageToDict', - side_effect=TypeError) + @mock.patch("google.cloud.logging._gapic.MessageToDict", side_effect=TypeError) def test_non_registry_failure(self, msg_to_dict_mock): - entry_pb = mock.Mock(spec=['HasField']) + entry_pb = mock.Mock(spec=["HasField"]) entry_pb.HasField.return_value = False with self.assertRaises(TypeError): self._call_fut(entry_pb) - entry_pb.HasField.assert_called_once_with('proto_payload') + entry_pb.HasField.assert_called_once_with("proto_payload") msg_to_dict_mock.assert_called_once_with(entry_pb) def test_unregistered_type(self): @@ -484,25 +472,24 @@ def test_unregistered_type(self): from google.protobuf.timestamp_pb2 import Timestamp pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + type_name = "google.bigtable.admin.v2.UpdateClusterMetadata" # Make sure the descriptor is not known in the registry. with self.assertRaises(KeyError): pool.FindMessageTypeByName(type_name) - type_url = 'type.googleapis.com/' + type_name - metadata_bytes = ( - b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') + type_url = "type.googleapis.com/" + type_name + metadata_bytes = b"\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05" any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) timestamp = Timestamp(seconds=61, nanos=1234000) entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) result = self._call_fut(entry_pb) self.assertEqual(len(result), 2) - self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') + self.assertEqual(result["timestamp"], "1970-01-01T00:01:01.001234Z") # NOTE: This "hack" is needed on Windows, where the equality check # for an ``Any`` instance fails on unregistered types. - self.assertEqual(result['protoPayload'].type_url, type_url) - self.assertEqual(result['protoPayload'].value, metadata_bytes) + self.assertEqual(result["protoPayload"].type_url, type_url) + self.assertEqual(result["protoPayload"].value, metadata_bytes) def test_registered_type(self): from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry @@ -512,35 +499,27 @@ def test_registered_type(self): from google.protobuf.struct_pb2 import Value pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' + type_name = "google.protobuf.Struct" # Make sure the descriptor is known in the registry. 
descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') - - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' - struct_pb = Struct( - fields={field_name: Value(string_value=field_value)}) - any_pb = any_pb2.Any( - type_url=type_url, - value=struct_pb.SerializeToString(), - ) + self.assertEqual(descriptor.name, "Struct") + + type_url = "type.googleapis.com/" + type_name + field_name = "foo" + field_value = u"Bar" + struct_pb = Struct(fields={field_name: Value(string_value=field_value)}) + any_pb = any_pb2.Any(type_url=type_url, value=struct_pb.SerializeToString()) - entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') + entry_pb = LogEntry(proto_payload=any_pb, log_name=u"all-good") result = self._call_fut(entry_pb) expected_proto = { - 'logName': entry_pb.log_name, - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, + "logName": entry_pb.log_name, + "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, } self.assertEqual(result, expected_proto) class Test__log_entry_mapping_to_pb(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from google.cloud.logging._gapic import _log_entry_mapping_to_pb @@ -558,23 +537,18 @@ def test_unregistered_type(self): from google.protobuf.json_format import ParseError pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + type_name = "google.bigtable.admin.v2.UpdateClusterMetadata" # Make sure the descriptor is not known in the registry. with self.assertRaises(KeyError): pool.FindMessageTypeByName(type_name) - type_url = 'type.googleapis.com/' + type_name + type_url = "type.googleapis.com/" + type_name json_mapping = { - 'protoPayload': { - '@type': type_url, - 'originalRequest': { - 'name': 'foo', - 'location': 'bar', - }, - 'requestTime': { - 'seconds': 1491000125, - }, - }, + "protoPayload": { + "@type": type_url, + "originalRequest": {"name": "foo", "location": "bar"}, + "requestTime": {"seconds": 1491000125}, + } } with self.assertRaises(ParseError): self._call_fut(json_mapping) @@ -585,61 +559,57 @@ def test_registered_type(self): from google.protobuf import descriptor_pool pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' + type_name = "google.protobuf.Struct" # Make sure the descriptor is known in the registry. descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') + self.assertEqual(descriptor.name, "Struct") - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' + type_url = "type.googleapis.com/" + type_name + field_name = "foo" + field_value = u"Bar" json_mapping = { - 'logName': u'hi-everybody', - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, + "logName": u"hi-everybody", + "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, } # Convert to a valid LogEntry. 
result = self._call_fut(json_mapping) entry_pb = LogEntry( - log_name=json_mapping['logName'], + log_name=json_mapping["logName"], proto_payload=any_pb2.Any( - type_url=type_url, - value=b'\n\014\n\003foo\022\005\032\003Bar', + type_url=type_url, value=b"\n\014\n\003foo\022\005\032\003Bar" ), ) self.assertEqual(result, entry_pb) -@mock.patch( - 'google.cloud.logging._gapic.LoggingServiceV2Client', autospec=True) +@mock.patch("google.cloud.logging._gapic.LoggingServiceV2Client", autospec=True) def test_make_logging_api(gapic_client): - client = mock.Mock(spec=['_credentials']) + client = mock.Mock(spec=["_credentials"]) api = _gapic.make_logging_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + credentials=client._credentials, client_info=_gapic._CLIENT_INFO + ) -@mock.patch( - 'google.cloud.logging._gapic.MetricsServiceV2Client', autospec=True) +@mock.patch("google.cloud.logging._gapic.MetricsServiceV2Client", autospec=True) def test_make_metrics_api(gapic_client): - client = mock.Mock(spec=['_credentials']) + client = mock.Mock(spec=["_credentials"]) api = _gapic.make_metrics_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + credentials=client._credentials, client_info=_gapic._CLIENT_INFO + ) -@mock.patch( - 'google.cloud.logging._gapic.ConfigServiceV2Client', autospec=True) +@mock.patch("google.cloud.logging._gapic.ConfigServiceV2Client", autospec=True) def test_make_sinks_api(gapic_client): - client = mock.Mock(spec=['_credentials']) + client = mock.Mock(spec=["_credentials"]) api = _gapic.make_sinks_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + credentials=client._credentials, client_info=_gapic._CLIENT_INFO + ) diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 8a4a0e7f362f..7f11988f5275 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -19,7 +19,6 @@ class Test_entry_from_resource(unittest.TestCase): - @staticmethod def _call_fut(resource, client, loggers): from google.cloud.logging._helpers import entry_from_resource @@ -31,12 +30,12 @@ def _payload_helper(self, key, class_name): resource = {} if key is not None: - resource[key] = 'yup' + resource[key] = "yup" client = object() loggers = {} mock_class = EntryMock() - name = 'google.cloud.logging._helpers.' + class_name + name = "google.cloud.logging._helpers." 
+ class_name with mock.patch(name, new=mock_class): result = self._call_fut(resource, client, loggers) @@ -44,20 +43,19 @@ def _payload_helper(self, key, class_name): self.assertEqual(mock_class.called, (resource, client, loggers)) def test_wo_payload(self): - self._payload_helper(None, 'LogEntry') + self._payload_helper(None, "LogEntry") def test_text_payload(self): - self._payload_helper('textPayload', 'TextEntry') + self._payload_helper("textPayload", "TextEntry") def test_json_payload(self): - self._payload_helper('jsonPayload', 'StructEntry') + self._payload_helper("jsonPayload", "StructEntry") def test_proto_payload(self): - self._payload_helper('protoPayload', 'ProtobufEntry') + self._payload_helper("protoPayload", "ProtobufEntry") class Test_retrieve_metadata_server(unittest.TestCase): - @staticmethod def _call_fut(metadata_key): from google.cloud.logging._helpers import retrieve_metadata_server @@ -66,8 +64,8 @@ def _call_fut(metadata_key): def test_metadata_exists(self): status_code_ok = 200 - response_text = 'my-gke-cluster' - metadata_key = 'test_key' + response_text = "my-gke-cluster" + metadata_key = "test_key" response_mock = ResponseMock(status_code=status_code_ok) response_mock.text = response_text @@ -76,9 +74,7 @@ def test_metadata_exists(self): requests_mock.get.return_value = response_mock requests_mock.codes.ok = status_code_ok - patch = mock.patch( - 'google.cloud.logging._helpers.requests', - requests_mock) + patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) with patch: metadata = self._call_fut(metadata_key) @@ -88,7 +84,7 @@ def test_metadata_exists(self): def test_metadata_does_not_exist(self): status_code_ok = 200 status_code_not_found = 404 - metadata_key = 'test_key' + metadata_key = "test_key" response_mock = ResponseMock(status_code=status_code_not_found) @@ -96,9 +92,7 @@ def test_metadata_does_not_exist(self): requests_mock.get.return_value = response_mock requests_mock.codes.ok = status_code_ok - patch = mock.patch( - 'google.cloud.logging._helpers.requests', - requests_mock) + patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) with patch: metadata = self._call_fut(metadata_key) @@ -108,17 +102,17 @@ def test_metadata_does_not_exist(self): def test_request_exception(self): import requests - metadata_key = 'test_url_cannot_connect' - metadata_url = 'http://metadata.invalid/' + metadata_key = "test_url_cannot_connect" + metadata_url = "http://metadata.invalid/" - requests_get_mock = mock.Mock(spec=['__call__']) + requests_get_mock = mock.Mock(spec=["__call__"]) requests_get_mock.side_effect = requests.exceptions.RequestException - requests_get_patch = mock.patch('requests.get', requests_get_mock) + requests_get_patch = mock.patch("requests.get", requests_get_mock) url_patch = mock.patch( - 'google.cloud.logging._helpers.METADATA_URL', - new=metadata_url) + "google.cloud.logging._helpers.METADATA_URL", new=metadata_url + ) with requests_get_patch: with url_patch: @@ -128,7 +122,6 @@ def test_request_exception(self): class EntryMock(object): - def __init__(self): self.sentinel = object() self.called = None @@ -139,7 +132,6 @@ def from_api_repr(self, resource, client, loggers): class ResponseMock(object): - - def __init__(self, status_code, text='test_response_text'): + def __init__(self, status_code, text="test_response_text"): self.status_code = status_code self.text = text diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 
aa4c54ac30d2..e4c4ecb279ef 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -25,8 +25,8 @@ def _make_credentials(): class TestConnection(unittest.TestCase): - PROJECT = 'project' - FILTER = 'logName:syslog AND severity>=ERROR' + PROJECT = "project" + FILTER = "logName:syslog AND severity>=ERROR" @staticmethod def _get_target_class(): @@ -51,38 +51,34 @@ def test_extra_headers(self): http = mock.create_autospec(requests.Session, instance=True) response = requests.Response() response.status_code = 200 - data = b'brent-spiner' + data = b"brent-spiner" response._content = data http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock(_http=http, spec=["_http"]) conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) + req_data = "req-data-boring" + result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) self.assertEqual(result, data) expected_headers = { - 'Accept-Encoding': 'gzip', + "Accept-Encoding": "gzip", base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, + "User-Agent": conn.USER_AGENT, } - expected_uri = conn.build_api_url('/rainbow') + expected_uri = conn.build_api_url("/rainbow") http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, + data=req_data, headers=expected_headers, method="GET", url=expected_uri ) class Test_LoggingAPI(unittest.TestCase): - PROJECT = 'project' - LIST_ENTRIES_PATH = 'entries:list' - WRITE_ENTRIES_PATH = 'entries:write' - LOGGER_NAME = 'LOGGER_NAME' - FILTER = 'logName:syslog AND severity>=ERROR' + PROJECT = "project" + LIST_ENTRIES_PATH = "entries:list" + WRITE_ENTRIES_PATH = "entries:write" + LOGGER_NAME = "LOGGER_NAME" + FILTER = "logName:syslog AND severity>=ERROR" @staticmethod def _get_target_class(): @@ -115,27 +111,25 @@ def test_list_entries_no_paging(self): from google.cloud.logging.logger import Logger NOW, TIMESTAMP = self._make_timestamp() - IID = 'IID' - TEXT = 'TEXT' - SENT = { - 'projectIds': [self.PROJECT], - } - TOKEN = 'TOKEN' + IID = "IID" + TEXT = "TEXT" + SENT = {"projectIds": [self.PROJECT]} + TOKEN = "TOKEN" RETURNED = { - 'entries': [{ - 'textPayload': TEXT, - 'insertId': IID, - 'resource': { - 'type': 'global', - }, - 'timestamp': TIMESTAMP, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }], - 'nextPageToken': TOKEN, + "entries": [ + { + "textPayload": TEXT, + "insertId": IID, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + } + ], + "nextPageToken": TOKEN, } - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) client._connection = _Connection(RETURNED) api = self._make_one(client) @@ -160,12 +154,10 @@ def test_list_entries_no_paging(self): self.assertIsNone(entry.http_request) called_with = client._connection._called_with - expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) - self.assertEqual(called_with, { - 'method': 'POST', - 'path': expected_path, - 'data': SENT, - }) + expected_path = "/%s" % (self.LIST_ENTRIES_PATH,) + self.assertEqual( + called_with, {"method": "POST", "path": expected_path, "data": SENT} + ) def test_list_entries_w_paging(self): 
from google.cloud.logging import DESCENDING @@ -174,52 +166,54 @@ def test_list_entries_w_paging(self): from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" NOW, TIMESTAMP = self._make_timestamp() - IID1 = 'IID1' - IID2 = 'IID2' - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} PROTO_PAYLOAD = PAYLOAD.copy() - PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' - TOKEN = 'TOKEN' + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" PAGE_SIZE = 42 SENT = { - 'projectIds': [PROJECT1, PROJECT2], - 'filter': self.FILTER, - 'orderBy': DESCENDING, - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, + "projectIds": [PROJECT1, PROJECT2], + "filter": self.FILTER, + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, } RETURNED = { - 'entries': [{ - 'jsonPayload': PAYLOAD, - 'insertId': IID1, - 'resource': { - 'type': 'global', + "entries": [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, - 'timestamp': TIMESTAMP, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }, { - 'protoPayload': PROTO_PAYLOAD, - 'insertId': IID2, - 'resource': { - 'type': 'global', + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, - 'timestamp': TIMESTAMP, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }], + ] } - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) client._connection = _Connection(RETURNED) api = self._make_one(client) iterator = api.list_entries( - projects=[PROJECT1, PROJECT2], filter_=self.FILTER, - order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + projects=[PROJECT1, PROJECT2], + filter_=self.FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + ) entries = list(iterator) token = iterator.next_page_token @@ -250,58 +244,42 @@ def test_list_entries_w_paging(self): self.assertIsNone(entry2.http_request) called_with = client._connection._called_with - expected_path = '/%s' % (self.LIST_ENTRIES_PATH,) - self.assertEqual(called_with, { - 'method': 'POST', - 'path': expected_path, - 'data': SENT, - }) + expected_path = "/%s" % (self.LIST_ENTRIES_PATH,) + self.assertEqual( + called_with, {"method": "POST", "path": expected_path, "data": SENT} + ) def test_write_entries_single(self): - TEXT = 'TEXT' + TEXT = "TEXT" ENTRY = { - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - }, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - } - SENT = { - 'entries': [ENTRY], + "textPayload": TEXT, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), } + SENT = {"entries": [ENTRY]} conn = _Connection({}) client = _Client(conn) api = self._make_one(client) api.write_entries([ENTRY]) - self.assertEqual(conn._called_with['method'], 'POST') - path = '/%s' % self.WRITE_ENTRIES_PATH - self.assertEqual(conn._called_with['path'], path) - 
self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "POST") + path = "/%s" % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_write_entries_multiple(self): - TEXT = 'TEXT' - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - RESOURCE = { - 'type': 'global', - } - LABELS = { - 'baz': 'qux', - 'spam': 'eggs', - } - ENTRY1 = { - 'textPayload': TEXT, - } - ENTRY2 = { - 'jsonPayload': {'foo': 'bar'}, - } + TEXT = "TEXT" + LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + RESOURCE = {"type": "global"} + LABELS = {"baz": "qux", "spam": "eggs"} + ENTRY1 = {"textPayload": TEXT} + ENTRY2 = {"jsonPayload": {"foo": "bar"}} SENT = { - 'logName': LOG_NAME, - 'resource': RESOURCE, - 'labels': LABELS, - 'entries': [ENTRY1, ENTRY2], + "logName": LOG_NAME, + "resource": RESOURCE, + "labels": LABELS, + "entries": [ENTRY1, ENTRY2], } conn = _Connection({}) client = _Client(conn) @@ -309,32 +287,32 @@ def test_write_entries_multiple(self): api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) - self.assertEqual(conn._called_with['method'], 'POST') - path = '/%s' % self.WRITE_ENTRIES_PATH - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "POST") + path = "/%s" % self.WRITE_ENTRIES_PATH + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_logger_delete(self): - path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + path = "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) conn = _Connection({}) client = _Client(conn) api = self._make_one(client) api.logger_delete(self.PROJECT, self.LOGGER_NAME) - self.assertEqual(conn._called_with['method'], 'DELETE') - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "DELETE") + self.assertEqual(conn._called_with["path"], path) class Test_SinksAPI(unittest.TestCase): - PROJECT = 'project' - FILTER = 'logName:syslog AND severity>=ERROR' - LIST_SINKS_PATH = 'projects/%s/sinks' % (PROJECT,) - SINK_NAME = 'sink_name' - SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) - DESTINATION_URI = 'faux.googleapis.com/destination' - WRITER_IDENTITY = 'serviceAccount:project-123@example.com' + PROJECT = "project" + FILTER = "logName:syslog AND severity>=ERROR" + LIST_SINKS_PATH = "projects/%s/sinks" % (PROJECT,) + SINK_NAME = "sink_name" + SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME) + DESTINATION_URI = "faux.googleapis.com/destination" + WRITER_IDENTITY = "serviceAccount:project-123@example.com" @staticmethod def _get_target_class(): @@ -356,14 +334,16 @@ def test_list_sinks_no_paging(self): import six from google.cloud.logging.sink import Sink - TOKEN = 'TOKEN' + TOKEN = "TOKEN" RETURNED = { - 'sinks': [{ - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - }], - 'nextPageToken': TOKEN, + "sinks": [ + { + "name": self.SINK_PATH, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + } + ], + "nextPageToken": TOKEN, } conn = _Connection(RETURNED) client = _Client(conn) @@ -386,31 +366,30 @@ def test_list_sinks_no_paging(self): self.assertIs(sink.client, client) called_with = conn._called_with - path = '/%s' % (self.LIST_SINKS_PATH,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 
'query_params': {}, - }) + path = "/%s" % (self.LIST_SINKS_PATH,) + self.assertEqual( + called_with, {"method": "GET", "path": path, "query_params": {}} + ) def test_list_sinks_w_paging(self): from google.cloud.logging.sink import Sink - TOKEN = 'TOKEN' + TOKEN = "TOKEN" PAGE_SIZE = 42 RETURNED = { - 'sinks': [{ - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - }], + "sinks": [ + { + "name": self.SINK_PATH, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + } + ] } conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_sinks( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + iterator = api.list_sinks(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) sinks = list(iterator) token = iterator.next_page_token @@ -426,23 +405,23 @@ def test_list_sinks_w_paging(self): self.assertIs(sink.client, client) called_with = conn._called_with - path = '/%s' % (self.LIST_SINKS_PATH,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': { - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, + path = "/%s" % (self.LIST_SINKS_PATH,) + self.assertEqual( + called_with, + { + "method": "GET", + "path": path, + "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, }, - }) + ) def test_sink_create_conflict(self): from google.cloud.exceptions import Conflict sent = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } conn = _Connection() conn._raise_conflict = True @@ -451,26 +430,26 @@ def test_sink_create_conflict(self): with self.assertRaises(Conflict): api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI + ) - path = '/projects/%s/sinks' % (self.PROJECT,) + path = "/projects/%s/sinks" % (self.PROJECT,) expected = { - 'method': 'POST', - 'path': path, - 'data': sent, - 'query_params': {'uniqueWriterIdentity': False}, + "method": "POST", + "path": path, + "data": sent, + "query_params": {"uniqueWriterIdentity": False}, } self.assertEqual(conn._called_with, expected) def test_sink_create_ok(self): sent = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } after_create = sent.copy() - after_create['writerIdentity'] = self.WRITER_IDENTITY + after_create["writerIdentity"] = self.WRITER_IDENTITY conn = _Connection(after_create) client = _Client(conn) api = self._make_one(client) @@ -484,12 +463,12 @@ def test_sink_create_ok(self): ) self.assertEqual(returned, after_create) - path = '/projects/%s/sinks' % (self.PROJECT,) + path = "/projects/%s/sinks" % (self.PROJECT,) expected = { - 'method': 'POST', - 'path': path, - 'data': sent, - 'query_params': {'uniqueWriterIdentity': True}, + "method": "POST", + "path": path, + "data": sent, + "query_params": {"uniqueWriterIdentity": True}, } self.assertEqual(conn._called_with, expected) @@ -503,15 +482,15 @@ def test_sink_get_miss(self): with self.assertRaises(NotFound): api.sink_get(self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['method'], 'GET') - path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "GET") + path = "/projects/%s/sinks/%s" % 
(self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with["path"], path) def test_sink_get_hit(self): RESPONSE = { - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_PATH, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } conn = _Connection(RESPONSE) client = _Client(conn) @@ -520,17 +499,17 @@ def test_sink_get_hit(self): response = api.sink_get(self.PROJECT, self.SINK_NAME) self.assertEqual(response, RESPONSE) - self.assertEqual(conn._called_with['method'], 'GET') - path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "GET") + path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with["path"], path) def test_sink_update_miss(self): from google.cloud.exceptions import NotFound sent = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } conn = _Connection() client = _Client(conn) @@ -538,26 +517,26 @@ def test_sink_update_miss(self): with self.assertRaises(NotFound): api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI + ) - path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) expected = { - 'method': 'PUT', - 'path': path, - 'data': sent, - 'query_params': {'uniqueWriterIdentity': False}, + "method": "PUT", + "path": path, + "data": sent, + "query_params": {"uniqueWriterIdentity": False}, } self.assertEqual(conn._called_with, expected) def test_sink_update_hit(self): sent = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } after_update = sent.copy() - after_update['writerIdentity'] = self.WRITER_IDENTITY + after_update["writerIdentity"] = self.WRITER_IDENTITY conn = _Connection(after_update) client = _Client(conn) api = self._make_one(client) @@ -567,15 +546,16 @@ def test_sink_update_hit(self): self.SINK_NAME, self.FILTER, self.DESTINATION_URI, - unique_writer_identity=True) + unique_writer_identity=True, + ) self.assertEqual(returned, after_update) - path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) expected = { - 'method': 'PUT', - 'path': path, - 'data': sent, - 'query_params': {'uniqueWriterIdentity': True}, + "method": "PUT", + "path": path, + "data": sent, + "query_params": {"uniqueWriterIdentity": True}, } self.assertEqual(conn._called_with, expected) @@ -589,9 +569,9 @@ def test_sink_delete_miss(self): with self.assertRaises(NotFound): api.sink_delete(self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['method'], 'DELETE') - path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "DELETE") + path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with["path"], path) def test_sink_delete_hit(self): conn = _Connection({}) @@ -600,19 +580,19 @@ def test_sink_delete_hit(self): api.sink_delete(self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['method'], 'DELETE') 
- path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "DELETE") + path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + self.assertEqual(conn._called_with["path"], path) class Test_MetricsAPI(unittest.TestCase): - PROJECT = 'project' - FILTER = 'logName:syslog AND severity>=ERROR' - LIST_METRICS_PATH = 'projects/%s/metrics' % (PROJECT,) - METRIC_NAME = 'metric_name' - METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) - DESCRIPTION = 'DESCRIPTION' + PROJECT = "project" + FILTER = "logName:syslog AND severity>=ERROR" + LIST_METRICS_PATH = "projects/%s/metrics" % (PROJECT,) + METRIC_NAME = "metric_name" + METRIC_PATH = "projects/%s/metrics/%s" % (PROJECT, METRIC_NAME) + DESCRIPTION = "DESCRIPTION" @staticmethod def _get_target_class(): @@ -627,13 +607,10 @@ def test_list_metrics_no_paging(self): import six from google.cloud.logging.metric import Metric - TOKEN = 'TOKEN' + TOKEN = "TOKEN" RETURNED = { - 'metrics': [{ - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - }], - 'nextPageToken': TOKEN, + "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], + "nextPageToken": TOKEN, } conn = _Connection(RETURNED) client = _Client(conn) @@ -652,34 +629,26 @@ def test_list_metrics_no_paging(self): self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_PATH) self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, '') + self.assertEqual(metric.description, "") self.assertIs(metric.client, client) called_with = conn._called_with - path = '/%s' % (self.LIST_METRICS_PATH,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': {}, - }) + path = "/%s" % (self.LIST_METRICS_PATH,) + self.assertEqual( + called_with, {"method": "GET", "path": path, "query_params": {}} + ) def test_list_metrics_w_paging(self): from google.cloud.logging.metric import Metric - TOKEN = 'TOKEN' + TOKEN = "TOKEN" PAGE_SIZE = 42 - RETURNED = { - 'metrics': [{ - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - }], - } + RETURNED = {"metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}]} conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_metrics( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) metrics = list(iterator) token = iterator.next_page_token @@ -691,27 +660,27 @@ def test_list_metrics_w_paging(self): self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_PATH) self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, '') + self.assertEqual(metric.description, "") self.assertIs(metric.client, client) called_with = conn._called_with - path = '/%s' % (self.LIST_METRICS_PATH,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': { - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, + path = "/%s" % (self.LIST_METRICS_PATH,) + self.assertEqual( + called_with, + { + "method": "GET", + "path": path, + "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, }, - }) + ) def test_metric_create_conflict(self): from google.cloud.exceptions import Conflict SENT = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, } conn = _Connection() 
conn._raise_conflict = True @@ -720,31 +689,30 @@ def test_metric_create_conflict(self): with self.assertRaises(Conflict): api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION + ) - self.assertEqual(conn._called_with['method'], 'POST') - path = '/projects/%s/metrics' % (self.PROJECT,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "POST") + path = "/projects/%s/metrics" % (self.PROJECT,) + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_metric_create_ok(self): SENT = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, } conn = _Connection({}) client = _Client(conn) api = self._make_one(client) - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + api.metric_create(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - self.assertEqual(conn._called_with['method'], 'POST') - path = '/projects/%s/metrics' % (self.PROJECT,) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "POST") + path = "/projects/%s/metrics" % (self.PROJECT,) + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_metric_get_miss(self): from google.cloud.exceptions import NotFound @@ -756,15 +724,15 @@ def test_metric_get_miss(self): with self.assertRaises(NotFound): api.metric_get(self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['method'], 'GET') - path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "GET") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) def test_metric_get_hit(self): RESPONSE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, } conn = _Connection(RESPONSE) client = _Client(conn) @@ -773,17 +741,17 @@ def test_metric_get_hit(self): response = api.metric_get(self.PROJECT, self.METRIC_NAME) self.assertEqual(response, RESPONSE) - self.assertEqual(conn._called_with['method'], 'GET') - path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "GET") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) def test_metric_update_miss(self): from google.cloud.exceptions import NotFound SENT = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, } conn = _Connection() client = _Client(conn) @@ -791,31 +759,30 @@ def test_metric_update_miss(self): with self.assertRaises(NotFound): api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION + ) - self.assertEqual(conn._called_with['method'], 'PUT') - path = 
'/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "PUT") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_metric_update_hit(self): SENT = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, } conn = _Connection({}) client = _Client(conn) api = self._make_one(client) - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + api.metric_update(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - self.assertEqual(conn._called_with['method'], 'PUT') - path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) - self.assertEqual(conn._called_with['data'], SENT) + self.assertEqual(conn._called_with["method"], "PUT") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) + self.assertEqual(conn._called_with["data"], SENT) def test_metric_delete_miss(self): from google.cloud.exceptions import NotFound @@ -827,9 +794,9 @@ def test_metric_delete_miss(self): with self.assertRaises(NotFound): api.metric_delete(self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['method'], 'DELETE') - path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "DELETE") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) def test_metric_delete_hit(self): conn = _Connection({}) @@ -838,9 +805,9 @@ def test_metric_delete_hit(self): api.metric_delete(self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['method'], 'DELETE') - path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with["method"], "DELETE") + path = "/projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with["path"], path) class _Connection(object): @@ -857,11 +824,11 @@ def api_request(self, **kw): self._called_with = kw if self._raise_conflict: - raise Conflict('oops') + raise Conflict("oops") try: response, self._responses = self._responses[0], self._responses[1:] except IndexError: - raise NotFound('miss') + raise NotFound("miss") return response @@ -869,10 +836,9 @@ def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION no_fraction = value.strftime(_RFC3339_NO_FRACTION) - return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + return "%s.%09dZ" % (no_fraction, value.microsecond * 1000) class _Client(object): - def __init__(self, connection): self._connection = connection diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 9636e8ff6954..e3b33a266bdd 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -25,14 +25,14 @@ def _make_credentials(): class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' 
- SINK_NAME = 'SINK_NAME' - FILTER = 'logName:syslog AND severity>=ERROR' - DESTINATION_URI = 'faux.googleapis.com/destination' - METRIC_NAME = 'metric_name' - FILTER = 'logName:syslog AND severity>=ERROR' - DESCRIPTION = 'DESCRIPTION' + PROJECT = "PROJECT" + LOGGER_NAME = "LOGGER_NAME" + SINK_NAME = "SINK_NAME" + FILTER = "logName:syslog AND severity>=ERROR" + DESTINATION_URI = "faux.googleapis.com/destination" + METRIC_NAME = "metric_name" + FILTER = "logName:syslog AND severity>=ERROR" + DESCRIPTION = "DESCRIPTION" @staticmethod def _get_target_class(): @@ -51,9 +51,9 @@ def test_ctor(self): def test_logging_api_wo_gapic(self): from google.cloud.logging._http import _LoggingAPI - client = self._make_one(self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) + client = self._make_one( + self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) conn = client._connection = _Connection() api = client.logging_api @@ -73,10 +73,9 @@ def make_api(client_obj): return api_obj creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=True) + client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch('google.cloud.logging.client._gapic') + patch = mock.patch("google.cloud.logging.client._gapic") with patch as gapic_module: gapic_module.make_logging_api.side_effect = make_api api = client.logging_api @@ -91,12 +90,11 @@ def test_no_gapic_ctor(self): from google.cloud.logging._http import _LoggingAPI creds = _make_credentials() - patch = mock.patch( - 'google.cloud.logging.client._USE_GRPC', - new=True) + patch = mock.patch("google.cloud.logging.client._USE_GRPC", new=True) with patch: - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) + client = self._make_one( + project=self.PROJECT, credentials=creds, _use_grpc=False + ) api = client.logging_api self.assertIsInstance(api, _LoggingAPI) @@ -105,8 +103,8 @@ def test_sinks_api_wo_gapic(self): from google.cloud.logging._http import _SinksAPI client = self._make_one( - self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) + self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) conn = client._connection = _Connection() api = client.sinks_api @@ -126,10 +124,9 @@ def make_api(client_obj): return api_obj creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=True) + client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch('google.cloud.logging.client._gapic') + patch = mock.patch("google.cloud.logging.client._gapic") with patch as gapic_module: gapic_module.make_sinks_api.side_effect = make_api api = client.sinks_api @@ -144,8 +141,8 @@ def test_metrics_api_wo_gapic(self): from google.cloud.logging._http import _MetricsAPI client = self._make_one( - self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) + self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) conn = client._connection = _Connection() api = client.metrics_api @@ -165,10 +162,9 @@ def make_api(client_obj): return api_obj creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=True) + client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch('google.cloud.logging.client._gapic') + patch = mock.patch("google.cloud.logging.client._gapic") with patch as gapic_module: gapic_module.make_metrics_api.side_effect = 
make_api api = client.metrics_api @@ -194,25 +190,22 @@ def test_list_entries_defaults(self): import six from google.cloud.logging.entries import TextEntry - IID = 'IID' - TEXT = 'TEXT' - TOKEN = 'TOKEN' - ENTRIES = [{ - 'textPayload': TEXT, - 'insertId': IID, - 'resource': { - 'type': 'global', - }, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }] + IID = "IID" + TEXT = "TEXT" + TOKEN = "TOKEN" + ENTRIES = [ + { + "textPayload": TEXT, + "insertId": IID, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + } + ] creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - returned = { - 'entries': ENTRIES, - 'nextPageToken': TOKEN, - } + client = self._make_one( + project=self.PROJECT, credentials=creds, _use_grpc=False + ) + returned = {"entries": ENTRIES, "nextPageToken": TOKEN} client._connection = _Connection(returned) iterator = client.list_entries() @@ -232,11 +225,14 @@ def test_list_entries_defaults(self): self.assertEqual(token, TOKEN) called_with = client._connection._called_with - self.assertEqual(called_with, { - 'path': '/entries:list', - 'method': 'POST', - 'data': {'projectIds': [self.PROJECT]}, - }) + self.assertEqual( + called_with, + { + "path": "/entries:list", + "method": "POST", + "data": {"projectIds": [self.PROJECT]}, + }, + ) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING @@ -244,41 +240,43 @@ def test_list_entries_explicit(self): from google.cloud.logging.entries import StructEntry from google.cloud.logging.logger import Logger - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - FILTER = 'logName:LOGNAME' - IID1 = 'IID1' - IID2 = 'IID2' - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + FILTER = "logName:LOGNAME" + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} PROTO_PAYLOAD = PAYLOAD.copy() - PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' - TOKEN = 'TOKEN' + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" PAGE_SIZE = 42 - ENTRIES = [{ - 'jsonPayload': PAYLOAD, - 'insertId': IID1, - 'resource': { - 'type': 'global', + ENTRIES = [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }, { - 'protoPayload': PROTO_PAYLOAD, - 'insertId': IID2, - 'resource': { - 'type': 'global', + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - }] - client = self._make_one(self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) - returned = {'entries': ENTRIES} + ] + client = self._make_one( + self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries( - projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, - page_size=PAGE_SIZE, page_token=TOKEN) + projects=[PROJECT1, PROJECT2], + filter_=FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + ) entries = list(iterator) token = iterator.next_page_token @@ -308,17 +306,20 @@ def 
test_list_entries_explicit(self): self.assertIs(entries[0].logger, entries[1].logger) called_with = client._connection._called_with - self.assertEqual(called_with, { - 'path': '/entries:list', - 'method': 'POST', - 'data': { - 'filter': FILTER, - 'orderBy': DESCENDING, - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, - 'projectIds': [PROJECT1, PROJECT2], + self.assertEqual( + called_with, + { + "path": "/entries:list", + "method": "POST", + "data": { + "filter": FILTER, + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "projectIds": [PROJECT1, PROJECT2], + }, }, - }) + ) def test_sink_defaults(self): from google.cloud.logging.sink import Sink @@ -350,22 +351,17 @@ def test_list_sinks_no_paging(self): import six from google.cloud.logging.sink import Sink - PROJECT = 'PROJECT' - TOKEN = 'TOKEN' - SINK_NAME = 'sink_name' - FILTER = 'logName:syslog AND severity>=ERROR' - SINKS = [{ - 'name': SINK_NAME, - 'filter': FILTER, - 'destination': self.DESTINATION_URI, - }] - client = self._make_one(project=PROJECT, - credentials=_make_credentials(), - _use_grpc=False) - returned = { - 'sinks': SINKS, - 'nextPageToken': TOKEN, - } + PROJECT = "PROJECT" + TOKEN = "TOKEN" + SINK_NAME = "sink_name" + FILTER = "logName:syslog AND severity>=ERROR" + SINKS = [ + {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} + ] + client = self._make_one( + project=PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"sinks": SINKS, "nextPageToken": TOKEN} client._connection = _Connection(returned) iterator = client.list_sinks() @@ -386,31 +382,26 @@ def test_list_sinks_no_paging(self): # Verify the mocked transport. called_with = client._connection._called_with - path = '/projects/%s/sinks' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': {}, - }) + path = "/projects/%s/sinks" % (self.PROJECT,) + self.assertEqual( + called_with, {"method": "GET", "path": path, "query_params": {}} + ) def test_list_sinks_with_paging(self): from google.cloud.logging.sink import Sink - PROJECT = 'PROJECT' - SINK_NAME = 'sink_name' - FILTER = 'logName:syslog AND severity>=ERROR' - TOKEN = 'TOKEN' + PROJECT = "PROJECT" + SINK_NAME = "sink_name" + FILTER = "logName:syslog AND severity>=ERROR" + TOKEN = "TOKEN" PAGE_SIZE = 42 - SINKS = [{ - 'name': SINK_NAME, - 'filter': FILTER, - 'destination': self.DESTINATION_URI, - }] + SINKS = [ + {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} + ] client = self._make_one( - project=PROJECT, credentials=_make_credentials(), _use_grpc=False) - returned = { - 'sinks': SINKS, - } + project=PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"sinks": SINKS} client._connection = _Connection(returned) iterator = client.list_sinks(PAGE_SIZE, TOKEN) @@ -430,15 +421,15 @@ def test_list_sinks_with_paging(self): # Verify the mocked transport. 
called_with = client._connection._called_with - path = '/projects/%s/sinks' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': { - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, + path = "/projects/%s/sinks" % (self.PROJECT,) + self.assertEqual( + called_with, + { + "method": "GET", + "path": path, + "query_params": {"pageSize": PAGE_SIZE, "pageToken": TOKEN}, }, - }) + ) def test_metric_defaults(self): from google.cloud.logging.metric import Metric @@ -450,7 +441,7 @@ def test_metric_defaults(self): self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) self.assertIsNone(metric.filter_) - self.assertEqual(metric.description, '') + self.assertEqual(metric.description, "") self.assertIs(metric.client, client_obj) self.assertEqual(metric.project, self.PROJECT) @@ -460,8 +451,9 @@ def test_metric_explicit(self): creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) - metric = client_obj.metric(self.METRIC_NAME, self.FILTER, - description=self.DESCRIPTION) + metric = client_obj.metric( + self.METRIC_NAME, self.FILTER, description=self.DESCRIPTION + ) self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) @@ -472,17 +464,17 @@ def test_metric_explicit(self): def test_list_metrics_no_paging(self): from google.cloud.logging.metric import Metric - metrics = [{ - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - }] + metrics = [ + { + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, + } + ] client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) - returned = { - 'metrics': metrics, - } + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"metrics": metrics} client._connection = _Connection(returned) # Execute request. @@ -500,32 +492,29 @@ def test_list_metrics_no_paging(self): # Verify mocked transport. called_with = client._connection._called_with - path = '/projects/%s/metrics' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': {}, - }) + path = "/projects/%s/metrics" % (self.PROJECT,) + self.assertEqual( + called_with, {"method": "GET", "path": path, "query_params": {}} + ) def test_list_metrics_with_paging(self): import six from google.cloud.logging.metric import Metric - token = 'TOKEN' - next_token = 'T00KEN' + token = "TOKEN" + next_token = "T00KEN" page_size = 42 - metrics = [{ - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - }] + metrics = [ + { + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": self.DESCRIPTION, + } + ] client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=False) - returned = { - 'metrics': metrics, - 'nextPageToken': next_token, - } + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"metrics": metrics, "nextPageToken": next_token} client._connection = _Connection(returned) # Execute request. @@ -546,15 +535,15 @@ def test_list_metrics_with_paging(self): # Verify mocked transport. 
called_with = client._connection._called_with - path = '/projects/%s/metrics' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': path, - 'query_params': { - 'pageSize': page_size, - 'pageToken': token, + path = "/projects/%s/metrics" % (self.PROJECT,) + self.assertEqual( + called_with, + { + "method": "GET", + "path": path, + "query_params": {"pageSize": page_size, "pageToken": token}, }, - }) + ) def test_get_default_handler_app_engine(self): import os @@ -564,10 +553,10 @@ def test_get_default_handler_app_engine(self): credentials = _make_credentials() - with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: 'True'}): - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) + with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: "True"}): + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) handler = client.get_default_handler() self.assertIsInstance(handler, AppEngineHandler) @@ -576,13 +565,13 @@ def test_get_default_handler_container_engine(self): from google.cloud.logging.handlers import ContainerEngineHandler client = self._make_one( - project=self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) patch = mock.patch( - 'google.cloud.logging.client.retrieve_metadata_server', - return_value='test-gke-cluster') + "google.cloud.logging.client.retrieve_metadata_server", + return_value="test-gke-cluster", + ) with patch: handler = client.get_default_handler() @@ -594,9 +583,9 @@ def test_get_default_handler_general(self): credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) handler = client.get_default_handler() self.assertIsInstance(handler, CloudLoggingHandler) @@ -606,11 +595,10 @@ def test_setup_logging(self): credentials = _make_credentials() - with mock.patch('google.cloud.logging.client.setup_logging', - new=setup_logging): - client = self._make_one(project=self.PROJECT, - credentials=credentials, - _use_grpc=False) + with mock.patch("google.cloud.logging.client.setup_logging", new=setup_logging): + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) client.setup_logging() setup_logging.assert_called() diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 7aaf16acc130..3aad7fbb130c 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -18,29 +18,27 @@ class Test_logger_name_from_path(unittest.TestCase): - def _call_fut(self, path): from google.cloud.logging.entries import logger_name_from_path return logger_name_from_path(path) def test_w_simple_name(self): - LOGGER_NAME = 'LOGGER_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + LOGGER_NAME = "LOGGER_NAME" + PROJECT = "my-project-1234" + PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME) logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) def test_w_name_w_all_extras(self): - LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + LOGGER_NAME = "LOGGER_NAME-part.one~part.two%part-three" + PROJECT = 
"my-project-1234" + PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME) logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) class Test__int_or_none(unittest.TestCase): - def _call_fut(self, value): from google.cloud.logging.entries import _int_or_none @@ -53,13 +51,13 @@ def test_w_int(self): self.assertEqual(self._call_fut(123), 123) def test_w_str(self): - self.assertEqual(self._call_fut('123'), 123) + self.assertEqual(self._call_fut("123"), 123) class TestLogEntry(unittest.TestCase): - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' + PROJECT = "PROJECT" + LOGGER_NAME = "LOGGER_NAME" @staticmethod def _get_target_class(): @@ -94,38 +92,25 @@ def test_ctor_explicit(self): import datetime from google.cloud.logging.resource import Resource - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - IID = 'IID' + LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + IID = "IID" TIMESTAMP = datetime.datetime.now() - LABELS = {'foo': 'bar', 'baz': 'qux'} - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } - resource = Resource(type='global', labels={}) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + LABELS = {"foo": "bar", "baz": "qux"} + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} + resource = Resource(type="global", labels={}) + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE_NO = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': LINE_NO, - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": LINE_NO, "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._make_one( @@ -150,28 +135,26 @@ def test_ctor_explicit(self): self.assertEqual(entry.timestamp, TIMESTAMP) self.assertEqual(entry.labels, LABELS) self.assertEqual(entry.severity, SEVERITY) - self.assertEqual(entry.http_request['requestMethod'], METHOD) - self.assertEqual(entry.http_request['requestUrl'], URI) - self.assertEqual(entry.http_request['status'], STATUS) + self.assertEqual(entry.http_request["requestMethod"], METHOD) + self.assertEqual(entry.http_request["requestUrl"], URI) + self.assertEqual(entry.http_request["status"], STATUS) self.assertEqual(entry.resource, resource) self.assertEqual(entry.trace, TRACE) self.assertEqual(entry.span_id, SPANID) self.assertTrue(entry.trace_sampled) source_location = entry.source_location - self.assertEqual(source_location['file'], FILE) - self.assertEqual(source_location['line'], LINE_NO) - self.assertEqual(source_location['function'], FUNCTION) + self.assertEqual(source_location["file"], FILE) + self.assertEqual(source_location["line"], LINE_NO) + self.assertEqual(source_location["function"], FUNCTION) self.assertEqual(entry.operation, OPERATION) self.assertIsNone(entry.payload) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, 
self.LOGGER_NAME) - API_REPR = { - 'logName': LOG_NAME, - } + LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + API_REPR = {"logName": LOG_NAME} klass = self._get_target_class() entry = klass.from_api_repr(API_REPR, client) @@ -199,60 +182,48 @@ def test_from_api_repr_w_loggers_no_logger_match(self): klass = self._get_target_class() client = _Client(self.PROJECT) - SEVERITY = 'CRITICAL' - IID = 'IID' + SEVERITY = "CRITICAL" + IID = "IID" NOW = datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - LABELS = {'foo': 'bar', 'baz': 'qux'} - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' + LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LABELS = {"foo": "bar", "baz": "qux"} + METHOD = "POST" + URI = "https://api.example.com/endpoint" RESOURCE = Resource( - type='gae_app', + type="gae_app", labels={ - 'type': 'gae_app', - 'labels': { - 'module_id': 'default', - 'version': 'test', - } - } + "type": "gae_app", + "labels": {"module_id": "default", "version": "test"}, + }, ) - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE_NO = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': str(LINE_NO), - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} API_REPR = { - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - 'labels': LABELS, - 'severity': SEVERITY, - 'httpRequest': { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, + "logName": LOG_NAME, + "insertId": IID, + "timestamp": TIMESTAMP, + "labels": LABELS, + "severity": SEVERITY, + "httpRequest": { + "requestMethod": METHOD, + "requestUrl": URI, + "status": STATUS, }, - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': SOURCE_LOCATION, - 'operation': OPERATION, + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": SOURCE_LOCATION, + "operation": OPERATION, } loggers = {} @@ -267,9 +238,9 @@ def test_from_api_repr_w_loggers_no_logger_match(self): self.assertIsNone(entry.received_timestamp) self.assertEqual(entry.labels, LABELS) self.assertEqual(entry.severity, SEVERITY) - self.assertEqual(entry.http_request['requestMethod'], METHOD) - self.assertEqual(entry.http_request['requestUrl'], URI) - self.assertEqual(entry.http_request['status'], STATUS) + self.assertEqual(entry.http_request["requestMethod"], METHOD) + self.assertEqual(entry.http_request["requestUrl"], URI) + self.assertEqual(entry.http_request["status"], STATUS) self.assertIs(logger.client, client) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertEqual(loggers, {LOG_NAME: logger}) @@ -279,9 +250,9 @@ def test_from_api_repr_w_loggers_no_logger_match(self): self.assertTrue(entry.trace_sampled) source_location = entry.source_location - self.assertEqual(source_location['file'], FILE) - self.assertEqual(source_location['line'], 
LINE_NO) - self.assertEqual(source_location['function'], FUNCTION) + self.assertEqual(source_location["file"], FILE) + self.assertEqual(source_location["line"], LINE_NO) + self.assertEqual(source_location["function"], FUNCTION) self.assertEqual(entry.operation, OPERATION) self.assertIsNone(entry.payload) @@ -292,42 +263,33 @@ def test_from_api_repr_w_loggers_w_logger_match(self): from google.cloud._helpers import UTC client = _Client(self.PROJECT) - IID = 'IID' + IID = "IID" NOW = datetime.utcnow().replace(tzinfo=UTC) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - LABELS = {'foo': 'bar', 'baz': 'qux'} - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LABELS = {"foo": "bar", "baz": "qux"} + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE_NO = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': str(LINE_NO), - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} API_REPR = { - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - 'receiveTimestamp': RECEIVED, - 'labels': LABELS, - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': SOURCE_LOCATION, - 'operation': OPERATION, + "logName": LOG_NAME, + "insertId": IID, + "timestamp": TIMESTAMP, + "receiveTimestamp": RECEIVED, + "labels": LABELS, + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": SOURCE_LOCATION, + "operation": OPERATION, } LOGGER = object() loggers = {LOG_NAME: LOGGER} @@ -346,9 +308,9 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertTrue(entry.trace_sampled) source_location = entry.source_location - self.assertEqual(source_location['file'], FILE) - self.assertEqual(source_location['line'], LINE_NO) - self.assertEqual(source_location['function'], FUNCTION) + self.assertEqual(source_location["file"], FILE) + self.assertEqual(source_location["line"], LINE_NO) + self.assertEqual(source_location["function"], FUNCTION) self.assertEqual(entry.operation, OPERATION) self.assertIsNone(entry.payload) @@ -356,23 +318,15 @@ def test_from_api_repr_w_loggers_w_logger_match(self): def test_to_api_repr_w_source_location_no_line(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE - LOG_NAME = 'test.log' - FILE = 'my_file.py' - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'function': FUNCTION, - } - entry = self._make_one( - log_name=LOG_NAME, source_location=SOURCE_LOCATION) + LOG_NAME = "test.log" + FILE = "my_file.py" + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "function": FUNCTION} + entry = self._make_one(log_name=LOG_NAME, source_location=SOURCE_LOCATION) expected = { - 'logName': LOG_NAME, - 'resource': _GLOBAL_RESOURCE._to_dict(), - 'sourceLocation': { - 'file': FILE, - 'line': '0', - 'function': FUNCTION, - } + "logName": LOG_NAME, + "resource": _GLOBAL_RESOURCE._to_dict(), + "sourceLocation": {"file": FILE, "line": 
"0", "function": FUNCTION}, } self.assertEqual(entry.to_api_repr(), expected) @@ -381,61 +335,40 @@ def test_to_api_repr_explicit(self): from google.cloud.logging.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 - LOG_NAME = 'test.log' - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + LOG_NAME = "test.log" + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': LINE, - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} expected = { - 'logName': LOG_NAME, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP), - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': { - 'file': FILE, - 'line': str(LINE), - 'function': FUNCTION, - }, - 'operation': OPERATION, + "logName": LOG_NAME, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": _datetime_to_rfc3339(TIMESTAMP), + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, + "operation": OPERATION, } entry = self._make_one( log_name=LOG_NAME, @@ -457,8 +390,8 @@ def test_to_api_repr_explicit(self): class TestTextEntry(unittest.TestCase): - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' + PROJECT = "PROJECT" + LOGGER_NAME = "LOGGER_NAME" @staticmethod def _get_target_class(): @@ -472,13 +405,13 @@ def _make_one(self, *args, **kw): def test_to_api_repr_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE - LOG_NAME = 'test.log' - TEXT = 'TESTING' + LOG_NAME = "test.log" + TEXT = "TESTING" entry = self._make_one(log_name=LOG_NAME, payload=TEXT) expected = { - 'logName': LOG_NAME, - 'textPayload': TEXT, - 'resource': _GLOBAL_RESOURCE._to_dict(), + "logName": LOG_NAME, + "textPayload": TEXT, + "resource": _GLOBAL_RESOURCE._to_dict(), } self.assertEqual(entry.to_api_repr(), expected) @@ -487,63 +420,42 @@ def test_to_api_repr_explicit(self): from google.cloud.logging.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 - LOG_NAME = 'test.log' - TEXT = 'This is the entry text' - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 
'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + LOG_NAME = "test.log" + TEXT = "This is the entry text" + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': LINE, - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} expected = { - 'logName': LOG_NAME, - 'textPayload': TEXT, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP), - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': { - 'file': FILE, - 'line': str(LINE), - 'function': FUNCTION, - }, - 'operation': OPERATION, + "logName": LOG_NAME, + "textPayload": TEXT, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": _datetime_to_rfc3339(TIMESTAMP), + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, + "operation": OPERATION, } entry = self._make_one( log_name=LOG_NAME, @@ -566,8 +478,8 @@ def test_to_api_repr_explicit(self): class TestStructEntry(unittest.TestCase): - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' + PROJECT = "PROJECT" + LOGGER_NAME = "LOGGER_NAME" @staticmethod def _get_target_class(): @@ -581,13 +493,13 @@ def _make_one(self, *args, **kw): def test_to_api_repr_defaults(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE - LOG_NAME = 'test.log' - JSON_PAYLOAD = {'key': 'value'} + LOG_NAME = "test.log" + JSON_PAYLOAD = {"key": "value"} entry = self._make_one(log_name=LOG_NAME, payload=JSON_PAYLOAD) expected = { - 'logName': LOG_NAME, - 'jsonPayload': JSON_PAYLOAD, - 'resource': _GLOBAL_RESOURCE._to_dict(), + "logName": LOG_NAME, + "jsonPayload": JSON_PAYLOAD, + "resource": _GLOBAL_RESOURCE._to_dict(), } self.assertEqual(entry.to_api_repr(), expected) @@ -596,63 +508,42 @@ def test_to_api_repr_explicit(self): from google.cloud.logging.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 - LOG_NAME = 'test.log' - JSON_PAYLOAD = {'key': 'value'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + LOG_NAME = "test.log" + JSON_PAYLOAD = {"key": "value"} + LABELS = 
{"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': LINE, - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} expected = { - 'logName': LOG_NAME, - 'jsonPayload': JSON_PAYLOAD, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP), - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': { - 'file': FILE, - 'line': str(LINE), - 'function': FUNCTION, - }, - 'operation': OPERATION, + "logName": LOG_NAME, + "jsonPayload": JSON_PAYLOAD, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": _datetime_to_rfc3339(TIMESTAMP), + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, + "operation": OPERATION, } entry = self._make_one( log_name=LOG_NAME, @@ -675,8 +566,8 @@ def test_to_api_repr_explicit(self): class TestProtobufEntry(unittest.TestCase): - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' + PROJECT = "PROJECT" + LOGGER_NAME = "LOGGER_NAME" @staticmethod def _get_target_class(): @@ -688,7 +579,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor_basic(self): - payload = {'foo': 'bar'} + payload = {"foo": "bar"} pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger) @@ -732,14 +623,14 @@ def test_parse_message(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - message = Struct(fields={'foo': Value(bool_value=False)}) - with_true = Struct(fields={'foo': Value(bool_value=True)}) + message = Struct(fields={"foo": Value(bool_value=False)}) + with_true = Struct(fields={"foo": Value(bool_value=True)}) payload = json.loads(MessageToJson(with_true)) entry = self._make_one(payload=payload, logger=mock.sentinel.logger) entry.parse_message(message) - self.assertTrue(message.fields['foo']) + self.assertTrue(message.fields["foo"]) def test_to_api_repr_proto_defaults(self): from google.protobuf.json_format import MessageToDict @@ -747,14 +638,14 @@ def test_to_api_repr_proto_defaults(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - LOG_NAME = 'test.log' - message = Struct(fields={'foo': Value(bool_value=True)}) + LOG_NAME = "test.log" + message = Struct(fields={"foo": Value(bool_value=True)}) entry = 
self._make_one(log_name=LOG_NAME, payload=message) expected = { - 'logName': LOG_NAME, - 'protoPayload': MessageToDict(message), - 'resource': _GLOBAL_RESOURCE._to_dict(), + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), } self.assertEqual(entry.to_api_repr(), expected) @@ -766,63 +657,42 @@ def test_to_api_repr_proto_explicit(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - LOG_NAME = 'test.log' - message = Struct(fields={'foo': Value(bool_value=True)}) - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + LOG_NAME = "test.log" + message = Struct(fields={"foo": Value(bool_value=True)}) + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - FILE = 'my_file.py' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" LINE = 123 - FUNCTION = 'my_function' - SOURCE_LOCATION = { - 'file': FILE, - 'line': LINE, - 'function': FUNCTION, - } - OP_ID = 'OP_ID' - PRODUCER = 'PRODUCER' - OPERATION = { - 'id': OP_ID, - 'producer': PRODUCER, - 'first': True, - 'last': False, - } + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} expected = { - 'logName': LOG_NAME, - 'protoPayload': MessageToDict(message), - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP), - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - 'sourceLocation': { - 'file': FILE, - 'line': str(LINE), - 'function': FUNCTION, - }, - 'operation': OPERATION, + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": _datetime_to_rfc3339(TIMESTAMP), + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION}, + "operation": OPERATION, } entry = self._make_one( @@ -848,18 +718,16 @@ def _datetime_to_rfc3339_w_nanos(value): from google.cloud._helpers import _RFC3339_NO_FRACTION no_fraction = value.strftime(_RFC3339_NO_FRACTION) - return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + return "%s.%09dZ" % (no_fraction, value.microsecond * 1000) class _Logger(object): - def __init__(self, name, client): self.name = name self.client = client class _Client(object): - def __init__(self, project): self.project = project diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 158a727beeb1..5bf6a706815f 100644 --- 
a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -25,8 +25,8 @@ def _make_credentials(): class TestLogger(unittest.TestCase): - PROJECT = 'test-project' - LOGGER_NAME = 'logger-name' + PROJECT = "test-project" + LOGGER_NAME = "logger-name" @staticmethod def _get_target_class(): @@ -44,24 +44,28 @@ def test_ctor_defaults(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(logger.full_name, 'projects/%s/logs/%s' - % (self.PROJECT, self.LOGGER_NAME)) - self.assertEqual(logger.path, '/projects/%s/logs/%s' - % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual( + logger.full_name, "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + ) + self.assertEqual( + logger.path, "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + ) self.assertIsNone(logger.labels) def test_ctor_explicit(self): - LABELS = {'foo': 'bar', 'baz': 'qux'} + LABELS = {"foo": "bar", "baz": "qux"} conn = object() client = _Client(self.PROJECT, conn) logger = self._make_one(self.LOGGER_NAME, client=client, labels=LABELS) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(logger.full_name, 'projects/%s/logs/%s' - % (self.PROJECT, self.LOGGER_NAME)) - self.assertEqual(logger.path, '/projects/%s/logs/%s' - % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual( + logger.full_name, "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + ) + self.assertEqual( + logger.path, "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + ) self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): @@ -89,70 +93,59 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.client, client2) def test_log_empty_defaults_w_default_labels(self): - DEFAULT_LABELS = {'foo': 'spam'} - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'labels': DEFAULT_LABELS, - }] + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_empty() - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_empty_w_explicit(self): import datetime from google.cloud.logging.resource import Resource - ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' - DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + ALT_LOG_NAME = "projects/foo/logs/alt.log.name" + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = 
"12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - ENTRIES = [{ - 'logName': ALT_LOG_NAME, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - }] + ENTRIES = [ + { + "logName": ALT_LOG_NAME, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": "2016-12-31T00:01:02.999999Z", + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + } + ] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_empty( log_name=ALT_LOG_NAME, @@ -168,98 +161,83 @@ def test_log_empty_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_defaults(self): - TEXT = 'TEXT' - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] + TEXT = "TEXT" + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": TEXT, + "resource": {"type": "global", "labels": {}}, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client) logger.log_text(TEXT) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): - TEXT = u'TEXT' - DEFAULT_LABELS = {'foo': 'spam'} - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'labels': DEFAULT_LABELS, - }] + TEXT = u"TEXT" + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": TEXT, + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_text(TEXT) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_explicit(self): import datetime from google.cloud.logging.resource import Resource - ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' - TEXT = 'TEXT' - DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 
'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + ALT_LOG_NAME = "projects/foo/logs/alt.log.name" + TEXT = "TEXT" + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - ENTRIES = [{ - 'logName': ALT_LOG_NAME, - 'textPayload': TEXT, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - }] + ENTRIES = [ + { + "logName": ALT_LOG_NAME, + "textPayload": TEXT, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": "2016-12-31T00:01:02.999999Z", + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + } + ] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one( - self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_text( TEXT, @@ -276,98 +254,83 @@ def test_log_text_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_defaults(self): - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "resource": {"type": "global", "labels": {}}, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client) logger.log_struct(STRUCT) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - DEFAULT_LABELS = {'foo': 'spam'} - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'jsonPayload': STRUCT, - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'labels': DEFAULT_LABELS, - }] + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = 
self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_struct(STRUCT) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_explicit(self): import datetime from google.cloud.logging.resource import Resource - ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' - STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} - DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + ALT_LOG_NAME = "projects/foo/logs/alt.log.name" + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - ENTRIES = [{ - 'logName': ALT_LOG_NAME, - 'jsonPayload': STRUCT, - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - }] + ENTRIES = [ + { + "logName": ALT_LOG_NAME, + "jsonPayload": STRUCT, + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": "2016-12-31T00:01:02.999999Z", + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + } + ] client1 = _Client(self.PROJECT) client2 = _Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_struct( STRUCT, @@ -384,59 +347,51 @@ def test_log_struct_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_defaults(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - message = Struct(fields={'foo': Value(bool_value=True)}) - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - 'labels': {}, - }, - }] + message = Struct(fields={"foo": Value(bool_value=True)}) + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "protoPayload": json.loads(MessageToJson(message)), + "resource": {"type": "global", "labels": {}}, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = 
self._make_one(self.LOGGER_NAME, client=client) logger.log_proto(message) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_w_default_labels(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - message = Struct(fields={'foo': Value(bool_value=True)}) - DEFAULT_LABELS = {'foo': 'spam'} - ENTRIES = [{ - 'logName': 'projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME), - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - 'labels': {}, - }, - 'labels': DEFAULT_LABELS, - }] + message = Struct(fields={"foo": Value(bool_value=True)}) + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "protoPayload": json.loads(MessageToJson(message)), + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] client = _Client(self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_proto(message) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_w_explicit(self): import json @@ -446,48 +401,41 @@ def test_log_proto_w_explicit(self): from google.protobuf.struct_pb2 import Value from google.cloud.logging.resource import Resource - message = Struct(fields={'foo': Value(bool_value=True)}) - ALT_LOG_NAME = 'projects/foo/logs/alt.log.name' - DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + message = Struct(fields={"foo": Value(bool_value=True)}) + ALT_LOG_NAME = "projects/foo/logs/alt.log.name" + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - ENTRIES = [{ - 'logName': ALT_LOG_NAME, - 'protoPayload': json.loads(MessageToJson(message)), - 'labels': LABELS, - 'insertId': IID, - 'severity': SEVERITY, - 'httpRequest': REQUEST, - 'timestamp': '2016-12-31T00:01:02.999999Z', - 'resource': RESOURCE._to_dict(), - 'trace': TRACE, - 'spanId': SPANID, - 'traceSampled': True, - }] + ENTRIES = [ + { + "logName": ALT_LOG_NAME, + "protoPayload": json.loads(MessageToJson(message)), + "labels": LABELS, + "insertId": IID, + "severity": SEVERITY, + "httpRequest": REQUEST, + "timestamp": "2016-12-31T00:01:02.999999Z", + "resource": RESOURCE._to_dict(), + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + } + ] client1 = _Client(self.PROJECT) client2 = 
_Client(self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = self._make_one(self.LOGGER_NAME, client=client1, - labels=DEFAULT_LABELS) + logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) logger.log_proto( message, @@ -504,8 +452,7 @@ def test_log_proto_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, None, None, None)) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -514,8 +461,9 @@ def test_delete_w_bound_client(self): logger.delete() - self.assertEqual(api._logger_delete_called_with, - (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual( + api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) + ) def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) @@ -525,21 +473,20 @@ def test_delete_w_alternate_client(self): logger.delete(client=client2) - self.assertEqual(api._logger_delete_called_with, - (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual( + api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) + ) def test_list_entries_defaults(self): import six from google.cloud.logging.client import Client - TOKEN = 'TOKEN' + TOKEN = "TOKEN" - client = Client(project=self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) - returned = { - 'nextPageToken': TOKEN, - } + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"nextPageToken": TOKEN} client._connection = _Connection(returned) logger = self._make_one(self.LOGGER_NAME, client=client) @@ -552,34 +499,37 @@ def test_list_entries_defaults(self): self.assertEqual(len(entries), 0) self.assertEqual(token, TOKEN) called_with = client._connection._called_with - FILTER = 'logName=projects/%s/logs/%s' % ( - self.PROJECT, self.LOGGER_NAME) - self.assertEqual(called_with, { - 'method': 'POST', - 'path': '/entries:list', - 'data': { - 'filter': FILTER, - 'projectIds': [self.PROJECT], + FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + self.assertEqual( + called_with, + { + "method": "POST", + "path": "/entries:list", + "data": {"filter": FILTER, "projectIds": [self.PROJECT]}, }, - }) + ) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING from google.cloud.logging.client import Client - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - FILTER = 'resource.type:global' - TOKEN = 'TOKEN' + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + FILTER = "resource.type:global" + TOKEN = "TOKEN" PAGE_SIZE = 42 - client = Client(project=self.PROJECT, - credentials=_make_credentials(), - _use_grpc=False) + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( - projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, - page_size=PAGE_SIZE, page_token=TOKEN) + projects=[PROJECT1, PROJECT2], + filter_=FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + ) entries = list(iterator) token = iterator.next_page_token @@ -587,24 +537,30 @@ def test_list_entries_explicit(self): self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) called_with = client._connection._called_with - combined_filter = '%s AND logName=projects/%s/logs/%s' % ( - FILTER, self.PROJECT, self.LOGGER_NAME) - 
self.assertEqual(called_with, { - 'method': 'POST', - 'path': '/entries:list', - 'data': { - 'filter': combined_filter, - 'orderBy': DESCENDING, - 'pageSize': PAGE_SIZE, - 'pageToken': TOKEN, - 'projectIds': [PROJECT1, PROJECT2], + combined_filter = "%s AND logName=projects/%s/logs/%s" % ( + FILTER, + self.PROJECT, + self.LOGGER_NAME, + ) + self.assertEqual( + called_with, + { + "method": "POST", + "path": "/entries:list", + "data": { + "filter": combined_filter, + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "projectIds": [PROJECT1, PROJECT2], + }, }, - }) + ) class TestBatch(unittest.TestCase): - PROJECT = 'test-project' + PROJECT = "test-project" @staticmethod def _get_target_class(): @@ -638,27 +594,19 @@ def test_log_empty_explicit(self): from google.cloud.logging.resource import Resource from google.cloud.logging.entries import LogEntry - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" ENTRY = LogEntry( labels=LABELS, insert_id=IID, @@ -691,7 +639,7 @@ def test_log_text_defaults(self): from google.cloud.logging.entries import _GLOBAL_RESOURCE from google.cloud.logging.entries import TextEntry - TEXT = 'This is the entry text' + TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -704,28 +652,20 @@ def test_log_text_explicit(self): from google.cloud.logging.resource import Resource from google.cloud.logging.entries import TextEntry - TEXT = 'This is the entry text' - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + TEXT = "This is the entry text" + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test' - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" ENTRY = TextEntry( payload=TEXT, labels=LABELS, @@ -760,7 +700,7 @@ def test_log_struct_defaults(self): from google.cloud.logging.entries import _GLOBAL_RESOURCE from google.cloud.logging.entries import StructEntry - STRUCT = {'message': 'Message text', 'weather': 'partly 
cloudy'} + STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -773,27 +713,19 @@ def test_log_struct_explicit(self): from google.cloud.logging.resource import Resource from google.cloud.logging.entries import StructEntry - STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + STRUCT = {"message": "Message text", "weather": "partly cloudy"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test', - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) ENTRY = StructEntry( payload=STRUCT, @@ -831,7 +763,7 @@ def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - message = Struct(fields={'foo': Value(bool_value=True)}) + message = Struct(fields={"foo": Value(bool_value=True)}) ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) client = _Client(project=self.PROJECT, connection=_make_credentials()) logger = _Logger() @@ -846,27 +778,19 @@ def test_log_proto_explicit(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - message = Struct(fields={'foo': Value(bool_value=True)}) - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - TRACE = '12345678-1234-5678-1234-567812345678' - SPANID = '000000000000004a' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + message = Struct(fields={"foo": Value(bool_value=True)}) + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test', - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) ENTRY = ProtobufEntry( payload=message, @@ -905,17 +829,15 @@ def test_commit_w_unknown_entry_type(self): client = _Client(project=self.PROJECT, connection=_make_credentials()) api = client.logging_api = _DummyLoggingAPI() batch = self._make_one(logger, client) - batch.entries.append(LogEntry(severity='blah')) - ENTRY = { - 'severity': 'blah', - 'resource': _GLOBAL_RESOURCE._to_dict(), - } + batch.entries.append(LogEntry(severity="blah")) + ENTRY = {"severity": "blah", "resource": _GLOBAL_RESOURCE._to_dict()} batch.commit() 
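# --- Editorial aside, not part of the patch. The TestBatch cases around this
# hunk exercise the public batching pattern: entries are queued locally and
# sent in a single write_entries call on commit. A minimal sketch against the
# v1-era google-cloud-logging surface these tests target (project and payload
# values reused from the tests; real use needs credentials):
from google.cloud.logging import Client

client = Client(project="test-project")
logger = client.logger("logger_name")
with logger.batch() as batch:  # __exit__ commits the queued entries on success
    batch.log_text("This is the entry text")
    batch.log_struct({"message": "Message text", "weather": "partly cloudy"})
# --- End aside.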
self.assertEqual(list(batch.entries), []) - self.assertEqual(api._write_entries_called_with, - ([ENTRY], logger.full_name, None, None)) + self.assertEqual( + api._write_entries_called_with, ([ENTRY], logger.full_name, None, None) + ) def test_commit_w_resource_specified(self): from google.cloud.logging.entries import _GLOBAL_RESOURCE @@ -925,25 +847,22 @@ def test_commit_w_resource_specified(self): client = _Client(project=self.PROJECT, connection=_make_credentials()) api = client.logging_api = _DummyLoggingAPI() RESOURCE = Resource( - type='gae_app', - labels={ - 'module_id': 'default', - 'version_id': 'test', - } + type="gae_app", labels={"module_id": "default", "version_id": "test"} ) batch = self._make_one(logger, client, resource=RESOURCE) - MESSAGE = 'This is the entry text' + MESSAGE = "This is the entry text" ENTRIES = [ - {'textPayload': MESSAGE}, - {'textPayload': MESSAGE, 'resource': _GLOBAL_RESOURCE._to_dict()}, + {"textPayload": MESSAGE}, + {"textPayload": MESSAGE, "resource": _GLOBAL_RESOURCE._to_dict()}, ] batch.log_text(MESSAGE, resource=None) batch.log_text(MESSAGE) batch.commit() - self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, - RESOURCE._to_dict(), None)) + self.assertEqual( + api._write_entries_called_with, + (ENTRIES, logger.full_name, RESOURCE._to_dict(), None), + ) def test_commit_w_bound_client(self): import json @@ -954,46 +873,50 @@ def test_commit_w_bound_client(self): from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.logging.entries import _GLOBAL_RESOURCE - TEXT = 'This is the entry text' - STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} - message = Struct(fields={'foo': Value(bool_value=True)}) - IID1 = 'IID1' - IID2 = 'IID2' - IID3 = 'IID3' + TEXT = "This is the entry text" + STRUCT = {"message": TEXT, "weather": "partly cloudy"} + message = Struct(fields={"foo": Value(bool_value=True)}) + IID1 = "IID1" + IID2 = "IID2" + IID3 = "IID3" TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999) TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999) TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999) - TRACE1 = '12345678-1234-5678-1234-567812345678' - TRACE2 = '12345678-1234-5678-1234-567812345679' - TRACE3 = '12345678-1234-5678-1234-567812345670' - SPANID1 = '000000000000004a' - SPANID2 = '000000000000004b' - SPANID3 = '000000000000004c' - ENTRIES = [{ - 'textPayload': TEXT, - 'insertId': IID1, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP1), - 'resource': _GLOBAL_RESOURCE._to_dict(), - 'trace': TRACE1, - 'spanId': SPANID1, - 'traceSampled': True, - }, { - 'jsonPayload': STRUCT, - 'insertId': IID2, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP2), - 'resource': _GLOBAL_RESOURCE._to_dict(), - 'trace': TRACE2, - 'spanId': SPANID2, - 'traceSampled': False, - }, { - 'protoPayload': json.loads(MessageToJson(message)), - 'insertId': IID3, - 'timestamp': _datetime_to_rfc3339(TIMESTAMP3), - 'resource': _GLOBAL_RESOURCE._to_dict(), - 'trace': TRACE3, - 'spanId': SPANID3, - 'traceSampled': True, - }] + TRACE1 = "12345678-1234-5678-1234-567812345678" + TRACE2 = "12345678-1234-5678-1234-567812345679" + TRACE3 = "12345678-1234-5678-1234-567812345670" + SPANID1 = "000000000000004a" + SPANID2 = "000000000000004b" + SPANID3 = "000000000000004c" + ENTRIES = [ + { + "textPayload": TEXT, + "insertId": IID1, + "timestamp": _datetime_to_rfc3339(TIMESTAMP1), + "resource": _GLOBAL_RESOURCE._to_dict(), + "trace": TRACE1, + "spanId": SPANID1, + "traceSampled": True, + }, + { + "jsonPayload": STRUCT, + 
"insertId": IID2, + "timestamp": _datetime_to_rfc3339(TIMESTAMP2), + "resource": _GLOBAL_RESOURCE._to_dict(), + "trace": TRACE2, + "spanId": SPANID2, + "traceSampled": False, + }, + { + "protoPayload": json.loads(MessageToJson(message)), + "insertId": IID3, + "timestamp": _datetime_to_rfc3339(TIMESTAMP3), + "resource": _GLOBAL_RESOURCE._to_dict(), + "trace": TRACE3, + "spanId": SPANID3, + "traceSampled": True, + }, + ] client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() @@ -1026,8 +949,9 @@ def test_commit_w_bound_client(self): batch.commit() self.assertEqual(list(batch.entries), []) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, logger.full_name, None, None) + ) def test_commit_w_alternate_client(self): import json @@ -1037,35 +961,36 @@ def test_commit_w_alternate_client(self): from google.cloud.logging.logger import Logger from google.cloud.logging.entries import _GLOBAL_RESOURCE - TEXT = 'This is the entry text' - STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} - message = Struct(fields={'foo': Value(bool_value=True)}) - DEFAULT_LABELS = {'foo': 'spam'} - LABELS = { - 'foo': 'bar', - 'baz': 'qux', - } - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + TEXT = "This is the entry text" + STRUCT = {"message": TEXT, "weather": "partly cloudy"} + message = Struct(fields={"foo": Value(bool_value=True)}) + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.logging_api = _DummyLoggingAPI() - logger = Logger('logger_name', client1, labels=DEFAULT_LABELS) + logger = Logger("logger_name", client1, labels=DEFAULT_LABELS) ENTRIES = [ - {'textPayload': TEXT, 'labels': LABELS, 'resource': - _GLOBAL_RESOURCE._to_dict()}, - {'jsonPayload': STRUCT, 'severity': SEVERITY, - 'resource': _GLOBAL_RESOURCE._to_dict()}, - {'protoPayload': json.loads(MessageToJson(message)), - 'httpRequest': REQUEST, - 'resource': _GLOBAL_RESOURCE._to_dict()}, + { + "textPayload": TEXT, + "labels": LABELS, + "resource": _GLOBAL_RESOURCE._to_dict(), + }, + { + "jsonPayload": STRUCT, + "severity": SEVERITY, + "resource": _GLOBAL_RESOURCE._to_dict(), + }, + { + "protoPayload": json.loads(MessageToJson(message)), + "httpRequest": REQUEST, + "resource": _GLOBAL_RESOURCE._to_dict(), + }, ] batch = self._make_one(logger, client=client1) @@ -1075,8 +1000,10 @@ def test_commit_w_alternate_client(self): batch.commit(client=client2) self.assertEqual(list(batch.entries), []) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS)) + self.assertEqual( + api._write_entries_called_with, + (ENTRIES, logger.full_name, None, DEFAULT_LABELS), + ) def test_context_mgr_success(self): import json @@ -1086,31 +1013,35 @@ def test_context_mgr_success(self): from google.cloud.logging.logger import Logger from google.cloud.logging.entries import _GLOBAL_RESOURCE - TEXT = 'This is the entry text' - STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} - message = Struct(fields={'foo': Value(bool_value=True)}) - 
DEFAULT_LABELS = {'foo': 'spam'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + TEXT = "This is the entry text" + STRUCT = {"message": TEXT, "weather": "partly cloudy"} + message = Struct(fields={"foo": Value(bool_value=True)}) + DEFAULT_LABELS = {"foo": "spam"} + LABELS = {"foo": "bar", "baz": "qux"} + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() - logger = Logger('logger_name', client, labels=DEFAULT_LABELS) + logger = Logger("logger_name", client, labels=DEFAULT_LABELS) ENTRIES = [ - {'textPayload': TEXT, 'httpRequest': REQUEST, - 'resource': _GLOBAL_RESOURCE._to_dict()}, - {'jsonPayload': STRUCT, 'labels': LABELS, - 'resource': _GLOBAL_RESOURCE._to_dict()}, - {'protoPayload': json.loads(MessageToJson(message)), - 'resource': _GLOBAL_RESOURCE._to_dict(), - 'severity': SEVERITY}, + { + "textPayload": TEXT, + "httpRequest": REQUEST, + "resource": _GLOBAL_RESOURCE._to_dict(), + }, + { + "jsonPayload": STRUCT, + "labels": LABELS, + "resource": _GLOBAL_RESOURCE._to_dict(), + }, + { + "protoPayload": json.loads(MessageToJson(message)), + "resource": _GLOBAL_RESOURCE._to_dict(), + "severity": SEVERITY, + }, ] batch = self._make_one(logger, client=client) @@ -1120,8 +1051,10 @@ def test_context_mgr_success(self): other.log_proto(message, severity=SEVERITY) self.assertEqual(list(batch.entries), []) - self.assertEqual(api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS)) + self.assertEqual( + api._write_entries_called_with, + (ENTRIES, logger.full_name, None, DEFAULT_LABELS), + ) def test_context_mgr_failure(self): import datetime @@ -1131,29 +1064,24 @@ def test_context_mgr_failure(self): from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import ProtobufEntry - TEXT = 'This is the entry text' - STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} - LABELS = {'foo': 'bar', 'baz': 'qux'} - IID = 'IID' - SEVERITY = 'CRITICAL' - METHOD = 'POST' - URI = 'https://api.example.com/endpoint' - STATUS = '500' - REQUEST = { - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - } + TEXT = "This is the entry text" + STRUCT = {"message": TEXT, "weather": "partly cloudy"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) - message = Struct(fields={'foo': Value(bool_value=True)}) + message = Struct(fields={"foo": Value(bool_value=True)}) client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() logger = _Logger() UNSENT = [ TextEntry(payload=TEXT, insert_id=IID, timestamp=TIMESTAMP), StructEntry(payload=STRUCT, severity=SEVERITY), - ProtobufEntry( - payload=message, labels=LABELS, http_request=REQUEST), + ProtobufEntry(payload=message, labels=LABELS, http_request=REQUEST), ] batch = self._make_one(logger, client=client) @@ -1174,25 +1102,22 @@ class _Logger(object): labels = None - def __init__(self, name='NAME', project='PROJECT'): - self.full_name = 'projects/%s/logs/%s' 
% (project, name) + def __init__(self, name="NAME", project="PROJECT"): + self.full_name = "projects/%s/logs/%s" % (project, name) class _DummyLoggingAPI(object): _write_entries_called_with = None - def write_entries(self, entries, logger_name=None, resource=None, - labels=None): - self._write_entries_called_with = ( - entries, logger_name, resource, labels) + def write_entries(self, entries, logger_name=None, resource=None, labels=None): + self._write_entries_called_with = (entries, logger_name, resource, labels) def logger_delete(self, project, logger_name): self._logger_delete_called_with = (project, logger_name) class _Client(object): - def __init__(self, project, connection=None): self.project = project self._connection = connection diff --git a/packages/google-cloud-logging/tests/unit/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py index 862ddf9bd6c5..93ee90b87470 100644 --- a/packages/google-cloud-logging/tests/unit/test_metric.py +++ b/packages/google-cloud-logging/tests/unit/test_metric.py @@ -17,10 +17,10 @@ class TestMetric(unittest.TestCase): - PROJECT = 'test-project' - METRIC_NAME = 'metric-name' - FILTER = 'logName:syslog AND severity>=ERROR' - DESCRIPTION = 'DESCRIPTION' + PROJECT = "test-project" + METRIC_NAME = "metric-name" + FILTER = "logName:syslog AND severity>=ERROR" + DESCRIPTION = "DESCRIPTION" @staticmethod def _get_target_class(): @@ -32,54 +32,52 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) metric = self._make_one(self.METRIC_NAME, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertIsNone(metric.filter_) - self.assertEqual(metric.description, '') + self.assertEqual(metric.description, "") self.assertIs(metric.client, client) self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, '/%s' % (FULL,)) + self.assertEqual(metric.path, "/%s" % (FULL,)) def test_ctor_explicit(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) - metric = self._make_one(self.METRIC_NAME, self.FILTER, - client=client, description=self.DESCRIPTION) + metric = self._make_one( + self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION + ) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) self.assertIs(metric.client, client) self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, '/%s' % (FULL,)) + self.assertEqual(metric.path, "/%s" % (FULL,)) def test_from_api_repr_minimal(self): client = _Client(project=self.PROJECT) - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - } + FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + RESOURCE = {"name": self.METRIC_NAME, "filter": self.FILTER} klass = self._get_target_class() metric = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, '') + self.assertEqual(metric.description, "") 
self.assertIs(metric._client, client) self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) def test_from_api_repr_w_description(self): client = _Client(project=self.PROJECT) - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - DESCRIPTION = 'DESCRIPTION' + FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = "DESCRIPTION" RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - 'description': DESCRIPTION, + "name": self.METRIC_NAME, + "filter": self.FILTER, + "description": DESCRIPTION, } klass = self._get_target_class() metric = klass.from_api_repr(RESOURCE, client=client) @@ -99,20 +97,23 @@ def test_create_w_bound_client(self): self.assertEqual( api._metric_create_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + (self.PROJECT, self.METRIC_NAME, self.FILTER, ""), + ) def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, - description=self.DESCRIPTION) + metric = self._make_one( + self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION + ) metric.create(client=client2) self.assertEqual( api._metric_create_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION), + ) def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -121,14 +122,10 @@ def test_exists_miss_w_bound_client(self): self.assertFalse(metric.exists()) - self.assertEqual(api._metric_get_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) def test_exists_hit_w_alternate_client(self): - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': self.FILTER, - } + RESOURCE = {"name": self.METRIC_NAME, "filter": self.FILTER} client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() @@ -137,34 +134,30 @@ def test_exists_hit_w_alternate_client(self): self.assertTrue(metric.exists(client=client2)) - self.assertEqual(api._metric_get_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) def test_reload_w_bound_client(self): - NEW_FILTER = 'logName:syslog AND severity>=INFO' - RESOURCE = { - 'name': self.METRIC_NAME, - 'filter': NEW_FILTER, - } + NEW_FILTER = "logName:syslog AND severity>=INFO" + RESOURCE = {"name": self.METRIC_NAME, "filter": NEW_FILTER} client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client, - description=self.DESCRIPTION) + metric = self._make_one( + self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION + ) metric.reload() self.assertEqual(metric.filter_, NEW_FILTER) - self.assertEqual(metric.description, '') - self.assertEqual(api._metric_get_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual(metric.description, "") + self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) def test_reload_w_alternate_client(self): - NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_FILTER = "logName:syslog AND severity>=INFO" RESOURCE = { - 'name': self.METRIC_NAME, - 'description': 
self.DESCRIPTION, - 'filter': NEW_FILTER, + "name": self.METRIC_NAME, + "description": self.DESCRIPTION, + "filter": NEW_FILTER, } client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) @@ -176,8 +169,7 @@ def test_reload_w_alternate_client(self): self.assertEqual(metric.filter_, NEW_FILTER) self.assertEqual(metric.description, self.DESCRIPTION) - self.assertEqual(api._metric_get_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual(api._metric_get_called_with, (self.PROJECT, self.METRIC_NAME)) def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -188,20 +180,23 @@ def test_update_w_bound_client(self): self.assertEqual( api._metric_update_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + (self.PROJECT, self.METRIC_NAME, self.FILTER, ""), + ) def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1, - description=self.DESCRIPTION) + metric = self._make_one( + self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION + ) metric.update(client=client2) self.assertEqual( api._metric_update_called_with, - (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION), + ) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -210,8 +205,9 @@ def test_delete_w_bound_client(self): metric.delete() - self.assertEqual(api._metric_delete_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual( + api._metric_delete_called_with, (self.PROJECT, self.METRIC_NAME) + ) def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) @@ -221,21 +217,19 @@ def test_delete_w_alternate_client(self): metric.delete(client=client2) - self.assertEqual(api._metric_delete_called_with, - (self.PROJECT, self.METRIC_NAME)) + self.assertEqual( + api._metric_delete_called_with, (self.PROJECT, self.METRIC_NAME) + ) class _Client(object): - def __init__(self, project): self.project = project class _DummyMetricsAPI(object): - def metric_create(self, project, metric_name, filter_, description): - self._metric_create_called_with = ( - project, metric_name, filter_, description) + self._metric_create_called_with = (project, metric_name, filter_, description) def metric_get(self, project, metric_name): from google.cloud.exceptions import NotFound @@ -244,11 +238,10 @@ def metric_get(self, project, metric_name): try: return self._metric_get_response except AttributeError: - raise NotFound('miss') + raise NotFound("miss") def metric_update(self, project, metric_name, filter_, description): - self._metric_update_called_with = ( - project, metric_name, filter_, description) + self._metric_update_called_with = (project, metric_name, filter_, description) def metric_delete(self, project, metric_name): self._metric_delete_called_with = (project, metric_name) diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index fdc8f80f1e5b..dc1ff9563f9c 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -17,11 +17,11 @@ class TestSink(unittest.TestCase): - PROJECT = 'test-project' - SINK_NAME = 'sink-name' - FILTER = 'logName:syslog AND severity>=INFO' - DESTINATION_URI = 
'faux.googleapis.com/destination' - WRITER_IDENTITY = 'serviceAccount:project-123@example.com' + PROJECT = "test-project" + SINK_NAME = "sink-name" + FILTER = "logName:syslog AND severity>=INFO" + DESTINATION_URI = "faux.googleapis.com/destination" + WRITER_IDENTITY = "serviceAccount:project-123@example.com" @staticmethod def _get_target_class(): @@ -33,7 +33,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) sink = self._make_one(self.SINK_NAME, client=client) self.assertEqual(sink.name, self.SINK_NAME) @@ -42,29 +42,26 @@ def test_ctor_defaults(self): self.assertIs(sink.client, client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, '/%s' % (FULL,)) + self.assertEqual(sink.path, "/%s" % (FULL,)) def test_ctor_explicit(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + ) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertIs(sink.client, client) self.assertEqual(sink.project, self.PROJECT) self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, '/%s' % (FULL,)) + self.assertEqual(sink.path, "/%s" % (FULL,)) def test_from_api_repr_minimal(self): client = _Client(project=self.PROJECT) - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - RESOURCE = { - 'name': self.SINK_NAME, - 'destination': self.DESTINATION_URI, - } + FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + RESOURCE = {"name": self.SINK_NAME, "destination": self.DESTINATION_URI} klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) self.assertEqual(sink.name, self.SINK_NAME) @@ -77,12 +74,12 @@ def test_from_api_repr_minimal(self): def test_from_api_repr_full(self): client = _Client(project=self.PROJECT) - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) RESOURCE = { - 'name': self.SINK_NAME, - 'destination': self.DESTINATION_URI, - 'filter': self.FILTER, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "destination": self.DESTINATION_URI, + "filter": self.FILTER, + "writerIdentity": self.WRITER_IDENTITY, } klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) @@ -98,14 +95,14 @@ def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_create_response = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + "writerIdentity": self.WRITER_IDENTITY, } - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + ) sink.create() @@ -115,27 +112,21 @@ def 
test_create_w_bound_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, - ( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - False, - ), + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), ) def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client1) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + ) api = client2.sinks_api = _DummySinksAPI() api._sink_create_response = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + "writerIdentity": self.WRITER_IDENTITY, } sink.create(client=client2, unique_writer_identity=True) @@ -146,52 +137,41 @@ def test_create_w_alternate_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, - ( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - True, - ), + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), ) def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + ) self.assertFalse(sink.exists()) - self.assertEqual(api._sink_get_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) def test_exists_hit_w_alternate_client(self): RESOURCE = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, } client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client1) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + ) self.assertTrue(sink.exists(client=client2)) - self.assertEqual(api._sink_get_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) def test_reload_w_bound_client(self): - NEW_DESTINATION_URI = 'faux.googleapis.com/other' - RESOURCE = { - 'name': self.SINK_NAME, - 'destination': NEW_DESTINATION_URI, - } + NEW_DESTINATION_URI = "faux.googleapis.com/other" + RESOURCE = {"name": self.SINK_NAME, "destination": NEW_DESTINATION_URI} client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE @@ -202,17 +182,16 @@ def test_reload_w_bound_client(self): self.assertEqual(sink.destination, NEW_DESTINATION_URI) self.assertIsNone(sink.filter_) self.assertIsNone(sink.writer_identity) - self.assertEqual(api._sink_get_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) def test_reload_w_alternate_client(self): - NEW_FILTER = 'logName:syslog AND severity>=INFO' - NEW_DESTINATION_URI = 
'faux.googleapis.com/other' + NEW_FILTER = "logName:syslog AND severity>=INFO" + NEW_DESTINATION_URI = "faux.googleapis.com/other" RESOURCE = { - 'name': self.SINK_NAME, - 'filter': NEW_FILTER, - 'destination': NEW_DESTINATION_URI, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "filter": NEW_FILTER, + "destination": NEW_DESTINATION_URI, + "writerIdentity": self.WRITER_IDENTITY, } client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) @@ -225,21 +204,20 @@ def test_reload_w_alternate_client(self): self.assertEqual(sink.destination, NEW_DESTINATION_URI) self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual(api._sink_get_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() api._sink_update_response = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + "writerIdentity": self.WRITER_IDENTITY, } - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + ) sink.update() @@ -249,27 +227,22 @@ def test_update_w_bound_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - ( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - False, - )) + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), + ) def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() api._sink_update_response = { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.WRITER_IDENTITY, + "name": self.SINK_NAME, + "filter": self.FILTER, + "destination": self.DESTINATION_URI, + "writerIdentity": self.WRITER_IDENTITY, } - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client1) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + ) sink.update(client=client2, unique_writer_identity=True) @@ -279,52 +252,49 @@ def test_update_w_alternate_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - ( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - True, - )) + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), + ) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() - sink = self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + ) sink.delete() - self.assertEqual(api._sink_delete_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() - sink = 
self._make_one(self.SINK_NAME, self.FILTER, - self.DESTINATION_URI, - client=client1) + sink = self._make_one( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + ) sink.delete(client=client2) - self.assertEqual(api._sink_delete_called_with, - (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) class _Client(object): - def __init__(self, project): self.project = project class _DummySinksAPI(object): - - def sink_create(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_create( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): self._sink_create_called_with = ( - project, sink_name, filter_, destination, unique_writer_identity) + project, + sink_name, + filter_, + destination, + unique_writer_identity, + ) return self._sink_create_response def sink_get(self, project, sink_name): @@ -334,12 +304,18 @@ def sink_get(self, project, sink_name): try: return self._sink_get_response except AttributeError: - raise NotFound('miss') + raise NotFound("miss") - def sink_update(self, project, sink_name, filter_, destination, - unique_writer_identity=False): + def sink_update( + self, project, sink_name, filter_, destination, unique_writer_identity=False + ): self._sink_update_called_with = ( - project, sink_name, filter_, destination, unique_writer_identity) + project, + sink_name, + filter_, + destination, + unique_writer_identity, + ) return self._sink_update_response def sink_delete(self, project, sink_name): From 45ce29ea34f4dc970055515feea2dbfde74f307a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:13:54 -0800 Subject: [PATCH 204/855] Run black at end of synth.py (#6698) * Run black at end of synth.py * blacken logging --- .../cloud/logging_v2/proto/log_entry_pb2.py | 998 +++++--- .../logging_v2/proto/log_entry_pb2_grpc.py | 1 - .../logging_v2/proto/logging_config_pb2.py | 2114 ++++++++++------- .../proto/logging_config_pb2_grpc.py | 303 +-- .../logging_v2/proto/logging_metrics_pb2.py | 1198 ++++++---- .../proto/logging_metrics_pb2_grpc.py | 163 +- .../cloud/logging_v2/proto/logging_pb2.py | 1621 ++++++++----- .../logging_v2/proto/logging_pb2_grpc.py | 163 +- packages/google-cloud-logging/noxfile.py | 27 +- packages/google-cloud-logging/synth.py | 4 +- 10 files changed, 4052 insertions(+), 2540 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 0b5f10f863cc..a69978eb1e2f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -3,354 +3,678 @@ # source: google/cloud/logging_v2/proto/log_entry.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 
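# --- Editorial aside, not part of the patch. The synth.py hunk referenced in
# this commit's diffstat is not reproduced here; in google-cloud-python repos
# of this era the usual way to "run black at end of synth.py" was to shell out
# to the repo's blacken nox session after regeneration. A hedged sketch (the
# generation steps and exact arguments are assumptions, not taken from this
# patch):
import synthtool as s

# ... GAPIC generation and s.move() of the generated files elided ...
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
# --- End aside.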
-from google.logging.type import http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2 -from google.logging.type import log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2 +from google.api import ( + monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, +) +from google.logging.type import ( + http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, +) +from google.logging.type import ( + log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, +) from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/logging_v2/proto/log_entry.proto', - package='google.logging.v2', - syntax='proto3', - serialized_pb=_b('\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload\"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08\"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - + name="google/cloud/logging_v2/proto/log_entry.proto", + package="google.logging.v2", + 
syntax="proto3", + serialized_pb=_b( + '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, + google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, + google_dot_protobuf_dot_any__pb2.DESCRIPTOR, + google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _LOGENTRY_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.logging.v2.LogEntry.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.logging.v2.LogEntry.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.logging.v2.LogEntry.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1028, - serialized_end=1073, + name="LabelsEntry", + full_name="google.logging.v2.LogEntry.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.LogEntry.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.LogEntry.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1028, + serialized_end=1073, ) _LOGENTRY = _descriptor.Descriptor( - name='LogEntry', - full_name='google.logging.v2.LogEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='log_name', full_name='google.logging.v2.LogEntry.log_name', index=0, - number=12, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resource', full_name='google.logging.v2.LogEntry.resource', index=1, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='proto_payload', full_name='google.logging.v2.LogEntry.proto_payload', index=2, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='text_payload', full_name='google.logging.v2.LogEntry.text_payload', index=3, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_payload', full_name='google.logging.v2.LogEntry.json_payload', index=4, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp', full_name='google.logging.v2.LogEntry.timestamp', index=5, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='receive_timestamp', full_name='google.logging.v2.LogEntry.receive_timestamp', index=6, - number=24, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='severity', full_name='google.logging.v2.LogEntry.severity', index=7, - number=10, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='insert_id', full_name='google.logging.v2.LogEntry.insert_id', index=8, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='http_request', full_name='google.logging.v2.LogEntry.http_request', index=9, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.logging.v2.LogEntry.labels', index=10, - number=11, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metadata', full_name='google.logging.v2.LogEntry.metadata', index=11, - number=25, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='operation', full_name='google.logging.v2.LogEntry.operation', index=12, - number=15, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='trace', full_name='google.logging.v2.LogEntry.trace', index=13, - number=22, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='span_id', full_name='google.logging.v2.LogEntry.span_id', index=14, - number=27, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='trace_sampled', full_name='google.logging.v2.LogEntry.trace_sampled', index=15, - number=30, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='source_location', full_name='google.logging.v2.LogEntry.source_location', index=16, - number=23, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_LOGENTRY_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='payload', full_name='google.logging.v2.LogEntry.payload', - index=0, containing_type=None, fields=[]), - ], - serialized_start=306, - serialized_end=1084, + name="LogEntry", + full_name="google.logging.v2.LogEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.LogEntry.log_name", + index=0, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.logging.v2.LogEntry.resource", + index=1, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="proto_payload", + full_name="google.logging.v2.LogEntry.proto_payload", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text_payload", + full_name="google.logging.v2.LogEntry.text_payload", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_payload", + full_name="google.logging.v2.LogEntry.json_payload", + index=4, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timestamp", + full_name="google.logging.v2.LogEntry.timestamp", + index=5, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="receive_timestamp", + full_name="google.logging.v2.LogEntry.receive_timestamp", + index=6, + number=24, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="severity", + full_name="google.logging.v2.LogEntry.severity", + index=7, + number=10, + type=14, + 
cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="insert_id", + full_name="google.logging.v2.LogEntry.insert_id", + index=8, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="http_request", + full_name="google.logging.v2.LogEntry.http_request", + index=9, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.logging.v2.LogEntry.labels", + index=10, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metadata", + full_name="google.logging.v2.LogEntry.metadata", + index=11, + number=25, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="operation", + full_name="google.logging.v2.LogEntry.operation", + index=12, + number=15, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trace", + full_name="google.logging.v2.LogEntry.trace", + index=13, + number=22, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="span_id", + full_name="google.logging.v2.LogEntry.span_id", + index=14, + number=27, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trace_sampled", + full_name="google.logging.v2.LogEntry.trace_sampled", + index=15, + number=30, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_location", + full_name="google.logging.v2.LogEntry.source_location", + index=16, + number=23, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], 
+ nested_types=[_LOGENTRY_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="payload", + full_name="google.logging.v2.LogEntry.payload", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=306, + serialized_end=1084, ) _LOGENTRYOPERATION = _descriptor.Descriptor( - name='LogEntryOperation', - full_name='google.logging.v2.LogEntryOperation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='google.logging.v2.LogEntryOperation.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='producer', full_name='google.logging.v2.LogEntryOperation.producer', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='first', full_name='google.logging.v2.LogEntryOperation.first', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='last', full_name='google.logging.v2.LogEntryOperation.last', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1086, - serialized_end=1164, + name="LogEntryOperation", + full_name="google.logging.v2.LogEntryOperation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="id", + full_name="google.logging.v2.LogEntryOperation.id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="producer", + full_name="google.logging.v2.LogEntryOperation.producer", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="first", + full_name="google.logging.v2.LogEntryOperation.first", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="last", + full_name="google.logging.v2.LogEntryOperation.last", + index=3, + number=4, + type=8, + cpp_type=7, 
+ label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1086, + serialized_end=1164, ) _LOGENTRYSOURCELOCATION = _descriptor.Descriptor( - name='LogEntrySourceLocation', - full_name='google.logging.v2.LogEntrySourceLocation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='file', full_name='google.logging.v2.LogEntrySourceLocation.file', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='line', full_name='google.logging.v2.LogEntrySourceLocation.line', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='function', full_name='google.logging.v2.LogEntrySourceLocation.function', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1166, - serialized_end=1236, + name="LogEntrySourceLocation", + full_name="google.logging.v2.LogEntrySourceLocation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file", + full_name="google.logging.v2.LogEntrySourceLocation.file", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="line", + full_name="google.logging.v2.LogEntrySourceLocation.line", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="function", + full_name="google.logging.v2.LogEntrySourceLocation.function", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1166, + serialized_end=1236, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY -_LOGENTRY.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE 
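
[Annotation, not part of the patch: the removed lines around this point, and their reflowed replacements that follow, wire each field's `message_type`/`enum_type` cross-reference after all descriptors exist and attach the three payload fields to the `payload` oneof. A minimal sketch of the runtime effect, assuming the generated module is importable at the path given by `__module__` below; setting one payload member clears the others.]

    from google.cloud.logging_v2.proto import log_entry_pb2

    entry = log_entry_pb2.LogEntry()
    entry.text_payload = "hello"
    assert entry.WhichOneof("payload") == "text_payload"

    # json_payload is a google.protobuf.Struct; assigning it displaces text_payload.
    entry.json_payload.update({"message": "hello"})
    assert entry.WhichOneof("payload") == "json_payload"
    assert entry.text_payload == ""  # cleared when the oneof switched
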
-_LOGENTRY.fields_by_name['proto_payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_LOGENTRY.fields_by_name['json_payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_LOGENTRY.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name['receive_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name['severity'].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY -_LOGENTRY.fields_by_name['http_request'].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST -_LOGENTRY.fields_by_name['labels'].message_type = _LOGENTRY_LABELSENTRY -_LOGENTRY.fields_by_name['metadata'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_LOGENTRY.fields_by_name['operation'].message_type = _LOGENTRYOPERATION -_LOGENTRY.fields_by_name['source_location'].message_type = _LOGENTRYSOURCELOCATION -_LOGENTRY.oneofs_by_name['payload'].fields.append( - _LOGENTRY.fields_by_name['proto_payload']) -_LOGENTRY.fields_by_name['proto_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] -_LOGENTRY.oneofs_by_name['payload'].fields.append( - _LOGENTRY.fields_by_name['text_payload']) -_LOGENTRY.fields_by_name['text_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] -_LOGENTRY.oneofs_by_name['payload'].fields.append( - _LOGENTRY.fields_by_name['json_payload']) -_LOGENTRY.fields_by_name['json_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] -DESCRIPTOR.message_types_by_name['LogEntry'] = _LOGENTRY -DESCRIPTOR.message_types_by_name['LogEntryOperation'] = _LOGENTRYOPERATION -DESCRIPTOR.message_types_by_name['LogEntrySourceLocation'] = _LOGENTRYSOURCELOCATION +_LOGENTRY.fields_by_name[ + "resource" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_LOGENTRY.fields_by_name[ + "proto_payload" +].message_type = google_dot_protobuf_dot_any__pb2._ANY +_LOGENTRY.fields_by_name[ + "json_payload" +].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_LOGENTRY.fields_by_name[ + "timestamp" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name[ + "receive_timestamp" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name[ + "severity" +].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY +_LOGENTRY.fields_by_name[ + "http_request" +].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST +_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY +_LOGENTRY.fields_by_name[ + "metadata" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA +_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION +_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["proto_payload"] +) +_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + "payload" +] +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["text_payload"] +) +_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + "payload" +] +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["json_payload"] +) +_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + 
"payload" +] +DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY +DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION +DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION _sym_db.RegisterFileDescriptor(DESCRIPTOR) -LogEntry = _reflection.GeneratedProtocolMessageType('LogEntry', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _LOGENTRY_LABELSENTRY, - __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) - )) - , - DESCRIPTOR = _LOGENTRY, - __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' - , - __doc__ = """An individual entry in a log. +LogEntry = _reflection.GeneratedProtocolMessageType( + "LogEntry", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRY_LABELSENTRY, + __module__="google.cloud.logging_v2.proto.log_entry_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) + ), + ), + DESCRIPTOR=_LOGENTRY, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""An individual entry in a log. Attributes: @@ -457,16 +781,19 @@ Optional. Source code location information associated with the log entry, if any. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) + ), +) _sym_db.RegisterMessage(LogEntry) _sym_db.RegisterMessage(LogEntry.LabelsEntry) -LogEntryOperation = _reflection.GeneratedProtocolMessageType('LogEntryOperation', (_message.Message,), dict( - DESCRIPTOR = _LOGENTRYOPERATION, - __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' - , - __doc__ = """Additional information about a potentially long-running operation with +LogEntryOperation = _reflection.GeneratedProtocolMessageType( + "LogEntryOperation", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRYOPERATION, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""Additional information about a potentially long-running operation with which a log entry is associated. @@ -487,15 +814,18 @@ Optional. Set this to True if this is the last log entry in the operation. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) + ), +) _sym_db.RegisterMessage(LogEntryOperation) -LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType('LogEntrySourceLocation', (_message.Message,), dict( - DESCRIPTOR = _LOGENTRYSOURCELOCATION, - __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' - , - __doc__ = """Additional information about the source code location that produced the +LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( + "LogEntrySourceLocation", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRYSOURCELOCATION, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""Additional information about the source code location that produced the log entry. @@ -516,13 +846,21 @@ ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` (Go), ``function`` (Python). 
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) + ), +) _sym_db.RegisterMessage(LogEntrySourceLocation) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), +) _LOGENTRY_LABELSENTRY.has_options = True -_LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py index a89435267cb2..07cb78fe03a9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc - diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 17a3ec78985c..5782fd342c21 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -3,12 +3,14 @@ # source: google/cloud/logging_v2/proto/logging_config.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -21,677 +23,1075 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/logging_v2/proto/logging_config.proto', - package='google.logging.v2', - syntax='proto3', - serialized_pb=_b('\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse\"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12\"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12\"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01\"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%\"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*\"\"/v2/{parent=organizations/*}/sinks:\x04sinkZ$\"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,\"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a\"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12\"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12\"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/\"\"/v2/{parent=projects/*}/exclusions:\texclusionZ4\"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ.\"!/v2/{parent=folders/*}/exclusions:\texclusionZ6\")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=bi
llingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty\"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - + name="google/cloud/logging_v2/proto/logging_config.proto", + package="google.logging.v2", + syntax="proto3", + serialized_pb=_b( + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}
:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( - name='VersionFormat', - full_name='google.logging.v2.LogSink.VersionFormat', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='VERSION_FORMAT_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='V2', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='V1', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=492, - serialized_end=555, + name="VersionFormat", + full_name="google.logging.v2.LogSink.VersionFormat", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="VERSION_FORMAT_UNSPECIFIED", + index=0, + number=0, + options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="V2", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="V1", index=2, number=2, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=492, + serialized_end=555, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) _LOGSINK = _descriptor.Descriptor( - name='LogSink', - full_name='google.logging.v2.LogSink', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.LogSink.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='destination', full_name='google.logging.v2.LogSink.destination', index=1, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filter', full_name='google.logging.v2.LogSink.filter', index=2, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='output_version_format', full_name='google.logging.v2.LogSink.output_version_format', index=3, - number=6, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='writer_identity', full_name='google.logging.v2.LogSink.writer_identity', index=4, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='include_children', full_name='google.logging.v2.LogSink.include_children', index=5, - number=9, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='start_time', full_name='google.logging.v2.LogSink.start_time', index=6, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='end_time', full_name='google.logging.v2.LogSink.end_time', index=7, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _LOGSINK_VERSIONFORMAT, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=200, - serialized_end=555, + name="LogSink", + full_name="google.logging.v2.LogSink", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogSink.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="destination", + full_name="google.logging.v2.LogSink.destination", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogSink.filter", + index=2, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_version_format", + full_name="google.logging.v2.LogSink.output_version_format", + index=3, + number=6, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=_descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="writer_identity", + full_name="google.logging.v2.LogSink.writer_identity", + index=4, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="include_children", + full_name="google.logging.v2.LogSink.include_children", + index=5, + number=9, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.logging.v2.LogSink.start_time", + index=6, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=_descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.logging.v2.LogSink.end_time", + index=7, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=_descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") + ), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_LOGSINK_VERSIONFORMAT], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=200, + serialized_end=555, ) _LISTSINKSREQUEST = _descriptor.Descriptor( - name='ListSinksRequest', - full_name='google.logging.v2.ListSinksRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.ListSinksRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListSinksRequest.page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListSinksRequest.page_size', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=557, - serialized_end=630, + name="ListSinksRequest", + full_name="google.logging.v2.ListSinksRequest", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListSinksRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListSinksRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListSinksRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=557, + serialized_end=630, ) _LISTSINKSRESPONSE = _descriptor.Descriptor( - name='ListSinksResponse', - full_name='google.logging.v2.ListSinksResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sinks', full_name='google.logging.v2.ListSinksResponse.sinks', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListSinksResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=632, - serialized_end=719, + name="ListSinksResponse", + full_name="google.logging.v2.ListSinksResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sinks", + full_name="google.logging.v2.ListSinksResponse.sinks", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListSinksResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=632, + serialized_end=719, ) _GETSINKREQUEST = _descriptor.Descriptor( - name='GetSinkRequest', - full_name='google.logging.v2.GetSinkRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sink_name', full_name='google.logging.v2.GetSinkRequest.sink_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=721, - serialized_end=756, + name="GetSinkRequest", + full_name="google.logging.v2.GetSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", + full_name="google.logging.v2.GetSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=721, + serialized_end=756, ) _CREATESINKREQUEST = _descriptor.Descriptor( - name='CreateSinkRequest', - full_name='google.logging.v2.CreateSinkRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.CreateSinkRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sink', full_name='google.logging.v2.CreateSinkRequest.sink', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='unique_writer_identity', full_name='google.logging.v2.CreateSinkRequest.unique_writer_identity', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=758, - serialized_end=867, + name="CreateSinkRequest", + full_name="google.logging.v2.CreateSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateSinkRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="sink", + full_name="google.logging.v2.CreateSinkRequest.sink", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unique_writer_identity", + full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=758, + serialized_end=867, ) _UPDATESINKREQUEST = _descriptor.Descriptor( - name='UpdateSinkRequest', - full_name='google.logging.v2.UpdateSinkRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sink_name', full_name='google.logging.v2.UpdateSinkRequest.sink_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sink', full_name='google.logging.v2.UpdateSinkRequest.sink', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='unique_writer_identity', full_name='google.logging.v2.UpdateSinkRequest.unique_writer_identity', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.logging.v2.UpdateSinkRequest.update_mask', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=870, - serialized_end=1031, + name="UpdateSinkRequest", + full_name="google.logging.v2.UpdateSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", + full_name="google.logging.v2.UpdateSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="sink", + full_name="google.logging.v2.UpdateSinkRequest.sink", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, 
+ is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unique_writer_identity", + full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.logging.v2.UpdateSinkRequest.update_mask", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=870, + serialized_end=1031, ) _DELETESINKREQUEST = _descriptor.Descriptor( - name='DeleteSinkRequest', - full_name='google.logging.v2.DeleteSinkRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sink_name', full_name='google.logging.v2.DeleteSinkRequest.sink_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1033, - serialized_end=1071, + name="DeleteSinkRequest", + full_name="google.logging.v2.DeleteSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", + full_name="google.logging.v2.DeleteSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1033, + serialized_end=1071, ) _LOGEXCLUSION = _descriptor.Descriptor( - name='LogExclusion', - full_name='google.logging.v2.LogExclusion', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.LogExclusion.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', full_name='google.logging.v2.LogExclusion.description', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filter', full_name='google.logging.v2.LogExclusion.filter', index=2, 
- number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='disabled', full_name='google.logging.v2.LogExclusion.disabled', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1073, - serialized_end=1156, + name="LogExclusion", + full_name="google.logging.v2.LogExclusion", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogExclusion.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.logging.v2.LogExclusion.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogExclusion.filter", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disabled", + full_name="google.logging.v2.LogExclusion.disabled", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1073, + serialized_end=1156, ) _LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( - name='ListExclusionsRequest', - full_name='google.logging.v2.ListExclusionsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.ListExclusionsRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListExclusionsRequest.page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, 
file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListExclusionsRequest.page_size', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1158, - serialized_end=1236, + name="ListExclusionsRequest", + full_name="google.logging.v2.ListExclusionsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListExclusionsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListExclusionsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListExclusionsRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1158, + serialized_end=1236, ) _LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( - name='ListExclusionsResponse', - full_name='google.logging.v2.ListExclusionsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='exclusions', full_name='google.logging.v2.ListExclusionsResponse.exclusions', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListExclusionsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1238, - serialized_end=1340, + name="ListExclusionsResponse", + full_name="google.logging.v2.ListExclusionsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="exclusions", + full_name="google.logging.v2.ListExclusionsResponse.exclusions", + index=0, + number=1, + type=11, + cpp_type=10, 
+ label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListExclusionsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1238, + serialized_end=1340, ) _GETEXCLUSIONREQUEST = _descriptor.Descriptor( - name='GetExclusionRequest', - full_name='google.logging.v2.GetExclusionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.GetExclusionRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1342, - serialized_end=1377, + name="GetExclusionRequest", + full_name="google.logging.v2.GetExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.GetExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1342, + serialized_end=1377, ) _CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name='CreateExclusionRequest', - full_name='google.logging.v2.CreateExclusionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.CreateExclusionRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='exclusion', full_name='google.logging.v2.CreateExclusionRequest.exclusion', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1379, - serialized_end=1471, + name="CreateExclusionRequest", + full_name="google.logging.v2.CreateExclusionRequest", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateExclusionRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="exclusion", + full_name="google.logging.v2.CreateExclusionRequest.exclusion", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1379, + serialized_end=1471, ) _UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name='UpdateExclusionRequest', - full_name='google.logging.v2.UpdateExclusionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.UpdateExclusionRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='exclusion', full_name='google.logging.v2.UpdateExclusionRequest.exclusion', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.logging.v2.UpdateExclusionRequest.update_mask', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1474, - serialized_end=1613, + name="UpdateExclusionRequest", + full_name="google.logging.v2.UpdateExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.UpdateExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="exclusion", + full_name="google.logging.v2.UpdateExclusionRequest.exclusion", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.logging.v2.UpdateExclusionRequest.update_mask", + index=2, + 
number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1474, + serialized_end=1613, ) _DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( - name='DeleteExclusionRequest', - full_name='google.logging.v2.DeleteExclusionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.DeleteExclusionRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1615, - serialized_end=1653, + name="DeleteExclusionRequest", + full_name="google.logging.v2.DeleteExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.DeleteExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1615, + serialized_end=1653, ) -_LOGSINK.fields_by_name['output_version_format'].enum_type = _LOGSINK_VERSIONFORMAT -_LOGSINK.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK -_LISTSINKSRESPONSE.fields_by_name['sinks'].message_type = _LOGSINK -_CREATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTEXCLUSIONSRESPONSE.fields_by_name['exclusions'].message_type = _LOGEXCLUSION -_CREATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name['LogSink'] = _LOGSINK -DESCRIPTOR.message_types_by_name['ListSinksRequest'] = _LISTSINKSREQUEST -DESCRIPTOR.message_types_by_name['ListSinksResponse'] = _LISTSINKSRESPONSE -DESCRIPTOR.message_types_by_name['GetSinkRequest'] = 
_GETSINKREQUEST -DESCRIPTOR.message_types_by_name['CreateSinkRequest'] = _CREATESINKREQUEST -DESCRIPTOR.message_types_by_name['UpdateSinkRequest'] = _UPDATESINKREQUEST -DESCRIPTOR.message_types_by_name['DeleteSinkRequest'] = _DELETESINKREQUEST -DESCRIPTOR.message_types_by_name['LogExclusion'] = _LOGEXCLUSION -DESCRIPTOR.message_types_by_name['ListExclusionsRequest'] = _LISTEXCLUSIONSREQUEST -DESCRIPTOR.message_types_by_name['ListExclusionsResponse'] = _LISTEXCLUSIONSRESPONSE -DESCRIPTOR.message_types_by_name['GetExclusionRequest'] = _GETEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name['CreateExclusionRequest'] = _CREATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name['UpdateExclusionRequest'] = _UPDATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name['DeleteExclusionRequest'] = _DELETEEXCLUSIONREQUEST +_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK +_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION +_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK +DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST +DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE +DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST +DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST +DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST +DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST +DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION +DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST +DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE +DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST _sym_db.RegisterFileDescriptor(DESCRIPTOR) -LogSink = _reflection.GeneratedProtocolMessageType('LogSink', (_message.Message,), dict( - DESCRIPTOR = _LOGSINK, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """Describes a sink used to export log entries to one of the following +LogSink = _reflection.GeneratedProtocolMessageType( + "LogSink", + (_message.Message,), + dict( + DESCRIPTOR=_LOGSINK, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter controls which log entries are exported. The sink must be created within a project, organization, @@ -761,15 +1161,18 @@ Deprecated. This field is ignored when creating or updating sinks. 
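# Illustrative aside, not part of this patch: once this generated module is
# importable, the descriptors above materialize as ordinary protobuf message
# classes. A minimal sketch -- the sink name, destination bucket, and filter
# below are placeholder values:
#
#     from google.cloud.logging_v2.proto import logging_config_pb2
#
#     sink = logging_config_pb2.LogSink(
#         name="my-sink",                                  # placeholder
#         destination="storage.googleapis.com/my-bucket",  # placeholder bucket
#         filter="severity>=ERROR",
#     )
#     data = sink.SerializeToString()   # serialize to wire-format bytes
#     same = logging_config_pb2.LogSink.FromString(data)
#     assert same.name == "my-sink"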
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) + ), +) _sym_db.RegisterMessage(LogSink) -ListSinksRequest = _reflection.GeneratedProtocolMessageType('ListSinksRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTSINKSREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``ListSinks``. +ListSinksRequest = _reflection.GeneratedProtocolMessageType( + "ListSinksRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSINKSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``ListSinks``. Attributes: @@ -791,15 +1194,18 @@ ``nextPageToken`` in the response indicates that more results might be available. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) + ), +) _sym_db.RegisterMessage(ListSinksRequest) -ListSinksResponse = _reflection.GeneratedProtocolMessageType('ListSinksResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTSINKSRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """Result returned from ``ListSinks``. +ListSinksResponse = _reflection.GeneratedProtocolMessageType( + "ListSinksResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSINKSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Result returned from ``ListSinks``. Attributes: @@ -811,15 +1217,18 @@ results, call the same method again using the value of ``nextPageToken`` as ``pageToken``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) + ), +) _sym_db.RegisterMessage(ListSinksResponse) -GetSinkRequest = _reflection.GeneratedProtocolMessageType('GetSinkRequest', (_message.Message,), dict( - DESCRIPTOR = _GETSINKREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``GetSink``. +GetSinkRequest = _reflection.GeneratedProtocolMessageType( + "GetSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``GetSink``. Attributes: @@ -831,15 +1240,18 @@ "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) + ), +) _sym_db.RegisterMessage(GetSinkRequest) -CreateSinkRequest = _reflection.GeneratedProtocolMessageType('CreateSinkRequest', (_message.Message,), dict( - DESCRIPTOR = _CREATESINKREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``CreateSink``. +CreateSinkRequest = _reflection.GeneratedProtocolMessageType( + "CreateSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``CreateSink``. Attributes: @@ -866,15 +1278,18 @@ sink. For more information, see ``writer_identity`` in [LogSink][google.logging.v2.LogSink]. 
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) + ), +) _sym_db.RegisterMessage(CreateSinkRequest) -UpdateSinkRequest = _reflection.GeneratedProtocolMessageType('UpdateSinkRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATESINKREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``UpdateSink``. +UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( + "UpdateSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``UpdateSink``. Attributes: @@ -916,15 +1331,18 @@ google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) + ), +) _sym_db.RegisterMessage(UpdateSinkRequest) -DeleteSinkRequest = _reflection.GeneratedProtocolMessageType('DeleteSinkRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETESINKREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``DeleteSink``. +DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( + "DeleteSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``DeleteSink``. Attributes: @@ -937,15 +1355,18 @@ "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) + ), +) _sym_db.RegisterMessage(DeleteSinkRequest) -LogExclusion = _reflection.GeneratedProtocolMessageType('LogExclusion', (_message.Message,), dict( - DESCRIPTOR = _LOGEXCLUSION, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """Specifies a set of log entries that are not to be stored in Logging. If +LogExclusion = _reflection.GeneratedProtocolMessageType( + "LogExclusion", + (_message.Message,), + dict( + DESCRIPTOR=_LOGEXCLUSION, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Specifies a set of log entries that are not to be stored in Logging. If your project receives a large volume of logs, you might be able to use exclusions to reduce your chargeable logs. Exclusions are processed after log sinks, so you can export log entries before they are excluded. @@ -977,15 +1398,18 @@ `exclusions.patch `__ to change the value of this field. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) + ), +) _sym_db.RegisterMessage(LogExclusion) -ListExclusionsRequest = _reflection.GeneratedProtocolMessageType('ListExclusionsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTEXCLUSIONSREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``ListExclusions``. 
+ListExclusionsRequest = _reflection.GeneratedProtocolMessageType( + "ListExclusionsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTEXCLUSIONSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``ListExclusions``. Attributes: @@ -1007,15 +1431,18 @@ ``nextPageToken`` in the response indicates that more results might be available. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsRequest) + ), +) _sym_db.RegisterMessage(ListExclusionsRequest) -ListExclusionsResponse = _reflection.GeneratedProtocolMessageType('ListExclusionsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTEXCLUSIONSRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """Result returned from ``ListExclusions``. +ListExclusionsResponse = _reflection.GeneratedProtocolMessageType( + "ListExclusionsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTEXCLUSIONSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Result returned from ``ListExclusions``. Attributes: @@ -1027,15 +1454,18 @@ results, call the same method again using the value of ``nextPageToken`` as ``pageToken``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsResponse) + ), +) _sym_db.RegisterMessage(ListExclusionsResponse) -GetExclusionRequest = _reflection.GeneratedProtocolMessageType('GetExclusionRequest', (_message.Message,), dict( - DESCRIPTOR = _GETEXCLUSIONREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``GetExclusion``. +GetExclusionRequest = _reflection.GeneratedProtocolMessageType( + "GetExclusionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETEXCLUSIONREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``GetExclusion``. Attributes: @@ -1048,15 +1478,18 @@ Example: ``"projects/my-project-id/exclusions/my-exclusion- id"``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetExclusionRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.GetExclusionRequest) + ), +) _sym_db.RegisterMessage(GetExclusionRequest) -CreateExclusionRequest = _reflection.GeneratedProtocolMessageType('CreateExclusionRequest', (_message.Message,), dict( - DESCRIPTOR = _CREATEEXCLUSIONREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``CreateExclusion``. +CreateExclusionRequest = _reflection.GeneratedProtocolMessageType( + "CreateExclusionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEEXCLUSIONREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``CreateExclusion``. Attributes: @@ -1072,15 +1505,18 @@ exclusion name that is not already used in the parent resource. 
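# Illustrative aside, not part of this patch: building the CreateExclusion
# request documented above. The exclusion name and filter are placeholders;
# per the docstring, the name must not already be used in the parent resource:
#
#     exclusion = logging_config_pb2.LogExclusion(
#         name="exclude-debug",
#         description="Drop noisy debug-level entries",
#         filter="severity<=DEBUG",
#     )
#     request = logging_config_pb2.CreateExclusionRequest(
#         parent="projects/my-project-id",
#         exclusion=exclusion,
#     )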
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateExclusionRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateExclusionRequest) + ), +) _sym_db.RegisterMessage(CreateExclusionRequest) -UpdateExclusionRequest = _reflection.GeneratedProtocolMessageType('UpdateExclusionRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATEEXCLUSIONREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``UpdateExclusion``. +UpdateExclusionRequest = _reflection.GeneratedProtocolMessageType( + "UpdateExclusionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEEXCLUSIONREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``UpdateExclusion``. Attributes: @@ -1105,15 +1541,18 @@ change the filter and description of an exclusion, specify an ``update_mask`` of ``"filter,description"``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateExclusionRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateExclusionRequest) + ), +) _sym_db.RegisterMessage(UpdateExclusionRequest) -DeleteExclusionRequest = _reflection.GeneratedProtocolMessageType('DeleteExclusionRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETEEXCLUSIONREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' - , - __doc__ = """The parameters to ``DeleteExclusion``. +DeleteExclusionRequest = _reflection.GeneratedProtocolMessageType( + "DeleteExclusionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEEXCLUSIONREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``DeleteExclusion``. Attributes: @@ -1127,122 +1566,185 @@ Example: ``"projects/my-project-id/exclusions/my-exclusion- id"``. 
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteExclusionRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteExclusionRequest) + ), +) _sym_db.RegisterMessage(DeleteExclusionRequest) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) -_LOGSINK.fields_by_name['output_version_format'].has_options = True -_LOGSINK.fields_by_name['output_version_format']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) -_LOGSINK.fields_by_name['start_time'].has_options = True -_LOGSINK.fields_by_name['start_time']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) -_LOGSINK.fields_by_name['end_time'].has_options = True -_LOGSINK.fields_by_name['end_time']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), +) +_LOGSINK.fields_by_name["output_version_format"].has_options = True +_LOGSINK.fields_by_name["output_version_format"]._options = _descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") +) +_LOGSINK.fields_by_name["start_time"].has_options = True +_LOGSINK.fields_by_name["start_time"]._options = _descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") +) +_LOGSINK.fields_by_name["end_time"].has_options = True +_LOGSINK.fields_by_name["end_time"]._options = _descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") +) _CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( - name='ConfigServiceV2', - full_name='google.logging.v2.ConfigServiceV2', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=1656, - serialized_end=4876, - methods=[ - _descriptor.MethodDescriptor( - name='ListSinks', - full_name='google.logging.v2.ConfigServiceV2.ListSinks', + name="ConfigServiceV2", + full_name="google.logging.v2.ConfigServiceV2", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_LISTSINKSREQUEST, - output_type=_LISTSINKSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022\"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks')), - ), - _descriptor.MethodDescriptor( - name='GetSink', - full_name='google.logging.v2.ConfigServiceV2.GetSink', - index=1, - containing_service=None, - input_type=_GETSINKREQUEST, - output_type=_LOGSINK, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022\'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}')), - ), - _descriptor.MethodDescriptor( - name='CreateSink', - full_name='google.logging.v2.ConfigServiceV2.CreateSink', - index=2, - containing_service=None, - input_type=_CREATESINKREQUEST, - 
output_type=_LOGSINK, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\305\001\"\026/v2/{parent=*/*}/sinks:\004sinkZ%\"\035/v2/{parent=projects/*}/sinks:\004sinkZ*\"\"/v2/{parent=organizations/*}/sinks:\004sinkZ$\"\034/v2/{parent=folders/*}/sinks:\004sinkZ,\"$/v2/{parent=billingAccounts/*}/sinks:\004sink')), - ), - _descriptor.MethodDescriptor( - name='UpdateSink', - full_name='google.logging.v2.ConfigServiceV2.UpdateSink', - index=3, - containing_service=None, - input_type=_UPDATESINKREQUEST, - output_type=_LOGSINK, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032\'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink')), - ), - _descriptor.MethodDescriptor( - name='DeleteSink', - full_name='google.logging.v2.ConfigServiceV2.DeleteSink', - index=4, - containing_service=None, - input_type=_DELETESINKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}')), - ), - _descriptor.MethodDescriptor( - name='ListExclusions', - full_name='google.logging.v2.ConfigServiceV2.ListExclusions', - index=5, - containing_service=None, - input_type=_LISTEXCLUSIONSREQUEST, - output_type=_LISTEXCLUSIONSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022\'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions')), - ), - _descriptor.MethodDescriptor( - name='GetExclusion', - full_name='google.logging.v2.ConfigServiceV2.GetExclusion', - index=6, - containing_service=None, - input_type=_GETEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022\'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}')), - ), - _descriptor.MethodDescriptor( - name='CreateExclusion', - full_name='google.logging.v2.ConfigServiceV2.CreateExclusion', - index=7, - containing_service=None, - input_type=_CREATEEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\367\001\"\033/v2/{parent=*/*}/exclusions:\texclusionZ/\"\"/v2/{parent=projects/*}/exclusions:\texclusionZ4\"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ.\"!/v2/{parent=folders/*}/exclusions:\texclusionZ6\")/v2/{parent=billingAccounts/*}/exclusions:\texclusion')), - ), - _descriptor.MethodDescriptor( - name='UpdateExclusion', - full_name='google.logging.v2.ConfigServiceV2.UpdateExclusion', - index=8, - containing_service=None, - 
input_type=_UPDATEEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion')), - ), - _descriptor.MethodDescriptor( - name='DeleteExclusion', - full_name='google.logging.v2.ConfigServiceV2.DeleteExclusion', - index=9, - containing_service=None, - input_type=_DELETEEXCLUSIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}')), - ), -]) + options=None, + serialized_start=1656, + serialized_end=4876, + methods=[ + _descriptor.MethodDescriptor( + name="ListSinks", + full_name="google.logging.v2.ConfigServiceV2.ListSinks", + index=0, + containing_service=None, + input_type=_LISTSINKSREQUEST, + output_type=_LISTSINKSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks' + ), + ), + ), + _descriptor.MethodDescriptor( + name="GetSink", + full_name="google.logging.v2.ConfigServiceV2.GetSink", + index=1, + containing_service=None, + input_type=_GETSINKREQUEST, + output_type=_LOGSINK, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="CreateSink", + full_name="google.logging.v2.ConfigServiceV2.CreateSink", + index=2, + containing_service=None, + input_type=_CREATESINKREQUEST, + output_type=_LOGSINK, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink' + ), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateSink", + full_name="google.logging.v2.ConfigServiceV2.UpdateSink", + index=3, + containing_service=None, + input_type=_UPDATESINKREQUEST, + output_type=_LOGSINK, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteSink", + 
full_name="google.logging.v2.ConfigServiceV2.DeleteSink", + index=4, + containing_service=None, + input_type=_DELETESINKREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListExclusions", + full_name="google.logging.v2.ConfigServiceV2.ListExclusions", + index=5, + containing_service=None, + input_type=_LISTEXCLUSIONSREQUEST, + output_type=_LISTEXCLUSIONSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions" + ), + ), + ), + _descriptor.MethodDescriptor( + name="GetExclusion", + full_name="google.logging.v2.ConfigServiceV2.GetExclusion", + index=6, + containing_service=None, + input_type=_GETEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="CreateExclusion", + full_name="google.logging.v2.ConfigServiceV2.CreateExclusion", + index=7, + containing_service=None, + input_type=_CREATEEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion' + ), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateExclusion", + full_name="google.logging.v2.ConfigServiceV2.UpdateExclusion", + index=8, + containing_service=None, + input_type=_UPDATEEXCLUSIONREQUEST, + output_type=_LOGEXCLUSION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteExclusion", + full_name="google.logging.v2.ConfigServiceV2.DeleteExclusion", + index=9, + containing_service=None, + input_type=_DELETEEXCLUSIONREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}" + ), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_CONFIGSERVICEV2) -DESCRIPTOR.services_by_name['ConfigServiceV2'] = _CONFIGSERVICEV2 
+DESCRIPTOR.services_by_name["ConfigServiceV2"] = _CONFIGSERVICEV2 # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index 87fc8a1a6088..6e93d39b46b4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -1,211 +1,214 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from google.cloud.logging_v2.proto import logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2 +from google.cloud.logging_v2.proto import ( + logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries out of + """Service for configuring sinks used to export log entries out of Logging. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. """ - self.ListSinks = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString, + self.ListSinks = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString, ) - self.GetSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + self.GetSink = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, ) - self.CreateSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + self.CreateSink = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, ) - self.UpdateSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + self.UpdateSink = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, ) - self.DeleteSink = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteSink = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.ListExclusions = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString, + self.ListExclusions = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString, ) - self.GetExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + self.GetExclusion = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, ) - self.CreateExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + self.CreateExclusion = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, ) - self.UpdateExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + self.UpdateExclusion = channel.unary_unary( + 
"/google.logging.v2.ConfigServiceV2/UpdateExclusion", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, ) - self.DeleteExclusion = channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteExclusion = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries out of + """Service for configuring sinks used to export log entries out of Logging. """ - def ListSinks(self, request, context): - """Lists sinks. + def ListSinks(self, request, context): + """Lists sinks. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetSink(self, request, context): - """Gets a sink. + def GetSink(self, request, context): + """Gets a sink. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteSink(self, request, context): - """Deletes a sink. 
If the sink has a unique `writer_identity`, then that + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that service account is also deleted. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource. + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetExclusion(self, request, context): - """Gets the description of an exclusion. + def GetExclusion(self, request, context): + """Gets the description of an exclusion. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def CreateExclusion(self, request, context): - """Creates a new exclusion in a specified parent resource. + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion. + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteExclusion(self, request, context): - """Deletes an exclusion. + def DeleteExclusion(self, request, context): + """Deletes an exclusion. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_ConfigServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'ListSinks': grpc.unary_unary_rpc_method_handler( - servicer.ListSinks, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString, - ), - 'GetSink': grpc.unary_unary_rpc_method_handler( - servicer.GetSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - 'CreateSink': grpc.unary_unary_rpc_method_handler( - servicer.CreateSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - 'UpdateSink': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - 'DeleteSink': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ListExclusions': grpc.unary_unary_rpc_method_handler( - servicer.ListExclusions, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString, - ), - 'GetExclusion': grpc.unary_unary_rpc_method_handler( - servicer.GetExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - 'CreateExclusion': grpc.unary_unary_rpc_method_handler( - servicer.CreateExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( - servicer.UpdateExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( - servicer.DeleteExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, - 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.ConfigServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "ListSinks": grpc.unary_unary_rpc_method_handler( + servicer.ListSinks, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString, + ), + "GetSink": grpc.unary_unary_rpc_method_handler( + servicer.GetSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + "CreateSink": grpc.unary_unary_rpc_method_handler( + servicer.CreateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + "UpdateSink": grpc.unary_unary_rpc_method_handler( + servicer.UpdateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + "DeleteSink": grpc.unary_unary_rpc_method_handler( + servicer.DeleteSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "ListExclusions": grpc.unary_unary_rpc_method_handler( + servicer.ListExclusions, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString, + ), + "GetExclusion": grpc.unary_unary_rpc_method_handler( + servicer.GetExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + "CreateExclusion": grpc.unary_unary_rpc_method_handler( + servicer.CreateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + "UpdateExclusion": grpc.unary_unary_rpc_method_handler( + servicer.UpdateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + "DeleteExclusion": grpc.unary_unary_rpc_method_handler( + servicer.DeleteExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = 
grpc.method_handlers_generic_handler( + "google.logging.v2.ConfigServiceV2", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 0be74ac88f55..5988162a3e76 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -3,12 +3,14 @@ # source: google/cloud/logging_v2/proto/logging_metrics.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -23,406 +25,630 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/logging_v2/proto/logging_metrics.proto', - package='google.logging.v2', - syntax='proto3', - serialized_pb=_b('\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01\"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"/\x82\xd3\xe4\x93\x02)\"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_distribution__pb2.DESCRIPTOR,google_dot_api_dot_metric__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - + name="google/cloud/logging_v2/proto/logging_metrics.proto", + package="google.logging.v2", + syntax="proto3", + serialized_pb=_b( + '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_distribution__pb2.DESCRIPTOR, + google_dot_api_dot_metric__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( - name='ApiVersion', - full_name='google.logging.v2.LogMetric.ApiVersion', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='V2', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='V1', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=662, - serialized_end=690, + name="ApiVersion", + full_name="google.logging.v2.LogMetric.ApiVersion", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="V2", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="V1", index=1, number=1, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=662, + serialized_end=690, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) _LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( - name='LabelExtractorsEntry', - full_name='google.logging.v2.LogMetric.LabelExtractorsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=606, - serialized_end=660, + name="LabelExtractorsEntry", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=606, + serialized_end=660, ) _LOGMETRIC = _descriptor.Descriptor( - name='LogMetric', - full_name='google.logging.v2.LogMetric', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.logging.v2.LogMetric.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', full_name='google.logging.v2.LogMetric.description', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filter', full_name='google.logging.v2.LogMetric.filter', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metric_descriptor', full_name='google.logging.v2.LogMetric.metric_descriptor', index=3, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value_extractor', full_name='google.logging.v2.LogMetric.value_extractor', index=4, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='label_extractors', full_name='google.logging.v2.LogMetric.label_extractors', index=5, - number=7, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bucket_options', full_name='google.logging.v2.LogMetric.bucket_options', index=6, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='version', full_name='google.logging.v2.LogMetric.version', index=7, - number=4, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY, ], - enum_types=[ - _LOGMETRIC_APIVERSION, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=257, - serialized_end=690, + name="LogMetric", + full_name="google.logging.v2.LogMetric", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogMetric.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.logging.v2.LogMetric.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogMetric.filter", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric_descriptor", + full_name="google.logging.v2.LogMetric.metric_descriptor", + index=3, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value_extractor", + full_name="google.logging.v2.LogMetric.value_extractor", + index=4, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="label_extractors", + full_name="google.logging.v2.LogMetric.label_extractors", + index=5, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bucket_options", + 
full_name="google.logging.v2.LogMetric.bucket_options", + index=6, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="version", + full_name="google.logging.v2.LogMetric.version", + index=7, + number=4, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=_descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") + ), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], + enum_types=[_LOGMETRIC_APIVERSION], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=257, + serialized_end=690, ) _LISTLOGMETRICSREQUEST = _descriptor.Descriptor( - name='ListLogMetricsRequest', - full_name='google.logging.v2.ListLogMetricsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.ListLogMetricsRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListLogMetricsRequest.page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListLogMetricsRequest.page_size', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=692, - serialized_end=770, + name="ListLogMetricsRequest", + full_name="google.logging.v2.ListLogMetricsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListLogMetricsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogMetricsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogMetricsRequest.page_size", + index=2, + number=3, 
+ type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=692, + serialized_end=770, ) _LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( - name='ListLogMetricsResponse', - full_name='google.logging.v2.ListLogMetricsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='metrics', full_name='google.logging.v2.ListLogMetricsResponse.metrics', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListLogMetricsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=772, - serialized_end=868, + name="ListLogMetricsResponse", + full_name="google.logging.v2.ListLogMetricsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metrics", + full_name="google.logging.v2.ListLogMetricsResponse.metrics", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=772, + serialized_end=868, ) _GETLOGMETRICREQUEST = _descriptor.Descriptor( - name='GetLogMetricRequest', - full_name='google.logging.v2.GetLogMetricRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='metric_name', full_name='google.logging.v2.GetLogMetricRequest.metric_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=870, - serialized_end=912, + name="GetLogMetricRequest", + 
full_name="google.logging.v2.GetLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.GetLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=870, + serialized_end=912, ) _CREATELOGMETRICREQUEST = _descriptor.Descriptor( - name='CreateLogMetricRequest', - full_name='google.logging.v2.CreateLogMetricRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.CreateLogMetricRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metric', full_name='google.logging.v2.CreateLogMetricRequest.metric', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=914, - serialized_end=1000, + name="CreateLogMetricRequest", + full_name="google.logging.v2.CreateLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateLogMetricRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric", + full_name="google.logging.v2.CreateLogMetricRequest.metric", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=914, + serialized_end=1000, ) _UPDATELOGMETRICREQUEST = _descriptor.Descriptor( - name='UpdateLogMetricRequest', - full_name='google.logging.v2.UpdateLogMetricRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='metric_name', full_name='google.logging.v2.UpdateLogMetricRequest.metric_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='metric', full_name='google.logging.v2.UpdateLogMetricRequest.metric', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1002, - serialized_end=1093, + name="UpdateLogMetricRequest", + full_name="google.logging.v2.UpdateLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric", + full_name="google.logging.v2.UpdateLogMetricRequest.metric", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1002, + serialized_end=1093, ) _DELETELOGMETRICREQUEST = _descriptor.Descriptor( - name='DeleteLogMetricRequest', - full_name='google.logging.v2.DeleteLogMetricRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='metric_name', full_name='google.logging.v2.DeleteLogMetricRequest.metric_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1095, - serialized_end=1140, + name="DeleteLogMetricRequest", + full_name="google.logging.v2.DeleteLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1095, + serialized_end=1140, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC -_LOGMETRIC.fields_by_name['metric_descriptor'].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LOGMETRIC.fields_by_name['label_extractors'].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY -_LOGMETRIC.fields_by_name['bucket_options'].message_type = 
google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS -_LOGMETRIC.fields_by_name['version'].enum_type = _LOGMETRIC_APIVERSION +_LOGMETRIC.fields_by_name[ + "metric_descriptor" +].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR +_LOGMETRIC.fields_by_name[ + "label_extractors" +].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY +_LOGMETRIC.fields_by_name[ + "bucket_options" +].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION _LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC -_LISTLOGMETRICSRESPONSE.fields_by_name['metrics'].message_type = _LOGMETRIC -_CREATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC -_UPDATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC -DESCRIPTOR.message_types_by_name['LogMetric'] = _LOGMETRIC -DESCRIPTOR.message_types_by_name['ListLogMetricsRequest'] = _LISTLOGMETRICSREQUEST -DESCRIPTOR.message_types_by_name['ListLogMetricsResponse'] = _LISTLOGMETRICSRESPONSE -DESCRIPTOR.message_types_by_name['GetLogMetricRequest'] = _GETLOGMETRICREQUEST -DESCRIPTOR.message_types_by_name['CreateLogMetricRequest'] = _CREATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name['UpdateLogMetricRequest'] = _UPDATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name['DeleteLogMetricRequest'] = _DELETELOGMETRICREQUEST +_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC +_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC +_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC +DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC +DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST +DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE +DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = _DELETELOGMETRICREQUEST _sym_db.RegisterFileDescriptor(DESCRIPTOR) -LogMetric = _reflection.GeneratedProtocolMessageType('LogMetric', (_message.Message,), dict( - - LabelExtractorsEntry = _reflection.GeneratedProtocolMessageType('LabelExtractorsEntry', (_message.Message,), dict( - DESCRIPTOR = _LOGMETRIC_LABELEXTRACTORSENTRY, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) - )) - , - DESCRIPTOR = _LOGMETRIC, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """Describes a logs-based metric. The value of the metric is the number of +LogMetric = _reflection.GeneratedProtocolMessageType( + "LogMetric", + (_message.Message,), + dict( + LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( + "LabelExtractorsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) + ), + ), + DESCRIPTOR=_LOGMETRIC, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. 
Logs-based metric can also be used to extract values from logs and @@ -517,16 +743,19 @@ metric. The v2 format is used by default and cannot be changed. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) + ), +) _sym_db.RegisterMessage(LogMetric) _sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) -ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType('ListLogMetricsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGMETRICSREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """The parameters to ListLogMetrics. +ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( + "ListLogMetricsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGMETRICSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to ListLogMetrics. Attributes: @@ -545,15 +774,18 @@ ``nextPageToken`` in the response indicates that more results might be available. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) + ), +) _sym_db.RegisterMessage(ListLogMetricsRequest) -ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType('ListLogMetricsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGMETRICSRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """Result returned from ListLogMetrics. +ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( + "ListLogMetricsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGMETRICSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""Result returned from ListLogMetrics. Attributes: @@ -565,15 +797,18 @@ results, call this method again using the value of ``nextPageToken`` as ``pageToken``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) + ), +) _sym_db.RegisterMessage(ListLogMetricsResponse) -GetLogMetricRequest = _reflection.GeneratedProtocolMessageType('GetLogMetricRequest', (_message.Message,), dict( - DESCRIPTOR = _GETLOGMETRICREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """The parameters to GetLogMetric. +GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "GetLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETLOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to GetLogMetric. Attributes: @@ -581,15 +816,18 @@ The resource name of the desired metric: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) + ), +) _sym_db.RegisterMessage(GetLogMetricRequest) -CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType('CreateLogMetricRequest', (_message.Message,), dict( - DESCRIPTOR = _CREATELOGMETRICREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """The parameters to CreateLogMetric. 
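A brief aside between these hunks: the reformatting above does not change behavior, and the generated message classes it touches (LogMetric, the List/Get/Create/Update/DeleteLogMetricRequest types) remain ordinary protobuf messages. A minimal sketch of how they are typically constructed and round-tripped — the project id, metric name, and filter below are illustrative assumptions, not values taken from this patch:

    from google.cloud.logging_v2.proto import logging_metrics_pb2

    # Hypothetical logs-based metric counting ERROR-severity entries.
    metric = logging_metrics_pb2.LogMetric(
        name="error_count",
        description="Count of ERROR log entries.",
        filter="severity>=ERROR",
    )
    request = logging_metrics_pb2.CreateLogMetricRequest(
        parent="projects/my-project",  # assumed project id, for illustration
        metric=metric,
    )
    data = request.SerializeToString()  # wire-format bytes
    parsed = logging_metrics_pb2.CreateLogMetricRequest.FromString(data)
    assert parsed.metric.filter == "severity>=ERROR"

The SerializeToString/FromString pair shown here is the same machinery the handler tables in these files wire up as request_deserializer and response_serializer.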
+CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "CreateLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to CreateLogMetric. Attributes: @@ -601,15 +839,18 @@ The new logs-based metric, which must not have an identifier that already exists. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) + ), +) _sym_db.RegisterMessage(CreateLogMetricRequest) -UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType('UpdateLogMetricRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATELOGMETRICREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """The parameters to UpdateLogMetric. +UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "UpdateLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to UpdateLogMetric. Attributes: @@ -622,15 +863,18 @@ metric: The updated metric. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) + ), +) _sym_db.RegisterMessage(UpdateLogMetricRequest) -DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType('DeleteLogMetricRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETELOGMETRICREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' - , - __doc__ = """The parameters to DeleteLogMetric. +DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "DeleteLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to DeleteLogMetric. 
Attributes: @@ -638,75 +882,105 @@ The resource name of the metric to delete: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) + ), +) _sym_db.RegisterMessage(DeleteLogMetricRequest) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), +) _LOGMETRIC_LABELEXTRACTORSENTRY.has_options = True -_LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -_LOGMETRIC.fields_by_name['version'].has_options = True -_LOGMETRIC.fields_by_name['version']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) +_LOGMETRIC.fields_by_name["version"].has_options = True +_LOGMETRIC.fields_by_name["version"]._options = _descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") +) _METRICSSERVICEV2 = _descriptor.ServiceDescriptor( - name='MetricsServiceV2', - full_name='google.logging.v2.MetricsServiceV2', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=1143, - serialized_end=1867, - methods=[ - _descriptor.MethodDescriptor( - name='ListLogMetrics', - full_name='google.logging.v2.MetricsServiceV2.ListLogMetrics', + name="MetricsServiceV2", + full_name="google.logging.v2.MetricsServiceV2", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_LISTLOGMETRICSREQUEST, - output_type=_LISTLOGMETRICSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics')), - ), - _descriptor.MethodDescriptor( - name='GetLogMetric', - full_name='google.logging.v2.MetricsServiceV2.GetLogMetric', - index=1, - containing_service=None, - input_type=_GETLOGMETRICREQUEST, - output_type=_LOGMETRIC, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}')), - ), - _descriptor.MethodDescriptor( - name='CreateLogMetric', - full_name='google.logging.v2.MetricsServiceV2.CreateLogMetric', - index=2, - containing_service=None, - input_type=_CREATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002)\"\037/v2/{parent=projects/*}/metrics:\006metric')), - ), - _descriptor.MethodDescriptor( - name='UpdateLogMetric', - full_name='google.logging.v2.MetricsServiceV2.UpdateLogMetric', - index=3, - containing_service=None, - input_type=_UPDATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric')), - ), - _descriptor.MethodDescriptor( - name='DeleteLogMetric', - 
full_name='google.logging.v2.MetricsServiceV2.DeleteLogMetric', - index=4, - containing_service=None, - input_type=_DELETELOGMETRICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}')), - ), -]) + options=None, + serialized_start=1143, + serialized_end=1867, + methods=[ + _descriptor.MethodDescriptor( + name="ListLogMetrics", + full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", + index=0, + containing_service=None, + input_type=_LISTLOGMETRICSREQUEST, + output_type=_LISTLOGMETRICSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics"), + ), + ), + _descriptor.MethodDescriptor( + name="GetLogMetric", + full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", + index=1, + containing_service=None, + input_type=_GETLOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}"), + ), + ), + _descriptor.MethodDescriptor( + name="CreateLogMetric", + full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", + index=2, + containing_service=None, + input_type=_CREATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' + ), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateLogMetric", + full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", + index=3, + containing_service=None, + input_type=_UPDATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteLogMetric", + full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", + index=4, + containing_service=None, + input_type=_DELETELOGMETRICREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}"), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) -DESCRIPTOR.services_by_name['MetricsServiceV2'] = _METRICSSERVICEV2 +DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py index f5a7b50f3383..09f84e038a1b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -1,115 +1,118 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
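The hunk that follows reformats the generated gRPC binding for MetricsServiceV2. As a rough sketch of how such generated bindings are used on each side of the wire — the endpoint, project id, and servicer below are assumptions for illustration; a real client would use an authenticated channel to the Stackdriver Logging API rather than insecure_channel:

    from concurrent import futures

    import grpc
    from google.cloud.logging_v2.proto import (
        logging_metrics_pb2,
        logging_metrics_pb2_grpc,
    )

    # Client side: wrap a channel in the generated stub and invoke an RPC.
    channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
    stub = logging_metrics_pb2_grpc.MetricsServiceV2Stub(channel)
    response = stub.ListLogMetrics(
        logging_metrics_pb2.ListLogMetricsRequest(parent="projects/my-project")
    )
    for metric in response.metrics:
        print(metric.name)

    # Server side: subclass the generated servicer, override the stub methods
    # (which otherwise raise UNIMPLEMENTED, as in the hunk below), and register
    # the instance with the generated add_*_to_server helper.
    class MetricsServicer(logging_metrics_pb2_grpc.MetricsServiceV2Servicer):
        def ListLogMetrics(self, request, context):
            return logging_metrics_pb2.ListLogMetricsResponse()

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    logging_metrics_pb2_grpc.add_MetricsServiceV2Servicer_to_server(
        MetricsServicer(), server
    )

The same stub/servicer/registration pattern applies to the ConfigServiceV2 binding reformatted earlier in this patch.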
import grpc -from google.cloud.logging_v2.proto import logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2 +from google.cloud.logging_v2.proto import ( + logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. + """Service for configuring logs-based metrics. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. """ - self.ListLogMetrics = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, + self.ListLogMetrics = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, ) - self.GetLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + self.GetLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, ) - self.CreateLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + self.CreateLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, ) - self.UpdateLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + self.UpdateLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, ) - self.DeleteLogMetric = channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', - 
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. + """Service for configuring logs-based metrics. """ - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetLogMetric(self, request, context): - """Gets a logs-based metric. + def GetLogMetric(self, request, context): + """Gets a logs-based metric. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, - ), - 'GetLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.MetricsServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "ListLogMetrics": grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, + ), + "GetLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "CreateLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, + 
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.logging.v2.MetricsServiceV2", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 126ec3a53d6a..f80dbe6f4f78 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -3,20 +3,26 @@ # source: google/cloud/logging_v2/proto/logging.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 -from google.cloud.logging_v2.proto import log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2 +from google.api import ( + monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, +) +from google.cloud.logging_v2.proto import ( + log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, +) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -24,530 +30,852 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/logging_v2/proto/logging.proto', - package='google.logging.v2', - syntax='proto3', - serialized_pb=_b('\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x19\n\x17WriteLogEntriesResponse\"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 
\x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01\"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty\"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12\".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse\"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) - - + name="google/cloud/logging_v2/proto/logging.proto", + package="google.logging.v2", + syntax="proto3", + serialized_pb=_b( + 
'\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 
/v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + ], +) _DELETELOGREQUEST = _descriptor.Descriptor( - name='DeleteLogRequest', - full_name='google.logging.v2.DeleteLogRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='log_name', full_name='google.logging.v2.DeleteLogRequest.log_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=299, - serialized_end=335, + name="DeleteLogRequest", + full_name="google.logging.v2.DeleteLogRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.DeleteLogRequest.log_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=299, + serialized_end=335, ) _WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=590, - serialized_end=635, + name="LabelsEntry", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=590, + serialized_end=635, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( - name='WriteLogEntriesRequest', - full_name='google.logging.v2.WriteLogEntriesRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='log_name', full_name='google.logging.v2.WriteLogEntriesRequest.log_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resource', full_name='google.logging.v2.WriteLogEntriesRequest.resource', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.logging.v2.WriteLogEntriesRequest.labels', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entries', full_name='google.logging.v2.WriteLogEntriesRequest.entries', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='partial_success', full_name='google.logging.v2.WriteLogEntriesRequest.partial_success', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='dry_run', full_name='google.logging.v2.WriteLogEntriesRequest.dry_run', index=5, - number=6, type=8, cpp_type=7, 
label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=338, - serialized_end=635, + name="WriteLogEntriesRequest", + full_name="google.logging.v2.WriteLogEntriesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.WriteLogEntriesRequest.log_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.logging.v2.WriteLogEntriesRequest.resource", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.logging.v2.WriteLogEntriesRequest.labels", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entries", + full_name="google.logging.v2.WriteLogEntriesRequest.entries", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="partial_success", + full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="dry_run", + full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=338, + serialized_end=635, ) _WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( - name='WriteLogEntriesResponse', - full_name='google.logging.v2.WriteLogEntriesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=637, - serialized_end=662, + name="WriteLogEntriesResponse", + 
full_name="google.logging.v2.WriteLogEntriesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=637, + serialized_end=662, ) _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( - name='LogEntryErrorsEntry', - full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=792, - serialized_end=865, + name="LogEntryErrorsEntry", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=792, + serialized_end=865, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( - name='WriteLogEntriesPartialErrors', - full_name='google.logging.v2.WriteLogEntriesPartialErrors', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='log_entry_errors', full_name='google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, ], - enum_types=[ - ], - options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=665, - serialized_end=865, + name="WriteLogEntriesPartialErrors", + full_name="google.logging.v2.WriteLogEntriesPartialErrors", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_entry_errors", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=665, + serialized_end=865, ) _LISTLOGENTRIESREQUEST = _descriptor.Descriptor( - name='ListLogEntriesRequest', - full_name='google.logging.v2.ListLogEntriesRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project_ids', full_name='google.logging.v2.ListLogEntriesRequest.project_ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')), file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resource_names', full_name='google.logging.v2.ListLogEntriesRequest.resource_names', index=1, - number=8, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filter', full_name='google.logging.v2.ListLogEntriesRequest.filter', index=2, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='order_by', full_name='google.logging.v2.ListLogEntriesRequest.order_by', index=3, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListLogEntriesRequest.page_size', index=4, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListLogEntriesRequest.page_token', index=5, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=868, - 
serialized_end=1013, + name="ListLogEntriesRequest", + full_name="google.logging.v2.ListLogEntriesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_ids", + full_name="google.logging.v2.ListLogEntriesRequest.project_ids", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=_descriptor._ParseOptions( + descriptor_pb2.FieldOptions(), _b("\030\001") + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource_names", + full_name="google.logging.v2.ListLogEntriesRequest.resource_names", + index=1, + number=8, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.ListLogEntriesRequest.filter", + index=2, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="google.logging.v2.ListLogEntriesRequest.order_by", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogEntriesRequest.page_size", + index=4, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogEntriesRequest.page_token", + index=5, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=868, + serialized_end=1013, ) _LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( - name='ListLogEntriesResponse', - full_name='google.logging.v2.ListLogEntriesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entries', full_name='google.logging.v2.ListLogEntriesResponse.entries', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListLogEntriesResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1015, - serialized_end=1110, + name="ListLogEntriesResponse", + full_name="google.logging.v2.ListLogEntriesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="entries", + full_name="google.logging.v2.ListLogEntriesResponse.entries", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1015, + serialized_end=1110, ) _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name='ListMonitoredResourceDescriptorsRequest', - full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1112, - serialized_end=1192, + name="ListMonitoredResourceDescriptorsRequest", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1112, + serialized_end=1192, ) _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name='ListMonitoredResourceDescriptorsResponse', - full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='resource_descriptors', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1195, - serialized_end=1333, + name="ListMonitoredResourceDescriptorsResponse", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="resource_descriptors", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1195, + serialized_end=1333, ) _LISTLOGSREQUEST = _descriptor.Descriptor( - name='ListLogsRequest', - full_name='google.logging.v2.ListLogsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.logging.v2.ListLogsRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.logging.v2.ListLogsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, label=1, - 
has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.logging.v2.ListLogsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1335, - serialized_end=1407, + name="ListLogsRequest", + full_name="google.logging.v2.ListLogsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListLogsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1335, + serialized_end=1407, ) _LISTLOGSRESPONSE = _descriptor.Descriptor( - name='ListLogsResponse', - full_name='google.logging.v2.ListLogsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='log_names', full_name='google.logging.v2.ListLogsResponse.log_names', index=0, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.logging.v2.ListLogsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1409, - serialized_end=1471, + name="ListLogsResponse", + full_name="google.logging.v2.ListLogsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_names", + 
full_name="google.logging.v2.ListLogsResponse.log_names", + index=0, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1409, + serialized_end=1471, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST -_WRITELOGENTRIESREQUEST.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_WRITELOGENTRIESREQUEST.fields_by_name['labels'].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY -_WRITELOGENTRIESREQUEST.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name['value'].message_type = google_dot_rpc_dot_status__pb2._STATUS -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = _WRITELOGENTRIESPARTIALERRORS -_WRITELOGENTRIESPARTIALERRORS.fields_by_name['log_entry_errors'].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY -_LISTLOGENTRIESRESPONSE.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name['resource_descriptors'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -DESCRIPTOR.message_types_by_name['DeleteLogRequest'] = _DELETELOGREQUEST -DESCRIPTOR.message_types_by_name['WriteLogEntriesRequest'] = _WRITELOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name['WriteLogEntriesResponse'] = _WRITELOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name['WriteLogEntriesPartialErrors'] = _WRITELOGENTRIESPARTIALERRORS -DESCRIPTOR.message_types_by_name['ListLogEntriesRequest'] = _LISTLOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name['ListLogEntriesResponse'] = _LISTLOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsRequest'] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsResponse'] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name['ListLogsRequest'] = _LISTLOGSREQUEST -DESCRIPTOR.message_types_by_name['ListLogsResponse'] = _LISTLOGSRESPONSE +_WRITELOGENTRIESREQUEST.fields_by_name[ + "resource" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_WRITELOGENTRIESREQUEST.fields_by_name[ + "labels" +].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY +_WRITELOGENTRIESREQUEST.fields_by_name[ + "entries" +].message_type = ( + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +) +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ + "value" +].message_type = google_dot_rpc_dot_status__pb2._STATUS +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( + 
_WRITELOGENTRIESPARTIALERRORS +) +_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ + "log_entry_errors" +].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY +_LISTLOGENTRIESRESPONSE.fields_by_name[ + "entries" +].message_type = ( + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +) +_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ + "resource_descriptors" +].message_type = ( + google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR +) +DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST +DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name[ + "WriteLogEntriesPartialErrors" +] = _WRITELOGENTRIESPARTIALERRORS +DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name[ + "ListMonitoredResourceDescriptorsRequest" +] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListMonitoredResourceDescriptorsResponse" +] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE +DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST +DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -DeleteLogRequest = _reflection.GeneratedProtocolMessageType('DeleteLogRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETELOGREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """The parameters to DeleteLog. +DeleteLogRequest = _reflection.GeneratedProtocolMessageType( + "DeleteLogRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETELOGREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to DeleteLog. Attributes: @@ -562,22 +890,27 @@ rcemanager.googleapis.com%2Factivity"``. For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) + ), +) _sym_db.RegisterMessage(DeleteLogRequest) -WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType('WriteLogEntriesRequest', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _WRITELOGENTRIESREQUEST_LABELSENTRY, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) - )) - , - DESCRIPTOR = _WRITELOGENTRIESREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """The parameters to WriteLogEntries. +WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( + "WriteLogEntriesRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, + __module__="google.cloud.logging_v2.proto.logging_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_WRITELOGENTRIESREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to WriteLogEntries. 
Attributes: @@ -645,33 +978,41 @@ checking whether the logging API endpoints are working properly before sending valuable data. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) + ), +) _sym_db.RegisterMessage(WriteLogEntriesRequest) _sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) -WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType('WriteLogEntriesResponse', (_message.Message,), dict( - DESCRIPTOR = _WRITELOGENTRIESRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """Result returned from WriteLogEntries. empty +WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType( + "WriteLogEntriesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_WRITELOGENTRIESRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from WriteLogEntries. empty """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) + ), +) _sym_db.RegisterMessage(WriteLogEntriesResponse) -WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType('WriteLogEntriesPartialErrors', (_message.Message,), dict( - - LogEntryErrorsEntry = _reflection.GeneratedProtocolMessageType('LogEntryErrorsEntry', (_message.Message,), dict( - DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - )) - , - DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """Error details for WriteLogEntries with partial success. +WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType( + "WriteLogEntriesPartialErrors", + (_message.Message,), + dict( + LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType( + "LogEntryErrorsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, + __module__="google.cloud.logging_v2.proto.logging_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + ), + ), + DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Error details for WriteLogEntries with partial success. Attributes: @@ -682,16 +1023,19 @@ ``WriteLogEntriesRequest.entries``. Failed requests for which no entries are written will not include per-entry errors. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) + ), +) _sym_db.RegisterMessage(WriteLogEntriesPartialErrors) _sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) -ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType('ListLogEntriesRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGENTRIESREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """The parameters to ``ListLogEntries``. 
+ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( + "ListLogEntriesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGENTRIESREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ``ListLogEntries``. Attributes: @@ -738,15 +1082,18 @@ The values of other method parameters should be identical to those in the previous call. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) + ), +) _sym_db.RegisterMessage(ListLogEntriesRequest) -ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType('ListLogEntriesResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGENTRIESRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """Result returned from ``ListLogEntries``. +ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( + "ListLogEntriesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGENTRIESRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ``ListLogEntries``. Attributes: @@ -768,15 +1115,18 @@ name or resource type, or to narrow the time range of the search. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) + ), +) _sym_db.RegisterMessage(ListLogEntriesResponse) -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """The parameters to ListMonitoredResourceDescriptors +ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( + "ListMonitoredResourceDescriptorsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ListMonitoredResourceDescriptors Attributes: @@ -792,15 +1142,18 @@ values of other method parameters should be identical to those in the previous call. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) + ), +) _sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """Result returned from ListMonitoredResourceDescriptors. +ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( + "ListMonitoredResourceDescriptorsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ListMonitoredResourceDescriptors. Attributes: @@ -812,15 +1165,18 @@ set of results, call this method again using the value of ``nextPageToken`` as ``pageToken``. 
""", - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) + ), +) _sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) -ListLogsRequest = _reflection.GeneratedProtocolMessageType('ListLogsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGSREQUEST, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """The parameters to ListLogs. +ListLogsRequest = _reflection.GeneratedProtocolMessageType( + "ListLogsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ListLogs. Attributes: @@ -841,15 +1197,18 @@ values of other method parameters should be identical to those in the previous call. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) + ), +) _sym_db.RegisterMessage(ListLogsRequest) -ListLogsResponse = _reflection.GeneratedProtocolMessageType('ListLogsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTLOGSRESPONSE, - __module__ = 'google.cloud.logging_v2.proto.logging_pb2' - , - __doc__ = """Result returned from ListLogs. +ListLogsResponse = _reflection.GeneratedProtocolMessageType( + "ListLogsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ListLogs. Attributes: @@ -863,77 +1222,109 @@ set of results, call this method again using the value of ``nextPageToken`` as ``pageToken``. """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) - )) + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) + ), +) _sym_db.RegisterMessage(ListLogsResponse) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), +) _WRITELOGENTRIESREQUEST_LABELSENTRY.has_options = True -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.has_options = True -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -_LISTLOGENTRIESREQUEST.fields_by_name['project_ids'].has_options = True -_LISTLOGENTRIESREQUEST.fields_by_name['project_ids']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) +_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"].has_options = True +_LISTLOGENTRIESREQUEST.fields_by_name[ + "project_ids" +]._options = 
_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b("\030\001")) _LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( - name='LoggingServiceV2', - full_name='google.logging.v2.LoggingServiceV2', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=1474, - serialized_end=2458, - methods=[ - _descriptor.MethodDescriptor( - name='DeleteLog', - full_name='google.logging.v2.LoggingServiceV2.DeleteLog', + name="LoggingServiceV2", + full_name="google.logging.v2.LoggingServiceV2", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_DELETELOGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}')), - ), - _descriptor.MethodDescriptor( - name='WriteLogEntries', - full_name='google.logging.v2.LoggingServiceV2.WriteLogEntries', - index=1, - containing_service=None, - input_type=_WRITELOGENTRIESREQUEST, - output_type=_WRITELOGENTRIESRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\026\"\021/v2/entries:write:\001*')), - ), - _descriptor.MethodDescriptor( - name='ListLogEntries', - full_name='google.logging.v2.LoggingServiceV2.ListLogEntries', - index=2, - containing_service=None, - input_type=_LISTLOGENTRIESREQUEST, - output_type=_LISTLOGENTRIESRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\025\"\020/v2/entries:list:\001*')), - ), - _descriptor.MethodDescriptor( - name='ListMonitoredResourceDescriptors', - full_name='google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors', - index=3, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\"\022 /v2/monitoredResourceDescriptors')), - ), - _descriptor.MethodDescriptor( - name='ListLogs', - full_name='google.logging.v2.LoggingServiceV2.ListLogs', - index=4, - containing_service=None, - input_type=_LISTLOGSREQUEST, - output_type=_LISTLOGSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs')), - ), -]) + options=None, + serialized_start=1474, + serialized_end=2458, + methods=[ + _descriptor.MethodDescriptor( + name="DeleteLog", + full_name="google.logging.v2.LoggingServiceV2.DeleteLog", + index=0, + containing_service=None, + input_type=_DELETELOGREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="WriteLogEntries", + full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", + index=1, + containing_service=None, + input_type=_WRITELOGENTRIESREQUEST, + output_type=_WRITELOGENTRIESRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + 
_b('\202\323\344\223\002\026"\021/v2/entries:write:\001*'), + ), + ), + _descriptor.MethodDescriptor( + name="ListLogEntries", + full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", + index=2, + containing_service=None, + input_type=_LISTLOGENTRIESREQUEST, + output_type=_LISTLOGENTRIESRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b('\202\323\344\223\002\025"\020/v2/entries:list:\001*'), + ), + ), + _descriptor.MethodDescriptor( + name="ListMonitoredResourceDescriptors", + full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", + index=3, + containing_service=None, + input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b('\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors'), + ), + ), + _descriptor.MethodDescriptor( + name="ListLogs", + full_name="google.logging.v2.LoggingServiceV2.ListLogs", + index=4, + containing_service=None, + input_type=_LISTLOGSREQUEST, + output_type=_LISTLOGSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" + ), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) -DESCRIPTOR.services_by_name['LoggingServiceV2'] = _LOGGINGSERVICEV2 +DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index 8f5d9e646e5f..d67dd2c95fd0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -1,63 +1,65 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from google.cloud.logging_v2.proto import logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2 +from google.cloud.logging_v2.proto import ( + logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. + """Service for ingesting and querying logs. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
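        A minimal construction sketch (the channel target and request
        arguments are illustrative assumptions; production code would use an
        authenticated channel or the higher-level google-cloud-logging
        client):

        .. code-block:: python

            import grpc

            from google.cloud.logging_v2.proto import logging_pb2
            from google.cloud.logging_v2.proto import logging_pb2_grpc

            # Assumed local test endpoint; not a real Logging service.
            channel = grpc.insecure_channel("localhost:8080")
            stub = logging_pb2_grpc.LoggingServiceV2Stub(channel)
            response = stub.ListLogs(
                logging_pb2.ListLogsRequest(parent="projects/my-project")
            )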
""" - self.DeleteLog = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteLog = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.WriteLogEntries = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, + self.WriteLogEntries = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, ) - self.ListLogEntries = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, + self.ListLogEntries = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, + self.ListMonitoredResourceDescriptors = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, ) - self.ListLogs = channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, + self.ListLogs = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, ) class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. 
+ """Service for ingesting and querying logs. """ - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be deleted. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def WriteLogEntries(self, request, context): - """Writes log entries to Logging. This API method is the + def WriteLogEntries(self, request, context): + """Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent (fluentd) and all logging libraries configured to use Logging. @@ -65,63 +67,64 @@ def WriteLogEntries(self, request, context): different resources (projects, organizations, billing accounts or folders) """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries from + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from Logging. For ways to export log entries, see [Exporting Logs](/logging/docs/export). """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging. + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Logging. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - 'DeleteLog': grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, - ), - 'ListLogEntries': grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, - ), - 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - 'ListLogs': grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.logging.v2.LoggingServiceV2', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "DeleteLog": grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "WriteLogEntries": grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, + ), + "ListLogEntries": grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, + ), + "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, + 
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + "ListLogs": grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.logging.v2.LoggingServiceV2", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 882d014b9f08..1b9445a86686 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -33,41 +33,38 @@ 'webob', ) + @nox.session(python="3.7") -def blacken(session): - """Run black. +def lint(session): + """Run linters. - Format code to uniform standard. + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - session.install("black") + session.install("flake8", "black", *LOCAL_DEPS) session.run( "black", + "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) + session.run("flake8", "google", "tests") @nox.session(python="3.7") -def lint(session): - """Run linters. +def blacken(session): + """Run black. - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. + Format code to uniform standard. """ - session.install("flake8", "black", *LOCAL_DEPS) + session.install("black") session.run( "black", - "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) - session.run("flake8", "google", "tests") @nox.session(python="3.7") diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index a3e978335ac5..922d805e9b2c 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -42,4 +42,6 @@ # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=95, cov_level=100) # Don't move noxfile. 
logging has special testing setups for django, etc -s.move(templated_files, exclude="noxfile.py") +s.move(templated_files, excludes="noxfile.py") + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 54825d19caff9c68aa705a070a58974fd3eea251 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:23:53 -0800 Subject: [PATCH 205/855] omit local deps (#6701) --- packages/google-cloud-logging/.coveragerc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index 51fec440cebf..6b9ab9da4a1b 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -14,5 +14,5 @@ exclude_lines = omit = */gapic/*.py */proto/*.py - */google-cloud-python/core/*.py + */core/*.py */site-packages/*.py \ No newline at end of file From 2822bb4ee2c48d07918d8164f8b71cd870f9490f Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 3 Dec 2018 13:59:48 -0800 Subject: [PATCH 206/855] Use moved iam.policy now at google.api_core.iam.policy (#6741) * update references to iam to use api-core\ * Update dependency to api_core --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 85eeaf96889e..99315b7383cd 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', + 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', 'google-cloud-core >= 0.28.0, < 0.29dev', ] extras = { From 9a7fa64629adf1e43010f6a5925515ffff9eb634 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 4 Dec 2018 09:00:08 -0800 Subject: [PATCH 207/855] Update dependency to google-cloud-core (#6835) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 99315b7383cd..c2f94980c314 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', - 'google-cloud-core >= 0.28.0, < 0.29dev', + 'google-cloud-core >= 0.29.0, < 0.30dev', ] extras = { } From cd70f5cd4b6c3f85bbc42f36aef5b7f7c673dadf Mon Sep 17 00:00:00 2001 From: Brian Quinlan Date: Wed, 5 Dec 2018 09:27:01 -0800 Subject: [PATCH 208/855] Change the url to the canonical one (#6843) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c2f94980c314..d880ec9e2f4b 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -64,7 +64,7 @@ author='Google LLC', author_email='googleapis-packages@google.com', license='Apache 2.0', - url='https://github.com/GoogleCloudPlatform/google-cloud-python', + url='https://github.com/googleapis/google-cloud-python', classifiers=[ release_status, 'Intended Audience :: Developers', From 916414649c15a35ebae165db88616363e83d5425 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 10 Dec 2018 15:30:20 -0800 Subject: [PATCH 209/855] Release 
logging 1.9.0 (#6893) * Release 1.9.0 --- packages/google-cloud-logging/CHANGELOG.md | 28 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index b72a4fbe87bc..d319b3a08ca4 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,34 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.9.0 + +12-10-2018 12:55 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up fixes to GAPIC generator. ([#6631](https://github.com/googleapis/google-cloud-python/pull/6631)) +- Fix `client_info` bug, update docstrings via synth. ([#6435](https://github.com/googleapis/google-cloud-python/pull/6435)) +- Revert "Allow turning on JSON Detection in StackDriver" ([#6352](https://github.com/googleapis/google-cloud-python/pull/6352)) +- Allow turning on JSON Detection in StackDriver ([#6293](https://github.com/googleapis/google-cloud-python/pull/6293)) + +### New Features +- Add support for additional 'LogEntry' fields ([#6229](https://github.com/googleapis/google-cloud-python/pull/6229)) + +### Dependencies +- Update dependency to google-cloud-core ([#6835](https://github.com/googleapis/google-cloud-python/pull/6835)) +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) + + +### Internal / Testing Changes +- Change the url to the canonical one ([#6843](https://github.com/googleapis/google-cloud-python/pull/6843)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Blackening Continued... ([#6667](https://github.com/googleapis/google-cloud-python/pull/6667)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Logging: add 'synth.py'. ([#6081](https://github.com/googleapis/google-cloud-python/pull/6081)) + ## 1.8.0 10-17-2018 14:23 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d880ec9e2f4b..d6f43390ac19 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.8.0' +version = '1.9.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 34711de2b832ed96cdfafcf93c7bc8d00dc85e6b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 13 Dec 2018 18:02:28 -0500 Subject: [PATCH 210/855] Docs/fixit: normalize docs for 'page_size' / 'max_results' / 'page_token' (#6842) --- .../google/cloud/logging/client.py | 45 ++++++++++++------- .../google/cloud/logging/logger.py | 15 ++++--- 2 files changed, 40 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 5e493b27f757..e8f16b2fd45a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -191,13 +191,18 @@ def list_entries( or :data:`~google.cloud.logging.DESCENDING`. 
:type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. + :param page_size: + Optional. The maximum number of entries in each page of results + from this request. Non-positive values are ignored. Defaults + to a sensible value set by the API. :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. + :param page_token: + Optional. If present, return the next batch of entries, using + the value, which must correspond to the ``nextPageToken`` value + returned in the previous response. Deprecated: use the ``pages`` + property of the returned iterator instead of manually passing + the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` @@ -244,13 +249,18 @@ def list_sinks(self, page_size=None, page_token=None): https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list :type page_size: int - :param page_size: maximum number of sinks to return, If not passed, - defaults to a value set by the API. + :param page_size: + Optional. The maximum number of sinks in each page of results from + this request. Non-positive values are ignored. Defaults to a + sensible value set by the API. :type page_token: str - :param page_token: opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + :param page_token: + Optional. If present, return the next batch of sinks, using the + value, which must correspond to the ``nextPageToken`` value + returned in the previous response. Deprecated: use the ``pages`` + property of the returned iterator instead of manually passing the + token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of @@ -288,13 +298,18 @@ def list_metrics(self, page_size=None, page_token=None): https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list :type page_size: int - :param page_size: maximum number of metrics to return, If not passed, - defaults to a value set by the API. + :param page_size: + Optional. The maximum number of metrics in each page of results + from this request. Non-positive values are ignored. Defaults to a + sensible value set by the API. :type page_token: str - :param page_token: opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - metrics. + :param page_token: + Optional. If present, return the next batch of metrics, using the + value, which must correspond to the ``nextPageToken`` value + returned in the previous response. Deprecated: use the ``pages`` + property of the returned iterator instead of manually passing the + token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 104d09e37a37..b212b6e8b0c3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -249,13 +249,18 @@ def list_entries( or :data:`~google.cloud.logging.DESCENDING`. :type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. + :param page_size: + Optional. 
The maximum number of entries in each page of results + from this request. Non-positive values are ignored. Defaults + to a sensible value set by the API. :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. + :param page_token: + Optional. If present, return the next batch of entries, using + the value, which must correspond to the ``nextPageToken`` value + returned in the previous response. Deprecated: use the ``pages`` + property of the returned iterator instead of manually passing + the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of log entries accessible to the current logger. From 2faa6c1da21194cbf14bb955b15b3394a4802e07 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 14 Dec 2018 12:10:12 -0500 Subject: [PATCH 211/855] Logging: allow setting name, args on default handler (post-blacken) (#6828) - Expose 'stream' argument to handler constructors. Defaults to 'None', passed to stdlib's 'logging.StreamHandler'. - Expose 'name' argument to 'ContainerEngineHandler'. * Add '**kw' to 'Client.{get_default_handler,setup_logging}'. Plumb them through to the underlying handler constructor. Closes #6206. --- .../google/cloud/logging/client.py | 18 ++-- .../cloud/logging/handlers/app_engine.py | 11 ++- .../logging/handlers/container_engine.py | 10 +++ .../google/cloud/logging/handlers/handlers.py | 7 +- .../tests/unit/handlers/test_app_engine.py | 40 ++++++--- .../unit/handlers/test_container_engine.py | 8 ++ .../tests/unit/handlers/test_handlers.py | 44 ++++++++- .../tests/unit/test_client.py | 90 ++++++++++++++++--- 8 files changed, 190 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index e8f16b2fd45a..b5f0b02daaf9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -317,9 +317,12 @@ def list_metrics(self, page_size=None, page_token=None): """ return self.metrics_api.list_metrics(self.project, page_size, page_token) - def get_default_handler(self): + def get_default_handler(self, **kw): """Return the default logging handler based on the local environment. + :type kw: dict + :param kw: keyword args passed to handler constructor + :rtype: :class:`logging.Handler` :returns: The default log handler based on the environment """ @@ -329,14 +332,14 @@ def get_default_handler(self): _APPENGINE_FLEXIBLE_ENV_VM in os.environ or _APPENGINE_INSTANCE_ID in os.environ ): - return AppEngineHandler(self) + return AppEngineHandler(self, **kw) elif gke_cluster_name is not None: - return ContainerEngineHandler() + return ContainerEngineHandler(**kw) else: - return CloudLoggingHandler(self) + return CloudLoggingHandler(self, **kw) def setup_logging( - self, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS + self, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw ): """Attach default Stackdriver logging handler to the root logger. @@ -354,6 +357,9 @@ def setup_logging( handler to. This will always include the loggers in the path of the logging client itself. 
+
+    :type kw: dict
+    :param kw: keyword args passed to handler constructor
         """
-        handler = self.get_default_handler()
+        handler = self.get_default_handler(**kw)
         setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers)
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py
index cf6aa304eae7..d0179fb6dcfc 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py
@@ -46,12 +46,19 @@ class AppEngineHandler(logging.StreamHandler):
     :param transport: The transport class. It should be a subclass
                       of :class:`.Transport`. If unspecified,
                       :class:`.BackgroundThreadTransport` will be used.
+
+    :type stream: file-like object
+    :param stream: (optional) stream to be used by the handler.
     """

     def __init__(
-        self, client, name=_DEFAULT_GAE_LOGGER_NAME, transport=BackgroundThreadTransport
+        self,
+        client,
+        name=_DEFAULT_GAE_LOGGER_NAME,
+        transport=BackgroundThreadTransport,
+        stream=None,
     ):
-        super(AppEngineHandler, self).__init__()
+        super(AppEngineHandler, self).__init__(stream)
         self.name = name
         self.client = client
         self.transport = transport(client, name)
diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py
index 6c0868c0ad83..3e80b7650de1 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py
@@ -29,8 +29,18 @@ class ContainerEngineHandler(logging.StreamHandler):
     This handler is written to format messages for the Google Container
     Engine (GKE) fluentd plugin, so that metadata such as log level are
     properly set.
+
+    :type name: str
+    :param name: (optional) the name of the custom log in Stackdriver Logging.
+
+    :type stream: file-like object
+    :param stream: (optional) stream to be used by the handler.
     """

+    def __init__(self, name=None, stream=None):
+        super(ContainerEngineHandler, self).__init__(stream)
+        self.name = name
+
     def format(self, record):
         """Format the message into JSON expected by fluentd.

diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
index aee214a09bb6..111cec8d27cf 100644
--- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
+++ b/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py
@@ -58,6 +58,9 @@ class CloudLoggingHandler(logging.StreamHandler):
     :type labels: dict
     :param labels: (Optional) Mapping of labels for the entry.

+    :type stream: file-like object
+    :param stream: (optional) stream to be used by the handler.
+
     Example:

     ..
code-block:: python @@ -74,7 +77,6 @@ class CloudLoggingHandler(logging.StreamHandler): cloud_logger.addHandler(handler) cloud_logger.error('bad news') # API call - """ def __init__( @@ -84,8 +86,9 @@ def __init__( transport=BackgroundThreadTransport, resource=_GLOBAL_RESOURCE, labels=None, + stream=None, ): - super(CloudLoggingHandler, self).__init__() + super(CloudLoggingHandler, self).__init__(stream) self.name = name self.client = client self.transport = transport(client, name) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 13d21d58d33f..eef4ac7410e3 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -29,11 +29,9 @@ def _get_target_class(self): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_constructor(self): - from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_FLEX - from google.cloud.logging.handlers.app_engine import _GAE_PROJECT_ENV_STANDARD - from google.cloud.logging.handlers.app_engine import _GAE_SERVICE_ENV - from google.cloud.logging.handlers.app_engine import _GAE_VERSION_ENV + def test_constructor_w_gae_standard_env(self): + import sys + from google.cloud.logging.handlers import app_engine client = mock.Mock(project=self.PROJECT, spec=["project"]) @@ -42,35 +40,51 @@ def test_constructor(self): with mock.patch( "os.environ", new={ - _GAE_PROJECT_ENV_STANDARD: "test_project", - _GAE_SERVICE_ENV: "test_service", - _GAE_VERSION_ENV: "test_version", + app_engine._GAE_PROJECT_ENV_STANDARD: "test_project", + app_engine._GAE_SERVICE_ENV: "test_service", + app_engine._GAE_VERSION_ENV: "test_version", }, ): handler = self._make_one(client, transport=_Transport) + self.assertIs(handler.client, client) + self.assertEqual(handler.name, app_engine._DEFAULT_GAE_LOGGER_NAME) self.assertEqual(handler.resource.type, "gae_app") self.assertEqual(handler.resource.labels["project_id"], "test_project") self.assertEqual(handler.resource.labels["module_id"], "test_service") self.assertEqual(handler.resource.labels["version_id"], "test_version") + self.assertIs(handler.stream, sys.stderr) + + def test_constructor_w_gae_flex_env(self): + import io + from google.cloud.logging.handlers import app_engine + + client = mock.Mock(project=self.PROJECT, spec=["project"]) + name = "test-logger" + stream = io.BytesIO() # Verify that _GAE_PROJECT_ENV_FLEX environment variable takes # precedence over _GAE_PROJECT_ENV_STANDARD. 
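        # mock.patch swaps the dict in for os.environ only inside the
        # 'with' block below, so the host environment is never mutated.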
        with mock.patch(
            "os.environ",
            new={
-                _GAE_PROJECT_ENV_FLEX: "test_project_2",
-                _GAE_PROJECT_ENV_STANDARD: "test_project_should_be_overridden",
-                _GAE_SERVICE_ENV: "test_service_2",
-                _GAE_VERSION_ENV: "test_version_2",
+                app_engine._GAE_PROJECT_ENV_FLEX: "test_project_2",
+                app_engine._GAE_PROJECT_ENV_STANDARD: "test_project_should_be_overridden",
+                app_engine._GAE_SERVICE_ENV: "test_service_2",
+                app_engine._GAE_VERSION_ENV: "test_version_2",
            },
        ):
-            handler = self._make_one(client, transport=_Transport)
+            handler = self._make_one(
+                client, name=name, transport=_Transport, stream=stream
+            )
+
+        self.assertIs(handler.client, client)
+        self.assertEqual(handler.name, name)
         self.assertEqual(handler.resource.type, "gae_app")
         self.assertEqual(handler.resource.labels["project_id"], "test_project_2")
         self.assertEqual(handler.resource.labels["module_id"], "test_service_2")
         self.assertEqual(handler.resource.labels["version_id"], "test_version_2")
+        self.assertIs(handler.stream, stream)

     def test_emit(self):
         client = mock.Mock(project=self.PROJECT, spec=["project"])
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
index cbe833146c57..09ee329ba3f2 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py
@@ -28,6 +28,14 @@ def _get_target_class(self):
     def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)

+    def test_ctor_defaults(self):
+        handler = self._make_one()
+        self.assertIsNone(handler.name)
+
+    def test_ctor_w_name(self):
+        handler = self._make_one(name="foo")
+        self.assertEqual(handler.name, "foo")
+
     def test_format(self):
         import logging
         import json
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
index ff738046d892..5559791bc2fa 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py
@@ -29,10 +29,47 @@ def _get_target_class():
     def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)

-    def test_ctor(self):
+    def test_ctor_defaults(self):
+        import sys
+        from google.cloud.logging.logger import _GLOBAL_RESOURCE
+        from google.cloud.logging.handlers.handlers import DEFAULT_LOGGER_NAME
+
         client = _Client(self.PROJECT)
         handler = self._make_one(client, transport=_Transport)
-        self.assertEqual(handler.client, client)
+        self.assertEqual(handler.name, DEFAULT_LOGGER_NAME)
+        self.assertIs(handler.client, client)
+        self.assertIsInstance(handler.transport, _Transport)
+        self.assertIs(handler.transport.client, client)
+        self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME)
+        self.assertIs(handler.resource, _GLOBAL_RESOURCE)
+        self.assertIsNone(handler.labels)
+        self.assertIs(handler.stream, sys.stderr)
+
+    def test_ctor_explicit(self):
+        import io
+        from google.cloud.logging.resource import Resource
+
+        resource = Resource("resource_type", {"resource_label": "value"})
+        labels = {"handler_label": "value"}
+        name = "test-logger"
+        client = _Client(self.PROJECT)
+        stream = io.BytesIO()
+        handler = self._make_one(
+            client,
+            name=name,
+            transport=_Transport,
+            resource=resource,
+            labels=labels,
+            stream=stream,
+        )
+        self.assertEqual(handler.name, name)
+        self.assertIs(handler.client, client)
+        self.assertIsInstance(handler.transport,
_Transport) + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, name) + self.assertIs(handler.resource, resource) + self.assertEqual(handler.labels, labels) + self.assertIs(handler.stream, stream) def test_emit(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE @@ -108,7 +145,8 @@ def __init__(self, project): class _Transport(object): def __init__(self, client, name): - pass + self.client = client + self.name = name def send(self, record, message, resource, labels=None): self.send_called_with = (record, message, resource, labels) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index e3b33a266bdd..5ea17eb78f0b 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -552,20 +552,23 @@ def test_get_default_handler_app_engine(self): from google.cloud.logging.handlers import AppEngineHandler credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: "True"}): - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) handler = client.get_default_handler() + handler.transport.worker.stop() + self.assertIsInstance(handler, AppEngineHandler) def test_get_default_handler_container_engine(self): from google.cloud.logging.handlers import ContainerEngineHandler + credentials = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=credentials, _use_grpc=False ) patch = mock.patch( @@ -579,29 +582,92 @@ def test_get_default_handler_container_engine(self): self.assertIsInstance(handler, ContainerEngineHandler) def test_get_default_handler_general(self): + import io from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging.resource import Resource - credentials = _make_credentials() + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + credentials = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=credentials, _use_grpc=False ) - handler = client.get_default_handler() + + handler = client.get_default_handler( + name=name, resource=resource, labels=labels, stream=stream + ) + + handler.transport.worker.stop() self.assertIsInstance(handler, CloudLoggingHandler) + self.assertEqual(handler.name, name) + self.assertEqual(handler.resource, resource) + self.assertEqual(handler.labels, labels) def test_setup_logging(self): - setup_logging = mock.Mock(spec=[]) + from google.cloud.logging.handlers import CloudLoggingHandler credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) - with mock.patch("google.cloud.logging.client.setup_logging", new=setup_logging): - client = self._make_one( - project=self.PROJECT, credentials=credentials, _use_grpc=False - ) + with mock.patch("google.cloud.logging.client.setup_logging") as mocked: client.setup_logging() - setup_logging.assert_called() + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + handler, = args + self.assertIsInstance(handler, CloudLoggingHandler) + + handler.transport.worker.stop() + + expected_kwargs = { + "excluded_loggers": 
("google.cloud", "google.auth", "google_auth_httplib2"), + "log_level": 20, + } + self.assertEqual(kwargs, expected_kwargs) + + def test_setup_logging_w_extra_kwargs(self): + import io + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging.resource import Resource + + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + handler, = args + self.assertIsInstance(handler, CloudLoggingHandler) + self.assertEqual(handler.name, name) + self.assertEqual(handler.resource, resource) + self.assertEqual(handler.labels, labels) + + handler.transport.worker.stop() + + expected_kwargs = { + "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"), + "log_level": 20, + } + self.assertEqual(kwargs, expected_kwargs) class _Connection(object): From d959633cb6024e61dff42b9fcffc6cb23bf5f4da Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 14 Dec 2018 12:25:37 -0800 Subject: [PATCH 212/855] Document Python 2 deprecation (#6910) --- packages/google-cloud-logging/README.rst | 9 +++++++++ packages/google-cloud-logging/setup.py | 1 + 2 files changed, 10 insertions(+) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a0bc55247761..78635f780b53 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -46,6 +46,15 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.4 + +Deprecated Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. 
+ + Mac/Linux ^^^^^^^^^ diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d6f43390ac19..8c58cd0ab45d 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -84,6 +84,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', include_package_data=True, zip_safe=False, ) From 0ca4ca90bb2be6c78a4ffe91bf27f0f901075a80 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 19 Dec 2018 10:04:03 -0800 Subject: [PATCH 213/855] Release logging 1.9.1 (#6947) * Release 1.9.1 --- packages/google-cloud-logging/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index d319b3a08ca4..80303c9dbe98 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.9.1 + +12-17-2018 16:49 PST + + +### Implementation Changes +- Allow setting name, args on default handler (post-blacken) ([#6828](https://github.com/googleapis/google-cloud-python/pull/6828)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Normalize docs for `page_size` / `max_results` / `page_token`. ([#6842](https://github.com/googleapis/google-cloud-python/pull/6842)) + ## 1.9.0 12-10-2018 12:55 PST diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8c58cd0ab45d..d6b5b7ea3896 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.9.0' +version = '1.9.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 35ba340f838a337c74b651bac1f463d06968c7a1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Sat, 5 Jan 2019 13:42:26 -0500 Subject: [PATCH 214/855] GAPIC generation fixes. (#7061) * Pick up stub docstring fix from GAPIC generator. * Pick up order-of-enum fix from GAPIC generator. --- .../google/cloud/logging_v2/gapic/enums.py | 92 +++++++++---------- .../config_service_v2_grpc_transport.py | 20 ++-- .../logging_service_v2_grpc_transport.py | 10 +- .../metrics_service_v2_grpc_transport.py | 10 +- packages/google-cloud-logging/synth.metadata | 39 ++++++++ 5 files changed, 105 insertions(+), 66 deletions(-) create mode 100644 packages/google-cloud-logging/synth.metadata diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index 403002a8abee..d47a652d3d5b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -61,20 +61,6 @@ class LaunchStage(enum.IntEnum): DEPRECATED = 5 -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. 
- """ - - NULL_VALUE = 0 - - class LogSeverity(enum.IntEnum): """ The severity of the event described in a log entry, expressed as one of @@ -120,6 +106,20 @@ class LogSeverity(enum.IntEnum): EMERGENCY = 800 +class NullValue(enum.IntEnum): + """ + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + + NULL_VALUE = 0 + + class LabelDescriptor(object): class ValueType(enum.IntEnum): """ @@ -136,6 +136,38 @@ class ValueType(enum.IntEnum): INT64 = 2 +class LogMetric(object): + class ApiVersion(enum.IntEnum): + """ + Logging API version. + + Attributes: + V2 (int): Logging API v2. + V1 (int): Logging API v1. + """ + + V2 = 0 + V1 = 1 + + +class LogSink(object): + class VersionFormat(enum.IntEnum): + """ + Available log entry formats. Log entries can be written to + Logging in either format and can be exported in either format. + Version 2 is the preferred format. + + Attributes: + VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. + V2 (int): ``LogEntry`` version 2 format. + V1 (int): ``LogEntry`` version 1 format. + """ + + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + class MetricDescriptor(object): class MetricKind(enum.IntEnum): """ @@ -180,35 +212,3 @@ class ValueType(enum.IntEnum): STRING = 4 DISTRIBUTION = 5 MONEY = 6 - - -class LogMetric(object): - class ApiVersion(enum.IntEnum): - """ - Logging API version. - - Attributes: - V2 (int): Logging API v2. - V1 (int): Logging API v1. - """ - - V2 = 0 - V1 = 1 - - -class LogSink(object): - class VersionFormat(enum.IntEnum): - """ - Available log entry formats. Log entries can be written to - Logging in either format and can be exported in either format. - Version 2 is the preferred format. - - Attributes: - VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. - V2 (int): ``LogEntry`` version 2 format. - V1 (int): ``LogEntry`` version 1 format. - """ - - VERSION_FORMAT_UNSPECIFIED = 0 - V2 = 1 - V1 = 2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index f27ed84c91a0..61272c0d553b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -105,7 +105,7 @@ def channel(self): @property def list_sinks(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_sinks`. Lists sinks. @@ -118,7 +118,7 @@ def list_sinks(self): @property def get_sink(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_sink`. Gets a sink. @@ -131,7 +131,7 @@ def get_sink(self): @property def create_sink(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_sink`. Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the @@ -147,7 +147,7 @@ def create_sink(self): @property def update_sink(self): - """Return the gRPC stub for {$apiMethod.name}. 
+ """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_sink`. Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and @@ -163,7 +163,7 @@ def update_sink(self): @property def delete_sink(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_sink`. Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -177,7 +177,7 @@ def delete_sink(self): @property def list_exclusions(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_exclusions`. Lists all the exclusions in a parent resource. @@ -190,7 +190,7 @@ def list_exclusions(self): @property def get_exclusion(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_exclusion`. Gets the description of an exclusion. @@ -203,7 +203,7 @@ def get_exclusion(self): @property def create_exclusion(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_exclusion`. Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. @@ -218,7 +218,7 @@ def create_exclusion(self): @property def update_exclusion(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_exclusion`. Changes one or more properties of an existing exclusion. @@ -231,7 +231,7 @@ def update_exclusion(self): @property def delete_exclusion(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_exclusion`. Deletes an exclusion. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 008d803d5415..6ffd26bfcde5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -103,7 +103,7 @@ def channel(self): @property def delete_log(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`LoggingServiceV2Client.delete_log`. Deletes all the log entries in a log. The log reappears if it receives new entries. @@ -119,7 +119,7 @@ def delete_log(self): @property def write_log_entries(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`LoggingServiceV2Client.write_log_entries`. Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method @@ -138,7 +138,7 @@ def write_log_entries(self): @property def list_log_entries(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_log_entries`. Lists log entries. Use this method to retrieve log entries from Logging. For ways to export log entries, see `Exporting @@ -153,7 +153,7 @@ def list_log_entries(self): @property def list_monitored_resource_descriptors(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_monitored_resource_descriptors`. 
Lists the descriptors for monitored resource types used by Logging. @@ -166,7 +166,7 @@ def list_monitored_resource_descriptors(self): @property def list_logs(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_logs`. Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 6876c305e36e..9a558a5b9895 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -105,7 +105,7 @@ def channel(self): @property def list_log_metrics(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`MetricsServiceV2Client.list_log_metrics`. Lists logs-based metrics. @@ -118,7 +118,7 @@ def list_log_metrics(self): @property def get_log_metric(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`MetricsServiceV2Client.get_log_metric`. Gets a logs-based metric. @@ -131,7 +131,7 @@ def get_log_metric(self): @property def create_log_metric(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`MetricsServiceV2Client.create_log_metric`. Creates a logs-based metric. @@ -144,7 +144,7 @@ def create_log_metric(self): @property def update_log_metric(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`MetricsServiceV2Client.update_log_metric`. Creates or updates a logs-based metric. @@ -157,7 +157,7 @@ def update_log_metric(self): @property def delete_log_metric(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`MetricsServiceV2Client.delete_log_metric`. Deletes a logs-based metric. diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata new file mode 100644 index 000000000000..9693f14f8198 --- /dev/null +++ b/packages/google-cloud-logging/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2019-01-05T17:54:33.074139Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.16.4", + "dockerImage": "googleapis/artman@sha256:8b45fae963557c3299921037ecbb86f0689f41b1b4aea73408ebc50562cb2857" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "a111a53c0c6722afcd793b64724ceef7862db5b9", + "internalRef": "227896184" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2018.12.6" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "logging", + "apiVersion": "v2", + "language": "python", + "generator": "gapic", + "config": "google/logging/artman_logging.yaml" + } + } + ] +} \ No newline at end of file From 35c5308b6ca790151a2bc63298505592a00ef1ee Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Jan 2019 13:43:14 -0500 Subject: [PATCH 215/855] Use 'python-3.6' for 'blacken' run. (#7064) Closes #7063. 
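
The blacken session was previously pinned to Python 3.7; this pins it to 3.6
instead (motivation tracked in #7063). For reference, a minimal sketch of the
resulting nox session, assuming nox's standard decorator API as already used
in this noxfile; the directory arguments are illustrative, and the
authoritative change is the one-line noxfile.py diff below:

    import nox

    @nox.session(python="3.6")
    def blacken(session):
        # Sketch of the pinned session: install black into the session's
        # virtualenv and reformat the package sources in place. The paths
        # are illustrative; the real session may cover more directories.
        session.install("black")
        session.run("black", "google", "tests")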
--- packages/google-cloud-logging/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 1b9445a86686..089aeb588cfa 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -52,7 +52,7 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.7") +@nox.session(python="3.6") def blacken(session): """Run black. From 1dfc298c1bfd3858b78578e58895f9a41f1c1c55 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 9 Jan 2019 11:27:50 -0800 Subject: [PATCH 216/855] Protoc-generated serialization update. (#7088) --- .../cloud/logging_v2/proto/log_entry_pb2.py | 77 +++---- .../logging_v2/proto/logging_config_pb2.py | 217 +++++++----------- .../logging_v2/proto/logging_metrics_pb2.py | 119 ++++------ .../cloud/logging_v2/proto/logging_pb2.py | 142 +++++------- packages/google-cloud-logging/synth.metadata | 10 +- 5 files changed, 232 insertions(+), 333 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index a69978eb1e2f..3aa17006be30 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -9,7 +9,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -35,6 +34,9 @@ name="google/cloud/logging_v2/proto/log_entry.proto", package="google.logging.v2", syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), serialized_pb=_b( '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 
\x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), @@ -72,7 +74,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -90,14 +92,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -128,7 +130,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -146,7 +148,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -164,7 +166,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -182,7 +184,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -200,7 +202,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -218,7 +220,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -236,7 +238,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -254,7 +256,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -272,7 +274,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -290,7 +292,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -308,7 +310,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -326,7 +328,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -344,7 +346,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -362,7 
+364,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -380,7 +382,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -398,7 +400,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -416,14 +418,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_LOGENTRY_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -463,7 +465,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -481,7 +483,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -499,7 +501,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -517,14 +519,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -556,7 +558,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -574,7 +576,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -592,14 +594,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -852,15 +854,6 @@ _sym_db.RegisterMessage(LogEntrySourceLocation) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), -) -_LOGENTRY_LABELSENTRY.has_options = True -_LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) +DESCRIPTOR._options = None +_LOGENTRY_LABELSENTRY._options = None # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 5782fd342c21..d62ef9c95eec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -9,7 +9,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database 
-from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -26,6 +25,9 @@ name="google/cloud/logging_v2/proto/logging_config.proto", package="google.logging.v2", syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), serialized_pb=_b( '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}
:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), @@ -48,18 +50,18 @@ name="VERSION_FORMAT_UNSPECIFIED", index=0, number=0, - options=None, + serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( - name="V2", index=1, number=1, options=None, type=None + name="V2", index=1, number=1, serialized_options=None, type=None ), _descriptor.EnumValueDescriptor( - name="V1", index=2, number=2, options=None, type=None + name="V1", index=2, number=2, serialized_options=None, type=None ), ], containing_type=None, - options=None, + serialized_options=None, serialized_start=492, serialized_end=555, ) @@ -88,7 +90,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -106,7 +108,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -124,7 +126,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -142,9 +144,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") - ), + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -162,7 +162,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -180,7 +180,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -198,9 +198,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") - ), + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -218,16 +216,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") - ), + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[_LOGSINK_VERSIONFORMAT], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -259,7 +255,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -277,7 +273,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -295,14 +291,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, 
is_extendable=False, syntax="proto3", extension_ranges=[], @@ -334,7 +330,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -352,14 +348,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -391,14 +387,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -430,7 +426,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -448,7 +444,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -466,14 +462,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -505,7 +501,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -523,7 +519,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -541,7 +537,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -559,14 +555,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -598,14 +594,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -637,7 +633,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -655,7 +651,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -673,7 +669,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -691,14 +687,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -730,7 +726,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + 
serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -748,7 +744,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -766,14 +762,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -805,7 +801,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -823,14 +819,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -862,14 +858,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -901,7 +897,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -919,14 +915,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -958,7 +954,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -976,7 +972,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -994,14 +990,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1033,14 +1029,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1572,32 +1568,17 @@ _sym_db.RegisterMessage(DeleteExclusionRequest) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), -) -_LOGSINK.fields_by_name["output_version_format"].has_options = True -_LOGSINK.fields_by_name["output_version_format"]._options = _descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") -) -_LOGSINK.fields_by_name["start_time"].has_options = True -_LOGSINK.fields_by_name["start_time"]._options = _descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") 
-) -_LOGSINK.fields_by_name["end_time"].has_options = True -_LOGSINK.fields_by_name["end_time"]._options = _descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") -) +DESCRIPTOR._options = None +_LOGSINK.fields_by_name["output_version_format"]._options = None +_LOGSINK.fields_by_name["start_time"]._options = None +_LOGSINK.fields_by_name["end_time"]._options = None _CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( name="ConfigServiceV2", full_name="google.logging.v2.ConfigServiceV2", file=DESCRIPTOR, index=0, - options=None, + serialized_options=None, serialized_start=1656, serialized_end=4876, methods=[ @@ -1608,11 +1589,8 @@ containing_service=None, input_type=_LISTSINKSREQUEST, output_type=_LISTSINKSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks' - ), + serialized_options=_b( + '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks' ), ), _descriptor.MethodDescriptor( @@ -1622,11 +1600,8 @@ containing_service=None, input_type=_GETSINKREQUEST, output_type=_LOGSINK, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}" - ), + serialized_options=_b( + "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}" ), ), _descriptor.MethodDescriptor( @@ -1636,11 +1611,8 @@ containing_service=None, input_type=_CREATESINKREQUEST, output_type=_LOGSINK, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink' - ), + serialized_options=_b( + '\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink' ), ), _descriptor.MethodDescriptor( @@ -1650,11 +1622,8 @@ containing_service=None, input_type=_UPDATESINKREQUEST, output_type=_LOGSINK, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink" - ), + serialized_options=_b( + 
"\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink" ), ), _descriptor.MethodDescriptor( @@ -1664,11 +1633,8 @@ containing_service=None, input_type=_DELETESINKREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}" - ), + serialized_options=_b( + "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}" ), ), _descriptor.MethodDescriptor( @@ -1678,11 +1644,8 @@ containing_service=None, input_type=_LISTEXCLUSIONSREQUEST, output_type=_LISTEXCLUSIONSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions" - ), + serialized_options=_b( + "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions" ), ), _descriptor.MethodDescriptor( @@ -1692,11 +1655,8 @@ containing_service=None, input_type=_GETEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}" - ), + serialized_options=_b( + "\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}" ), ), _descriptor.MethodDescriptor( @@ -1706,11 +1666,8 @@ containing_service=None, input_type=_CREATEEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion' - ), + serialized_options=_b( + '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion' ), ), 
_descriptor.MethodDescriptor( @@ -1720,11 +1677,8 @@ containing_service=None, input_type=_UPDATEEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion" - ), + serialized_options=_b( + "\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion" ), ), _descriptor.MethodDescriptor( @@ -1734,11 +1688,8 @@ containing_service=None, input_type=_DELETEEXCLUSIONREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}" - ), + serialized_options=_b( + "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}" ), ), ], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 5988162a3e76..54dc8f703c20 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -9,7 +9,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -28,6 +27,9 @@ name="google/cloud/logging_v2/proto/logging_metrics.proto", package="google.logging.v2", syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), serialized_pb=_b( '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), @@ -49,14 +51,14 @@ file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( - name="V2", index=0, number=0, options=None, type=None + name="V2", index=0, number=0, serialized_options=None, type=None ), _descriptor.EnumValueDescriptor( - name="V1", index=1, number=1, options=None, type=None + name="V1", index=1, number=1, serialized_options=None, type=None ), ], containing_type=None, - options=None, + serialized_options=None, serialized_start=662, serialized_end=690, ) @@ -85,7 +87,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -103,14 +105,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -141,7 +143,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -159,7 +161,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -177,7 +179,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -195,7 +197,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -213,7 +215,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -231,7 +233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -249,7 +251,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -267,16 +269,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") - ), + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], enum_types=[_LOGMETRIC_APIVERSION], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -308,7 +308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -326,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -344,14 +344,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -383,7 +383,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -401,14 +401,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -440,14 +440,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -479,7 +479,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -497,14 +497,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -536,7 +536,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -554,14 +554,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], 
@@ -593,14 +593,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -888,28 +888,16 @@ _sym_db.RegisterMessage(DeleteLogMetricRequest) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), -) -_LOGMETRIC_LABELEXTRACTORSENTRY.has_options = True -_LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_LOGMETRIC.fields_by_name["version"].has_options = True -_LOGMETRIC.fields_by_name["version"]._options = _descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") -) +DESCRIPTOR._options = None +_LOGMETRIC_LABELEXTRACTORSENTRY._options = None +_LOGMETRIC.fields_by_name["version"]._options = None _METRICSSERVICEV2 = _descriptor.ServiceDescriptor( name="MetricsServiceV2", full_name="google.logging.v2.MetricsServiceV2", file=DESCRIPTOR, index=0, - options=None, + serialized_options=None, serialized_start=1143, serialized_end=1867, methods=[ @@ -920,9 +908,8 @@ containing_service=None, input_type=_LISTLOGMETRICSREQUEST, output_type=_LISTLOGMETRICSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics"), + serialized_options=_b( + "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" ), ), _descriptor.MethodDescriptor( @@ -932,9 +919,8 @@ containing_service=None, input_type=_GETLOGMETRICREQUEST, output_type=_LOGMETRIC, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}"), + serialized_options=_b( + "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" ), ), _descriptor.MethodDescriptor( @@ -944,11 +930,8 @@ containing_service=None, input_type=_CREATELOGMETRICREQUEST, output_type=_LOGMETRIC, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' - ), + serialized_options=_b( + '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' ), ), _descriptor.MethodDescriptor( @@ -958,11 +941,8 @@ containing_service=None, input_type=_UPDATELOGMETRICREQUEST, output_type=_LOGMETRIC, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" - ), + serialized_options=_b( + "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" ), ), _descriptor.MethodDescriptor( @@ -972,9 +952,8 @@ containing_service=None, input_type=_DELETELOGMETRICREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}"), + serialized_options=_b( + "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" ), ), ], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 
f80dbe6f4f78..a37710bcf382 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -9,7 +9,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -33,6 +32,9 @@ name="google/cloud/logging_v2/proto/logging.proto", package="google.logging.v2", syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), serialized_pb=_b( '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* 
/v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), @@ -70,14 +72,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -109,7 +111,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -127,14 +129,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -165,7 +167,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -183,7 +185,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -201,7 +203,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -219,7 +221,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -237,7 +239,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -255,14 +257,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -282,7 +284,7 @@ extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -314,7 
+316,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -332,14 +334,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -370,14 +372,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -409,9 +411,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=_descriptor._ParseOptions( - descriptor_pb2.FieldOptions(), _b("\030\001") - ), + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -429,7 +429,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -447,7 +447,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -465,7 +465,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -483,7 +483,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -501,14 +501,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -540,7 +540,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -558,14 +558,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -597,7 +597,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -615,14 +615,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -654,7 +654,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -672,14 +672,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, 
syntax="proto3", extension_ranges=[], @@ -711,7 +711,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -729,7 +729,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -747,14 +747,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -786,7 +786,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -804,14 +804,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1228,32 +1228,17 @@ _sym_db.RegisterMessage(ListLogsResponse) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), -) -_WRITELOGENTRIESREQUEST_LABELSENTRY.has_options = True -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.has_options = True -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"].has_options = True -_LISTLOGENTRIESREQUEST.fields_by_name[ - "project_ids" -]._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b("\030\001")) +DESCRIPTOR._options = None +_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None +_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None _LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( name="LoggingServiceV2", full_name="google.logging.v2.LoggingServiceV2", file=DESCRIPTOR, index=0, - options=None, + serialized_options=None, serialized_start=1474, serialized_end=2458, methods=[ @@ -1264,11 +1249,8 @@ containing_service=None, input_type=_DELETELOGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" - ), + serialized_options=_b( + "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" ), ), _descriptor.MethodDescriptor( @@ -1278,9 +1260,8 @@ containing_service=None, input_type=_WRITELOGENTRIESREQUEST, output_type=_WRITELOGENTRIESRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b('\202\323\344\223\002\026"\021/v2/entries:write:\001*'), + 
serialized_options=_b( + '\202\323\344\223\002\026"\021/v2/entries:write:\001*' ), ), _descriptor.MethodDescriptor( @@ -1290,9 +1271,8 @@ containing_service=None, input_type=_LISTLOGENTRIESREQUEST, output_type=_LISTLOGENTRIESRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b('\202\323\344\223\002\025"\020/v2/entries:list:\001*'), + serialized_options=_b( + '\202\323\344\223\002\025"\020/v2/entries:list:\001*' ), ), _descriptor.MethodDescriptor( @@ -1302,9 +1282,8 @@ containing_service=None, input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b('\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors'), + serialized_options=_b( + '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' ), ), _descriptor.MethodDescriptor( @@ -1314,11 +1293,8 @@ containing_service=None, input_type=_LISTLOGSREQUEST, output_type=_LISTLOGSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" - ), + serialized_options=_b( + "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" ), ), ], diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 9693f14f8198..26a79dd6cc2f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-05T17:54:33.074139Z", + "updateTime": "2019-01-09T13:23:25.969464Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.4", - "dockerImage": "googleapis/artman@sha256:8b45fae963557c3299921037ecbb86f0689f41b1b4aea73408ebc50562cb2857" + "version": "0.16.5", + "dockerImage": "googleapis/artman@sha256:5a96c2c5c6f9570cc9556b63dc9ce1838777fd9166b5b64e43ad8e0ecee2fe2c" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a111a53c0c6722afcd793b64724ceef7862db5b9", - "internalRef": "227896184" + "sha": "659d66ec24bf40b35a41a0b79218d96ba3add3d3", + "internalRef": "228437827" } }, { From d2ef390afbd030df4c308f27f07f3912e06cc46f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Thu, 17 Jan 2019 15:35:17 -0800 Subject: [PATCH 217/855] Re-generated logging. Change WriteLogEntries retry. Update copyright headers. 
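The churn above is mechanical: a newer protoc stops building *Options messages at import time through the removed `_descriptor._ParseOptions(...)` helper (hence the dropped `descriptor_pb2` import and the deleted `has_options`/`._options` bookkeeping at module scope) and instead passes the already-serialized option bytes via the new `serialized_options=` keyword. A minimal sketch, not part of the patch, of what two of those opaque byte strings decode to:

    from google.protobuf import descriptor_pb2

    # b"8\001" on the map-entry messages: tag 0x38 = field 7 (map_entry), varint 1.
    msg_opts = descriptor_pb2.MessageOptions()
    msg_opts.MergeFromString(b"8\001")
    print(msg_opts.map_entry)  # True

    # b"\030\001" on the deprecated 'project_ids' field: tag 0x18 = field 3 (deprecated).
    field_opts = descriptor_pb2.FieldOptions()
    field_opts.MergeFromString(b"\030\001")
    print(field_opts.deprecated)  # True

Behavior is unchanged; the same options are simply carried as bytes and parsed on demand by the protobuf runtime instead of eagerly by every generated module.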
--- .../logging_v2/gapic/config_service_v2_client.py | 2 +- .../google/cloud/logging_v2/gapic/enums.py | 2 +- .../logging_v2/gapic/logging_service_v2_client.py | 2 +- .../gapic/logging_service_v2_client_config.py | 2 +- .../logging_v2/gapic/metrics_service_v2_client.py | 2 +- .../transports/config_service_v2_grpc_transport.py | 2 +- .../transports/logging_service_v2_grpc_transport.py | 2 +- .../transports/metrics_service_v2_grpc_transport.py | 2 +- packages/google-cloud-logging/synth.metadata | 12 ++++++------ .../gapic/v2/test_config_service_v2_client_v2.py | 2 +- .../gapic/v2/test_logging_service_v2_client_v2.py | 2 +- .../gapic/v2/test_metrics_service_v2_client_v2.py | 2 +- 12 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 4e5aec994f23..a24f9c544c77 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index d47a652d3d5b..98ac090418ef 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 692f01f290a6..1add4e7046c8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py index 5293e5e38015..b3da612f6caf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py @@ -33,7 +33,7 @@ }, "WriteLogEntries": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", + "retry_codes_name": "idempotent", "retry_params_name": "default", "bundling": { "element_count_threshold": 1000, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 8a8d0c1d2bd3..faca01a595fe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 61272c0d553b..91102e9d6420 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 6ffd26bfcde5..4e3a9e82a06c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 9a558a5b9895..f4f953f6a617 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
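The one behavioral change in this regeneration is the `retry_codes_name` flip for `WriteLogEntries` from "non_idempotent" to "idempotent". In these GAPIC client configs that name selects which list of gRPC status codes a method may be retried on; the lists themselves live earlier in the same file and are not part of this hunk, so the sketch below fills them in with the conventional values rather than quoting the diff:

    # Assumed shape of logging_service_v2_client_config.py after this patch.
    # The retry_codes lists are the usual GAPIC defaults, not copied from the hunk.
    config = {
        "interfaces": {
            "google.logging.v2.LoggingServiceV2": {
                "retry_codes": {
                    "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
                    "non_idempotent": [],
                },
                "methods": {
                    "WriteLogEntries": {
                        "timeout_millis": 60000,
                        # Now retried on the "idempotent" codes above.
                        "retry_codes_name": "idempotent",
                        "retry_params_name": "default",
                    },
                },
            }
        }
    }

Retrying a write is reasonable here because, as the `LogEntry` documentation added later in this series spells out, entries with the same `timestamp` and `insert_id` are treated as duplicates, so a replayed `WriteLogEntries` call does not double-log.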
diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 26a79dd6cc2f..b46285d99187 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-01-09T13:23:25.969464Z", + "updateTime": "2019-01-17T13:22:21.430628Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.5", - "dockerImage": "googleapis/artman@sha256:5a96c2c5c6f9570cc9556b63dc9ce1838777fd9166b5b64e43ad8e0ecee2fe2c" + "version": "0.16.6", + "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "659d66ec24bf40b35a41a0b79218d96ba3add3d3", - "internalRef": "228437827" + "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", + "internalRef": "229626798" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2018.12.6" + "version": "2019.1.16" } } ], diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py index 429dafb233ac..9c299abaccf5 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py index 538a5e8bdfcf..b43ac02f0068 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py index b12e191dc7f7..ce00373b102b 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
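Each regeneration commit rewrites `synth.metadata` the same way: it pins the artman version and Docker image digest, the `googleapis/googleapis` commit the protos were taken from, and the synthtool template version, so any generated file can be traced back to its exact inputs. A hypothetical inspection script, not part of the repo, reading the structure shown in the hunk above:

    import json

    # Hypothetical helper; the key layout matches the synth.metadata
    # hunks in these patches.
    with open("packages/google-cloud-logging/synth.metadata") as fh:
        meta = json.load(fh)

    print("generated at:", meta["updateTime"])
    for source in meta["sources"]:
        if "generator" in source:
            print("generator:", source["generator"]["name"], source["generator"]["version"])
        elif "git" in source:
            print("pinned", source["git"]["name"], "at", source["git"]["sha"])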
From 1cd52c4e75ff4e84ac3e80ddf5ecb9ba1d72ee0a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 18 Jan 2019 09:29:29 -0800 Subject: [PATCH 218/855] Release 1.10.0 (#7196) --- packages/google-cloud-logging/CHANGELOG.md | 14 ++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 80303c9dbe98..ca0d1a6af370 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.10.0 + +01-17-2019 15:37 PST + + +### Implementation Changes +- Change WriteLogEntries retry policy. +- Protoc-generated serialization update. ([#7088](https://github.com/googleapis/google-cloud-python/pull/7088)) +- GAPIC generation fixes. ([#7061](https://github.com/googleapis/google-cloud-python/pull/7061)) + +### Internal / Testing Changes +- Update copyright headers. +- Use 'python-3.6' for 'blacken' run. ([#7064](https://github.com/googleapis/google-cloud-python/pull/7064)) + ## 1.9.1 12-17-2018 16:49 PST diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d6b5b7ea3896..6ca5ce279550 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.9.1' +version = '1.10.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 5d01514349cf75a75ce2544f5c81be5eb538ba91 Mon Sep 17 00:00:00 2001 From: salrashid123 Date: Mon, 28 Jan 2019 09:57:37 -0800 Subject: [PATCH 219/855] Add snippet for logging a resource. (#7212) --- packages/google-cloud-logging/docs/snippets.py | 13 +++++++++++++ packages/google-cloud-logging/docs/usage.rst | 11 +++++++++++ 2 files changed, 24 insertions(+) diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index 1b3987e8fd61..35e12a2dca53 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -133,6 +133,19 @@ def logger_usage(client, to_delete): ) # API call # [END logger_log_struct] + # [START logger_log_resource_text] + from google.cloud.logging.resource import Resource + res = Resource(type="generic_node", + labels={ + 'location': 'us-central1-a', + 'namespace': 'default', + 'node_id': '10.10.10.1' + }) + logger.log_struct( + {"message": "My first entry", "weather": "partly cloudy"}, resource=res + ) + # [END logger_log_resource_text] + # [START logger_list_entries] from google.cloud.logging import DESCENDING diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 0204a584dd73..122a850fecba 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -24,6 +24,17 @@ Write a dictionary entry to the logger. :end-before: [END logger_log_struct] :dedent: 4 +Write a simple text entry and resource to the logger. + +Supported Resource values are listed at `Monitored Resource Types`_ + +.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list + + +.. 
literalinclude:: snippets.py + :start-after: [START logger_log_resource_text] + :end-before: [END logger_log_resource_text] + :dedent: 4 Retrieving log entries ---------------------- From 59bfb0312281f593dfde244d0df2ab1113d9dc27 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Tue, 29 Jan 2019 09:23:04 -0800 Subject: [PATCH 220/855] Docs: reformatted snippet. --- packages/google-cloud-logging/docs/snippets.py | 13 ++++++++----- packages/google-cloud-logging/synth.metadata | 10 +++++----- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index 35e12a2dca53..1a823994dc7c 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -135,12 +135,15 @@ def logger_usage(client, to_delete): # [START logger_log_resource_text] from google.cloud.logging.resource import Resource - res = Resource(type="generic_node", + + res = Resource( + type="generic_node", labels={ - 'location': 'us-central1-a', - 'namespace': 'default', - 'node_id': '10.10.10.1' - }) + "location": "us-central1-a", + "namespace": "default", + "node_id": "10.10.10.1", + }, + ) logger.log_struct( {"message": "My first entry", "weather": "partly cloudy"}, resource=res ) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index b46285d99187..e91c4f077558 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-17T13:22:21.430628Z", + "updateTime": "2019-01-29T13:23:16.990671Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.6", - "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" + "version": "0.16.7", + "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", - "internalRef": "229626798" + "sha": "3cdb7227019524e7963071cf80a9624bf055b284", + "internalRef": "231246006" } }, { From 1c6f92e8b0039c786e9b7c713d14da1d6bf1b3f8 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 29 Jan 2019 13:28:49 -0800 Subject: [PATCH 221/855] Add protos as an artifact to library (#7205) --- .../cloud/logging_v2/proto/http_request.proto | 93 +++ .../cloud/logging_v2/proto/log_entry.proto | 193 ++++++ .../cloud/logging_v2/proto/log_entry_pb2.py | 1 + .../cloud/logging_v2/proto/log_severity.proto | 73 ++ .../cloud/logging_v2/proto/logging.proto | 346 ++++++++++ .../logging_v2/proto/logging_config.proto | 633 ++++++++++++++++++ .../logging_v2/proto/logging_config_pb2.py | 1 + .../logging_v2/proto/logging_metrics.proto | 265 ++++++++ .../logging_v2/proto/logging_metrics_pb2.py | 1 + .../cloud/logging_v2/proto/logging_pb2.py | 1 + packages/google-cloud-logging/synth.py | 1 + 11 files changed, 1608 insertions(+) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto create mode 100644 
packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto new file mode 100644 index 000000000000..21b1367ab8ca --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto @@ -0,0 +1,93 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.logging.type; + +import "google/api/annotations.proto"; +import "google/protobuf/duration.proto"; + +option csharp_namespace = "Google.Cloud.Logging.Type"; +option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; +option java_multiple_files = true; +option java_outer_classname = "HttpRequestProto"; +option java_package = "com.google.logging.type"; +option php_namespace = "Google\\Cloud\\Logging\\Type"; + + +// A common proto for logging HTTP requests. Only contains semantics +// defined by the HTTP specification. Product-specific logging +// information MUST be defined in a separate message. +message HttpRequest { + // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. + string request_method = 1; + + // The scheme (http, https), the host name, the path and the query + // portion of the URL that was requested. + // Example: `"http://example.com/some/info?color=red"`. + string request_url = 2; + + // The size of the HTTP request message in bytes, including the request + // headers and the request body. + int64 request_size = 3; + + // The response code indicating the status of response. + // Examples: 200, 404. + int32 status = 4; + + // The size of the HTTP response message sent back to the client, in bytes, + // including the response headers and the response body. + int64 response_size = 5; + + // The user agent sent by the client. Example: + // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)"`. + string user_agent = 6; + + // The IP address (IPv4 or IPv6) of the client that issued the HTTP + // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. + string remote_ip = 7; + + // The IP address (IPv4 or IPv6) of the origin server that the request was + // sent to. + string server_ip = 13; + + // The referer URL of the request, as defined in + // [HTTP/1.1 Header Field Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). + string referer = 8; + + // The request processing latency on the server, from the time the request was + // received until the response was sent. + google.protobuf.Duration latency = 14; + + // Whether or not a cache lookup was attempted. + bool cache_lookup = 11; + + // Whether or not an entity was served from cache + // (with or without validation). 
+ bool cache_hit = 9; + + // Whether or not the response was validated with the origin server before + // being served from cache. This field is only meaningful if `cache_hit` is + // True. + bool cache_validated_with_origin_server = 10; + + // The number of HTTP response bytes inserted into cache. Set only when a + // cache fill was attempted. + int64 cache_fill_bytes = 12; + + // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" + string protocol = 15; +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto new file mode 100644 index 000000000000..2f1530e23f69 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto @@ -0,0 +1,193 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.logging.v2; + +import "google/api/annotations.proto"; +import "google/api/monitored_resource.proto"; +import "google/logging/type/http_request.proto"; +import "google/logging/type/log_severity.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.Logging.V2"; +option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; +option java_multiple_files = true; +option java_outer_classname = "LogEntryProto"; +option java_package = "com.google.logging.v2"; +option php_namespace = "Google\\Cloud\\Logging\\V2"; + + +// An individual entry in a log. +message LogEntry { + // Required. The resource name of the log to which this log entry belongs: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // A project number may optionally be used in place of PROJECT_ID. The + // project number is translated to its corresponding PROJECT_ID internally + // and the `log_name` field will contain PROJECT_ID in queries and exports. + // + // `[LOG_ID]` must be URL-encoded within `log_name`. Example: + // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. + // `[LOG_ID]` must be less than 512 characters long and can only include the + // following characters: upper and lower case alphanumeric characters, + // forward-slash, underscore, hyphen, and period. + // + // For backward compatibility, if `log_name` begins with a forward-slash, such + // as `/projects/...`, then the log entry is ingested as usual but the + // forward-slash is removed. Listing the log entry will not show the leading + // slash and filtering for a log name with a leading slash will never return + // any results. + string log_name = 12; + + // Required. The primary monitored resource associated with this log entry. 
+ // Example: a log entry that reports a database error would be + // associated with the monitored resource designating the particular + // database that reported the error. + google.api.MonitoredResource resource = 8; + + // Optional. The log entry payload, which can be one of multiple types. + oneof payload { + // The log entry payload, represented as a protocol buffer. Some + // Google Cloud Platform services use this field for their log + // entry payloads. + google.protobuf.Any proto_payload = 2; + + // The log entry payload, represented as a Unicode string (UTF-8). + string text_payload = 3; + + // The log entry payload, represented as a structure that is + // expressed as a JSON object. + google.protobuf.Struct json_payload = 6; + } + + // Optional. The time the event described by the log entry occurred. + // This time is used to compute the log entry's age and to enforce + // the logs retention period. If this field is omitted in a new log + // entry, then Logging assigns it the current time. + // Timestamps have nanosecond accuracy, but trailing zeros in the fractional + // seconds might be omitted when the timestamp is displayed. + // + // Incoming log entries should have timestamps that are no more than + // the [logs retention period](/logging/quotas) in the past, + // and no more than 24 hours in the future. Log entries outside those time + // boundaries will not be available when calling `entries.list`, but + // those log entries can still be exported with + // [LogSinks](/logging/docs/api/tasks/exporting-logs). + google.protobuf.Timestamp timestamp = 9; + + // Output only. The time the log entry was received by Logging. + google.protobuf.Timestamp receive_timestamp = 24; + + // Optional. The severity of the log entry. The default value is + // `LogSeverity.DEFAULT`. + google.logging.type.LogSeverity severity = 10; + + // Optional. A unique identifier for the log entry. If you provide a value, + // then Logging considers other log entries in the same project, + // with the same `timestamp`, and with the same `insert_id` to be duplicates + // which can be removed. If omitted in new log entries, then + // Logging assigns its own unique identifier. The `insert_id` is also used + // to order log entries that have the same `timestamp` value. + string insert_id = 4; + + // Optional. Information about the HTTP request associated with this + // log entry, if applicable. + google.logging.type.HttpRequest http_request = 7; + + // Optional. A set of user-defined (key, value) data that provides additional + // information about the log entry. + map labels = 11; + + // Output only. Additional metadata about the monitored resource. + // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have + // this field populated. + google.api.MonitoredResourceMetadata metadata = 25; + + // Optional. Information about an operation associated with the log entry, if + // applicable. + LogEntryOperation operation = 15; + + // Optional. Resource name of the trace associated with the log entry, if any. + // If it contains a relative resource name, the name is assumed to be relative + // to `//tracing.googleapis.com`. Example: + // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` + string trace = 22; + + // Optional. The span ID within the trace associated with the log entry. + // For Trace spans, this is the same format that the Trace + // API v2 uses: a 16-character hexadecimal encoding of an 8-byte array, such + // as "000000000000004a". 
+ string span_id = 27; + + // Optional. The sampling decision of the trace associated with the log entry. + // True means that the trace resource name in the `trace` field was sampled + // for storage in a trace backend. False means that the trace was not sampled + // for storage when this log entry was written, or the sampling decision was + // unknown at the time. A non-sampled `trace` value is still useful as a + // request correlation identifier. The default is False. + bool trace_sampled = 30; + + // Optional. Source code location information associated with the log entry, + // if any. + LogEntrySourceLocation source_location = 23; +} + +// Additional information about a potentially long-running operation with which +// a log entry is associated. +message LogEntryOperation { + // Optional. An arbitrary operation identifier. Log entries with the + // same identifier are assumed to be part of the same operation. + string id = 1; + + // Optional. An arbitrary producer identifier. The combination of + // `id` and `producer` must be globally unique. Examples for `producer`: + // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. + string producer = 2; + + // Optional. Set this to True if this is the first log entry in the operation. + bool first = 3; + + // Optional. Set this to True if this is the last log entry in the operation. + bool last = 4; +} + +// Additional information about the source code location that produced the log +// entry. +message LogEntrySourceLocation { + // Optional. Source file name. Depending on the runtime environment, this + // might be a simple name or a fully-qualified name. + string file = 1; + + // Optional. Line within the source file. 1-based; 0 indicates no line number + // available. + int64 line = 2; + + // Optional. Human-readable name of the function or method being invoked, with + // optional context such as the class or package name. This information may be + // used in contexts such as the logs viewer, where a file and line number are + // less meaningful. The format can vary by language. For example: + // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` + // (Python). + string function = 3; +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 3aa17006be30..8cba40a04d64 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -857,3 +857,4 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None # @@protoc_insertion_point(module_scope) +# -*- coding: utf-8 -*- diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto new file mode 100644 index 000000000000..ccb08cacb445 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto @@ -0,0 +1,73 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.logging.type; + +import "google/api/annotations.proto"; + +option csharp_namespace = "Google.Cloud.Logging.Type"; +option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; +option java_multiple_files = true; +option java_outer_classname = "LogSeverityProto"; +option java_package = "com.google.logging.type"; +option php_namespace = "Google\\Cloud\\Logging\\Type"; + + +// The severity of the event described in a log entry, expressed as one of the +// standard severity levels listed below. For your reference, the levels are +// assigned the listed numeric values. The effect of using numeric values other +// than those listed is undefined. +// +// You can filter for log entries by severity. For example, the following +// filter expression will match log entries with severities `INFO`, `NOTICE`, +// and `WARNING`: +// +// severity > DEBUG AND severity <= WARNING +// +// If you are writing log entries, you should map other severity encodings to +// one of these standard levels. For example, you might map all of Java's FINE, +// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the +// original severity level in the log entry payload if you wish. +enum LogSeverity { + // (0) The log entry has no assigned severity level. + DEFAULT = 0; + + // (100) Debug or trace information. + DEBUG = 100; + + // (200) Routine information, such as ongoing status or performance. + INFO = 200; + + // (300) Normal but significant events, such as start up, shut down, or + // a configuration change. + NOTICE = 300; + + // (400) Warning events might cause problems. + WARNING = 400; + + // (500) Error events are likely to cause problems. + ERROR = 500; + + // (600) Critical events cause more severe problems or outages. + CRITICAL = 600; + + // (700) A person must take an action immediately. + ALERT = 700; + + // (800) One or more systems are unusable. + EMERGENCY = 800; +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto new file mode 100644 index 000000000000..b1812e6f82d2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -0,0 +1,346 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.logging.v2; + +import "google/api/annotations.proto"; +import "google/api/monitored_resource.proto"; +import "google/logging/v2/log_entry.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.Logging.V2"; +option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; +option java_multiple_files = true; +option java_outer_classname = "LoggingProto"; +option java_package = "com.google.logging.v2"; +option php_namespace = "Google\\Cloud\\Logging\\V2"; + + +// Service for ingesting and querying logs. +service LoggingServiceV2 { + // Deletes all the log entries in a log. + // The log reappears if it receives new entries. + // Log entries written shortly before the delete operation might not be + // deleted. + rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{log_name=projects/*/logs/*}" + additional_bindings { + delete: "/v2/{log_name=organizations/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=folders/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=billingAccounts/*/logs/*}" + } + }; + } + + // Writes log entries to Logging. This API method is the + // only way to send log entries to Logging. This method + // is used, directly or indirectly, by the Logging agent + // (fluentd) and all logging libraries configured to use Logging. + // A single request may contain log entries for a maximum of 1000 + // different resources (projects, organizations, billing accounts or + // folders) + rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { + option (google.api.http) = { + post: "/v2/entries:write" + body: "*" + }; + } + + // Lists log entries. Use this method to retrieve log entries from + // Logging. For ways to export log entries, see + // [Exporting Logs](/logging/docs/export). + rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { + option (google.api.http) = { + post: "/v2/entries:list" + body: "*" + }; + } + + // Lists the descriptors for monitored resource types used by Logging. + rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { + option (google.api.http) = { + get: "/v2/monitoredResourceDescriptors" + }; + } + + // Lists the logs in projects, organizations, folders, or billing accounts. + // Only logs that have entries are listed. + rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*}/logs" + additional_bindings { + get: "/v2/{parent=projects/*}/logs" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/logs" + } + additional_bindings { + get: "/v2/{parent=folders/*}/logs" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/logs" + } + }; + } +} + +// The parameters to DeleteLog. +message DeleteLogRequest { + // Required. The resource name of the log to delete: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // `[LOG_ID]` must be URL-encoded. 
For example, + // `"projects/my-project-id/logs/syslog"`, + // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. + // For more information about log names, see + // [LogEntry][google.logging.v2.LogEntry]. + string log_name = 1; +} + +// The parameters to WriteLogEntries. +message WriteLogEntriesRequest { + // Optional. A default log resource name that is assigned to all log entries + // in `entries` that do not specify a value for `log_name`: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // `[LOG_ID]` must be URL-encoded. For example: + // + // "projects/my-project-id/logs/syslog" + // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + // + // The permission logging.logEntries.create is needed on each + // project, organization, billing account, or folder that is receiving + // new log entries, whether the resource is specified in + // logName or in an individual log entry. + string log_name = 1; + + // Optional. A default monitored resource object that is assigned to all log + // entries in `entries` that do not specify a value for `resource`. Example: + // + // { "type": "gce_instance", + // "labels": { + // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + // + // See [LogEntry][google.logging.v2.LogEntry]. + google.api.MonitoredResource resource = 2; + + // Optional. Default labels that are added to the `labels` field of all log + // entries in `entries`. If a log entry already has a label with the same key + // as a label in this parameter, then the log entry's label is not changed. + // See [LogEntry][google.logging.v2.LogEntry]. + map labels = 3; + + // Required. The log entries to send to Logging. The order of log + // entries in this list does not matter. Values supplied in this method's + // `log_name`, `resource`, and `labels` fields are copied into those log + // entries in this list that do not include values for their corresponding + // fields. For more information, see the + // [LogEntry][google.logging.v2.LogEntry] type. + // + // If the `timestamp` or `insert_id` fields are missing in log entries, then + // this method supplies the current time or a unique identifier, respectively. + // The supplied values are chosen so that, among the log entries that did not + // supply their own values, the entries earlier in the list will sort before + // the entries later in the list. See the `entries.list` method. + // + // Log entries with timestamps that are more than the + // [logs retention period](/logging/quota-policy) in the past or more than + // 24 hours in the future will not be available when calling `entries.list`. + // However, those log entries can still be exported with + // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // + // To improve throughput and to avoid exceeding the + // [quota limit](/logging/quota-policy) for calls to `entries.write`, + // you should try to include several log entries in this list, + // rather than calling this method for each individual log entry. + repeated LogEntry entries = 4; + + // Optional. Whether valid entries should be written even if some other + // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. 
If any + // entry is not written, then the response status is the error associated + // with one of the failed entries and the response includes error details + // keyed by the entries' zero-based index in the `entries.write` method. + bool partial_success = 5; + + // Optional. If true, the request should expect normal response, but the + // entries won't be persisted nor exported. Useful for checking whether the + // logging API endpoints are working properly before sending valuable data. + bool dry_run = 6; +} + +// Result returned from WriteLogEntries. +// empty +message WriteLogEntriesResponse { + +} + +// Error details for WriteLogEntries with partial success. +message WriteLogEntriesPartialErrors { + // When `WriteLogEntriesRequest.partial_success` is true, records the error + // status for entries that were not written due to a permanent error, keyed + // by the entry's zero-based index in `WriteLogEntriesRequest.entries`. + // + // Failed requests for which no entries are written will not include + // per-entry errors. + map log_entry_errors = 1; +} + +// The parameters to `ListLogEntries`. +message ListLogEntriesRequest { + // Deprecated. Use `resource_names` instead. One or more project identifiers + // or project numbers from which to retrieve log entries. Example: + // `"my-project-1A"`. If present, these project identifiers are converted to + // resource name format and added to the list of resources in + // `resource_names`. + repeated string project_ids = 1 [deprecated = true]; + + // Required. Names of one or more parent resources from which to + // retrieve log entries: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + // + // Projects listed in the `project_ids` field are added to this list. + repeated string resource_names = 8; + + // Optional. A filter that chooses which log entries to return. See [Advanced + // Logs Filters](/logging/docs/view/advanced_filters). Only log entries that + // match the filter are returned. An empty filter matches all log entries in + // the resources listed in `resource_names`. Referencing a parent resource + // that is not listed in `resource_names` will cause the filter to return no + // results. + // The maximum length of the filter is 20000 characters. + string filter = 2; + + // Optional. How the results should be sorted. Presently, the only permitted + // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first + // option returns entries in order of increasing values of + // `LogEntry.timestamp` (oldest first), and the second option returns entries + // in order of decreasing timestamps (newest first). Entries with equal + // timestamps are returned in order of their `insert_id` values. + string order_by = 3; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `next_page_token` in the + // response indicates that more results might be available. + int32 page_size = 4; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `page_token` must be the value of + // `next_page_token` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 5; +} + +// Result returned from `ListLogEntries`. +message ListLogEntriesResponse { + // A list of log entries. 
If `entries` is empty, `nextPageToken` may still be + // returned, indicating that more entries may exist. See `nextPageToken` for + // more information. + repeated LogEntry entries = 1; + + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. + // + // If a value for `next_page_token` appears and the `entries` field is empty, + // it means that the search found no log entries so far but it did not have + // time to search all the possible log entries. Retry the method with this + // value for `page_token` to continue the search. Alternatively, consider + // speeding up the search by changing your filter to specify a single log name + // or resource type, or to narrow the time range of the search. + string next_page_token = 2; +} + +// The parameters to ListMonitoredResourceDescriptors +message ListMonitoredResourceDescriptorsRequest { + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 1; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 2; +} + +// Result returned from ListMonitoredResourceDescriptors. +message ListMonitoredResourceDescriptorsResponse { + // A list of resource descriptors. + repeated google.api.MonitoredResourceDescriptor resource_descriptors = 1; + + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. + string next_page_token = 2; +} + +// The parameters to ListLogs. +message ListLogsRequest { + // Required. The resource name that owns the logs: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + string parent = 1; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 2; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 3; +} + +// Result returned from ListLogs. +message ListLogsResponse { + // A list of log names. For example, + // `"projects/my-project/syslog"` or + // `"organizations/123/cloudresourcemanager.googleapis.com%2Factivity"`. + repeated string log_names = 3; + + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. 
+ string next_page_token = 2; +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto new file mode 100644 index 000000000000..8803ace8180e --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -0,0 +1,633 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.logging.v2; + +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.Logging.V2"; +option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; +option java_multiple_files = true; +option java_outer_classname = "LoggingConfigProto"; +option java_package = "com.google.logging.v2"; +option php_namespace = "Google\\Cloud\\Logging\\V2"; + + +// Service for configuring sinks used to export log entries out of +// Logging. +service ConfigServiceV2 { + // Lists sinks. + rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*}/sinks" + additional_bindings { + get: "/v2/{parent=projects/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=folders/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/sinks" + } + }; + } + + // Gets a sink. + rpc GetSink(GetSinkRequest) returns (LogSink) { + option (google.api.http) = { + get: "/v2/{sink_name=*/*/sinks/*}" + additional_bindings { + get: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=folders/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=billingAccounts/*/sinks/*}" + } + }; + } + + // Creates a sink that exports specified log entries to a destination. The + // export of newly-ingested log entries begins immediately, unless the sink's + // `writer_identity` is not permitted to write to the destination. A sink can + // export log entries only from the resource owning the sink. + rpc CreateSink(CreateSinkRequest) returns (LogSink) { + option (google.api.http) = { + post: "/v2/{parent=*/*}/sinks" + body: "sink" + additional_bindings { + post: "/v2/{parent=projects/*}/sinks" + body: "sink" + } + additional_bindings { + post: "/v2/{parent=organizations/*}/sinks" + body: "sink" + } + additional_bindings { + post: "/v2/{parent=folders/*}/sinks" + body: "sink" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*}/sinks" + body: "sink" + } + }; + } + + // Updates a sink. This method replaces the following fields in the existing + // sink with values from the new sink: `destination`, and `filter`. 
+ // The updated sink might also have a new `writer_identity`; see the + // `unique_writer_identity` field. + rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { + option (google.api.http) = { + put: "/v2/{sink_name=*/*/sinks/*}" + body: "sink" + additional_bindings { + put: "/v2/{sink_name=projects/*/sinks/*}" + body: "sink" + } + additional_bindings { + put: "/v2/{sink_name=organizations/*/sinks/*}" + body: "sink" + } + additional_bindings { + put: "/v2/{sink_name=folders/*/sinks/*}" + body: "sink" + } + additional_bindings { + put: "/v2/{sink_name=billingAccounts/*/sinks/*}" + body: "sink" + } + additional_bindings { + patch: "/v2/{sink_name=projects/*/sinks/*}" + body: "sink" + } + additional_bindings { + patch: "/v2/{sink_name=organizations/*/sinks/*}" + body: "sink" + } + additional_bindings { + patch: "/v2/{sink_name=folders/*/sinks/*}" + body: "sink" + } + additional_bindings { + patch: "/v2/{sink_name=billingAccounts/*/sinks/*}" + body: "sink" + } + }; + } + + // Deletes a sink. If the sink has a unique `writer_identity`, then that + // service account is also deleted. + rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{sink_name=*/*/sinks/*}" + additional_bindings { + delete: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=folders/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" + } + }; + } + + // Lists all the exclusions in a parent resource. + rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*}/exclusions" + additional_bindings { + get: "/v2/{parent=projects/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=folders/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/exclusions" + } + }; + } + + // Gets the description of an exclusion. + rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { + option (google.api.http) = { + get: "/v2/{name=*/*/exclusions/*}" + additional_bindings { + get: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/exclusions/*}" + } + }; + } + + // Creates a new exclusion in a specified parent resource. + // Only log entries belonging to that resource can be excluded. + // You can have up to 10 exclusions in a resource. + rpc CreateExclusion(CreateExclusionRequest) returns (LogExclusion) { + option (google.api.http) = { + post: "/v2/{parent=*/*}/exclusions" + body: "exclusion" + additional_bindings { + post: "/v2/{parent=projects/*}/exclusions" + body: "exclusion" + } + additional_bindings { + post: "/v2/{parent=organizations/*}/exclusions" + body: "exclusion" + } + additional_bindings { + post: "/v2/{parent=folders/*}/exclusions" + body: "exclusion" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*}/exclusions" + body: "exclusion" + } + }; + } + + // Changes one or more properties of an existing exclusion. 
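+  // (Illustrative request shape only, inferred from the HTTP bindings below
+  // rather than stated in this file: a caller issues
+  // PATCH /v2/projects/my-project/exclusions/my-exclusion with the changed
+  // LogExclusion as the request body and an `update_mask` such as
+  // "filter,description"; "my-project" and "my-exclusion" are placeholders.)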
+ rpc UpdateExclusion(UpdateExclusionRequest) returns (LogExclusion) { + option (google.api.http) = { + patch: "/v2/{name=*/*/exclusions/*}" + body: "exclusion" + additional_bindings { + patch: "/v2/{name=projects/*/exclusions/*}" + body: "exclusion" + } + additional_bindings { + patch: "/v2/{name=organizations/*/exclusions/*}" + body: "exclusion" + } + additional_bindings { + patch: "/v2/{name=folders/*/exclusions/*}" + body: "exclusion" + } + additional_bindings { + patch: "/v2/{name=billingAccounts/*/exclusions/*}" + body: "exclusion" + } + }; + } + + // Deletes an exclusion. + rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=*/*/exclusions/*}" + additional_bindings { + delete: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=billingAccounts/*/exclusions/*}" + } + }; + } +} + +// Describes a sink used to export log entries to one of the following +// destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a +// Cloud Pub/Sub topic. A logs filter controls which log entries are +// exported. The sink must be created within a project, organization, billing +// account, or folder. +message LogSink { + // Available log entry formats. Log entries can be written to + // Logging in either format and can be exported in either format. + // Version 2 is the preferred format. + enum VersionFormat { + // An unspecified format version that will default to V2. + VERSION_FORMAT_UNSPECIFIED = 0; + + // `LogEntry` version 2 format. + V2 = 1; + + // `LogEntry` version 1 format. + V1 = 2; + } + + // Required. The client-assigned sink identifier, unique within the + // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are + // limited to 100 characters and can include only the following characters: + // upper and lower-case alphanumeric characters, underscores, hyphens, and + // periods. + string name = 1; + + // Required. The export destination: + // + // "storage.googleapis.com/[GCS_BUCKET]" + // "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + // "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + // + // The sink's `writer_identity`, set when the sink is created, must + // have permission to write to the destination or else the log + // entries are not exported. For more information, see + // [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs). + string destination = 3; + + // Optional. + // An [advanced logs filter](/logging/docs/view/advanced_filters). The only + // exported log entries are those that are in the resource owning the sink and + // that match the filter. For example: + // + // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + string filter = 5; + + // Deprecated. The log entry format to use for this sink's exported log + // entries. The v2 format is used by default and cannot be changed. + VersionFormat output_version_format = 6 [deprecated = true]; + + // Output only. An IAM identity—a service account or group—under + // which Logging writes the exported log entries to the sink's + // destination. 
This field is set by + // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) + // and + // [sinks.update](/logging/docs/api/reference/rest/v2/projects.sinks/update), + // based on the setting of `unique_writer_identity` in those methods. + // + // Until you grant this identity write-access to the destination, log entry + // exports from this sink will fail. For more information, + // see [Granting access for a + // resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). + // Consult the destination service's documentation to determine the + // appropriate IAM roles to assign to the identity. + string writer_identity = 8; + + // Optional. This field applies only to sinks owned by organizations and + // folders. If the field is false, the default, only the logs owned by the + // sink's parent resource are available for export. If the field is true, then + // logs from all the projects, folders, and billing accounts contained in the + // sink's parent resource are also available for export. Whether a particular + // log entry from the children is exported depends on the sink's filter + // expression. For example, if this field is true, then the filter + // `resource.type=gce_instance` would export all Compute Engine VM instance + // log entries from all projects in the sink's parent. To only export entries + // from certain child projects, filter on the project part of the log name: + // + // logName:("projects/test-project1/" OR "projects/test-project2/") AND + // resource.type=gce_instance + bool include_children = 9; + + // Deprecated. This field is ignored when creating or updating sinks. + google.protobuf.Timestamp start_time = 10 [deprecated = true]; + + // Deprecated. This field is ignored when creating or updating sinks. + google.protobuf.Timestamp end_time = 11 [deprecated = true]; +} + +// The parameters to `ListSinks`. +message ListSinksRequest { + // Required. The parent resource whose sinks are to be listed: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + string parent = 1; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 2; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 3; +} + +// Result returned from `ListSinks`. +message ListSinksResponse { + // A list of sinks. + repeated LogSink sinks = 1; + + // If there might be more results than appear in this response, then + // `nextPageToken` is included. To get the next set of results, call the same + // method again using the value of `nextPageToken` as `pageToken`. + string next_page_token = 2; +} + +// The parameters to `GetSink`. +message GetSinkRequest { + // Required. The resource name of the sink: + // + // "projects/[PROJECT_ID]/sinks/[SINK_ID]" + // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + // "folders/[FOLDER_ID]/sinks/[SINK_ID]" + // + // Example: `"projects/my-project-id/sinks/my-sink-id"`. 
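+  // For orientation, the generated Python client in this package exposes a
+  // helper that builds exactly this string; a hedged sketch with placeholder
+  // arguments:
+  //
+  //     sink_name = ConfigServiceV2Client.sink_path("my-project-id", "my-sink-id")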
+  string sink_name = 1;
+}
+
+// The parameters to `CreateSink`.
+message CreateSinkRequest {
+  // Required. The resource in which to create the sink:
+  //
+  //     "projects/[PROJECT_ID]"
+  //     "organizations/[ORGANIZATION_ID]"
+  //     "billingAccounts/[BILLING_ACCOUNT_ID]"
+  //     "folders/[FOLDER_ID]"
+  //
+  // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`.
+  string parent = 1;
+
+  // Required. The new sink, whose `name` parameter is a sink identifier that
+  // is not already in use.
+  LogSink sink = 2;
+
+  // Optional. Determines the kind of IAM identity returned as `writer_identity`
+  // in the new sink. If this value is omitted or set to false, and if the
+  // sink's parent is a project, then the value returned as `writer_identity` is
+  // the same group or service account used by Logging before the
+  // addition of writer identities to this API. The sink's destination must be
+  // in the same project as the sink itself.
+  //
+  // If this field is set to true, or if the sink is owned by a non-project
+  // resource such as an organization, then the value of `writer_identity` will
+  // be a unique service account used only for exports from the new sink. For
+  // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink].
+  bool unique_writer_identity = 3;
+}
+
+// The parameters to `UpdateSink`.
+message UpdateSinkRequest {
+  // Required. The full resource name of the sink to update, including the
+  // parent resource and the sink identifier:
+  //
+  //     "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+  //     "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+  //     "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+  //     "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+  //
+  // Example: `"projects/my-project-id/sinks/my-sink-id"`.
+  string sink_name = 1;
+
+  // Required. The updated sink, whose name is the same identifier that appears
+  // as part of `sink_name`.
+  LogSink sink = 2;
+
+  // Optional. See
+  // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create)
+  // for a description of this field. When updating a sink, the effect of this
+  // field on the value of `writer_identity` in the updated sink depends on both
+  // the old and new values of this field:
+  //
+  //     + If the old and new values of this field are both false or both true,
+  //       then there is no change to the sink's `writer_identity`.
+  //     + If the old value is false and the new value is true, then
+  //       `writer_identity` is changed to a unique service account.
+  //     + It is an error if the old value is true and the new value is
+  //       set to false or defaulted to false.
+  bool unique_writer_identity = 3;
+
+  // Optional. Field mask that specifies the fields in `sink` that need
+  // an update. A sink field will be overwritten if, and only if, it is
+  // in the update mask. `name` and output only fields cannot be updated.
+  //
+  // An empty updateMask is temporarily treated as using the following mask
+  // for backwards compatibility purposes:
+  //   destination,filter,includeChildren
+  // At some point in the future, this behavior will be removed and specifying an
+  // empty updateMask will be an error.
+  //
+  // For a detailed `FieldMask` definition, see
+  // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
+  //
+  // Example: `updateMask=filter`.
+  google.protobuf.FieldMask update_mask = 4;
+}
+
+// The parameters to `DeleteSink`.
+message DeleteSinkRequest {
+  // Required.
The full resource name of the sink to delete, including the
+  // parent resource and the sink identifier:
+  //
+  //     "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+  //     "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+  //     "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+  //     "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+  //
+  // Example: `"projects/my-project-id/sinks/my-sink-id"`.
+  string sink_name = 1;
+}
+
+// Specifies a set of log entries that are not to be stored in
+// Logging. If your project receives a large volume of logs, you might be able
+// to use exclusions to reduce your chargeable logs. Exclusions are processed
+// after log sinks, so you can export log entries before they are excluded.
+// Audit log entries and log entries from Amazon Web Services are never
+// excluded.
+message LogExclusion {
+  // Required. A client-assigned identifier, such as
+  // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and
+  // can include only letters, digits, underscores, hyphens, and periods.
+  string name = 1;
+
+  // Optional. A description of this exclusion.
+  string description = 2;
+
+  // Required.
+  // An [advanced logs filter](/logging/docs/view/advanced_filters)
+  // that matches the log entries to be excluded. By using the
+  // [sample function](/logging/docs/view/advanced_filters#sample),
+  // you can exclude less than 100% of the matching log entries.
+  // For example, the following filter matches 99% of low-severity log
+  // entries from load balancers:
+  //
+  //     "resource.type=http_load_balancer severity<ERROR"
+  //
+  // The maximum length of the filter is 20000 characters.
+  string filter = 3;
+
+  // Optional. The metric descriptor associated with the logs-based metric.
+  // If unspecified, it uses a default metric descriptor with a DELTA metric
+  // kind, INT64 value type, with no labels and a unit of "1". Such a metric
+  // counts the number of log entries matching the `filter` expression.
+  //
+  // The `name`, `type`, and `description` fields in the `metric_descriptor`
+  // are output only, and are constructed using the `name` and `description`
+  // fields in the LogMetric.
+  //
+  // To create a logs-based metric that records a distribution of log values, a
+  // DELTA metric kind with a DISTRIBUTION value type must be used along with
+  // a `value_extractor` expression in the LogMetric.
+  //
+  // Each label in the metric descriptor must have a matching label
+  // name as the key and an extractor expression as the value in the
+  // `label_extractors` map.
+  //
+  // The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot
+  // be updated once initially configured. New labels can be added in the
+  // `metric_descriptor`, but existing labels cannot be modified except for
+  // their description.
+  google.api.MetricDescriptor metric_descriptor = 5;
+
+  // Optional. A `value_extractor` is required when using a distribution
+  // logs-based metric to extract the values to record from a log entry.
+  // Two functions are supported for value extraction: `EXTRACT(field)` or
+  // `REGEXP_EXTRACT(field, regex)`. The arguments are:
+  //   1. field: The name of the log entry field from which the value is to be
+  //      extracted.
+  //   2. regex: A regular expression using the Google RE2 syntax
+  //      (https://github.com/google/re2/wiki/Syntax) with a single capture
+  //      group to extract data from the specified log entry field. The value
+  //      of the field is converted to a string before applying the regex.
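+  //      For instance (a hypothetical illustration, not taken from this
+  //      file), REGEXP_EXTRACT(jsonPayload.latency, "(\\d+)ms") would record
+  //      125 for a log entry whose latency field is "125ms".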
+  //      It is an error to specify a regex that does not include exactly one
+  //      capture group.
+  //
+  // The result of the extraction must be convertible to a double type, as the
+  // distribution always records double values. If either the extraction or
+  // the conversion to double fails, then those values are not recorded in the
+  // distribution.
+  //
+  // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`
+  string value_extractor = 6;
+
+  // Optional. A map from a label key string to an extractor expression which is
+  // used to extract data from a log entry field and assign it as the label value.
+  // Each label key specified in the LabelDescriptor must have an associated
+  // extractor expression in this map. The syntax of the extractor expression
+  // is the same as for the `value_extractor` field.
+  //
+  // The extracted value is converted to the type defined in the label
+  // descriptor. If either the extraction or the type conversion fails,
+  // the label will have a default value. The default value for a string
+  // label is an empty string, for an integer label it is 0, and for a boolean
+  // label it is `false`.
+  //
+  // Note that there are upper bounds on the maximum number of labels and the
+  // number of active time series that are allowed in a project.
+  map<string, string> label_extractors = 7;
+
+  // Optional. The `bucket_options` are required when the logs-based metric is
+  // using a DISTRIBUTION value type and it describes the bucket boundaries
+  // used to create a histogram of the extracted values.
+  google.api.Distribution.BucketOptions bucket_options = 8;
+
+  // Deprecated. The API version that created or updated this metric.
+  // The v2 format is used by default and cannot be changed.
+  ApiVersion version = 4 [deprecated = true];
+}
+
+// The parameters to ListLogMetrics.
+message ListLogMetricsRequest {
+  // Required. The name of the project containing the metrics:
+  //
+  //     "projects/[PROJECT_ID]"
+  string parent = 1;
+
+  // Optional. If present, then retrieve the next batch of results from the
+  // preceding call to this method. `pageToken` must be the value of
+  // `nextPageToken` from the previous response. The values of other method
+  // parameters should be identical to those in the previous call.
+  string page_token = 2;
+
+  // Optional. The maximum number of results to return from this request.
+  // Non-positive values are ignored. The presence of `nextPageToken` in the
+  // response indicates that more results might be available.
+  int32 page_size = 3;
+}
+
+// Result returned from ListLogMetrics.
+message ListLogMetricsResponse {
+  // A list of logs-based metrics.
+  repeated LogMetric metrics = 1;
+
+  // If there might be more results than appear in this response, then
+  // `nextPageToken` is included. To get the next set of results, call this
+  // method again using the value of `nextPageToken` as `pageToken`.
+  string next_page_token = 2;
+}
+
+// The parameters to GetLogMetric.
+message GetLogMetricRequest {
+  // The resource name of the desired metric:
+  //
+  //     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+  string metric_name = 1;
+}
+
+// The parameters to CreateLogMetric.
+message CreateLogMetricRequest {
+  // The resource name of the project in which to create the metric:
+  //
+  //     "projects/[PROJECT_ID]"
+  //
+  // The new metric must be provided in the request.
+  string parent = 1;
+
+  // The new logs-based metric, which must not have an identifier that
+  // already exists.
+  LogMetric metric = 2;
+}
+
+// The parameters to UpdateLogMetric.
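+// For orientation, a hedged Python sketch against the generated client that
+// appears later in this series; "my-project", "my-metric", and
+// `updated_metric` are placeholders:
+//
+//     metric_name = MetricsServiceV2Client.metric_path("my-project", "my-metric")
+//     client.update_log_metric(metric_name=metric_name, metric=updated_metric)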
+message UpdateLogMetricRequest {
+  // The resource name of the metric to update:
+  //
+  //     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+  //
+  // The updated metric must be provided in the request and its
+  // `name` field must be the same as `[METRIC_ID]`. If the metric
+  // does not exist in `[PROJECT_ID]`, then a new metric is created.
+  string metric_name = 1;
+
+  // The updated metric.
+  LogMetric metric = 2;
+}
+
+// The parameters to DeleteLogMetric.
+message DeleteLogMetricRequest {
+  // The resource name of the metric to delete:
+  //
+  //     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+  string metric_name = 1;
+}
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py
index 54dc8f703c20..af0e78dd36ad 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py
@@ -963,3 +963,4 @@
 DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2
 # @@protoc_insertion_point(module_scope)
+# -*- coding: utf-8 -*-
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py
index a37710bcf382..585f8f85624f 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py
@@ -1304,3 +1304,4 @@
 DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2
 # @@protoc_insertion_point(module_scope)
+# -*- coding: utf-8 -*-
diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py
index 922d805e9b2c..c24382ce4d72 100644
--- a/packages/google-cloud-logging/synth.py
+++ b/packages/google-cloud-logging/synth.py
@@ -27,6 +27,7 @@
     "v2",
     config_path="/google/logging/artman_logging.yaml",
     artman_output_name="logging-v2",
+    include_protos=True,
 )

 s.move(library / "google/cloud/logging_v2/proto")

From c738d866d66b91f6652c3f477f1a027479b5ee63 Mon Sep 17 00:00:00 2001
From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com>
Date: Wed, 30 Jan 2019 10:21:47 -0800
Subject: [PATCH 222/855] Trivial gapic-generator change.
(#7230) --- .../google/cloud/logging_v2/proto/log_entry_pb2.py | 1 - .../google/cloud/logging_v2/proto/logging_config_pb2.py | 1 - .../google/cloud/logging_v2/proto/logging_metrics_pb2.py | 1 - .../google/cloud/logging_v2/proto/logging_pb2.py | 1 - packages/google-cloud-logging/synth.metadata | 6 +++--- 5 files changed, 3 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 8cba40a04d64..3aa17006be30 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -857,4 +857,3 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None # @@protoc_insertion_point(module_scope) -# -*- coding: utf-8 -*- diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 5d6592292ecc..d62ef9c95eec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1699,4 +1699,3 @@ DESCRIPTOR.services_by_name["ConfigServiceV2"] = _CONFIGSERVICEV2 # @@protoc_insertion_point(module_scope) -# -*- coding: utf-8 -*- diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index af0e78dd36ad..54dc8f703c20 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -963,4 +963,3 @@ DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 # @@protoc_insertion_point(module_scope) -# -*- coding: utf-8 -*- diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 585f8f85624f..a37710bcf382 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1304,4 +1304,3 @@ DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 # @@protoc_insertion_point(module_scope) -# -*- coding: utf-8 -*- diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index e91c4f077558..385891134f85 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-01-29T13:23:16.990671Z", + "updateTime": "2019-01-30T13:24:12.640581Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3cdb7227019524e7963071cf80a9624bf055b284", - "internalRef": "231246006" + "sha": "f0195b40fd5f5f412e6819ac01df08cedb8e5fd7", + "internalRef": "231440809" } }, { From a98a340c2384b3fad1adccaeeb1129ceb943f550 Mon Sep 17 00:00:00 2001 From: Pravin Dahal Date: Mon, 11 Feb 2019 19:12:32 +0100 Subject: [PATCH 223/855] Updated client library documentation URLs. 
(#7307) Previously, the URLs would redirect using JavaScript, which would either be slow or not work at all (in case JavaScript is disabled on the browser) --- packages/google-cloud-logging/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 78635f780b53..379dd813c249 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -14,7 +14,7 @@ Logging configuration. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ .. _Stackdriver Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/logging/usage.html +.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/logging/usage.html .. _Product Documentation: https://cloud.google.com/logging/docs Quick Start @@ -30,7 +30,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html Installation ~~~~~~~~~~~~ From 79f8126c2c8143e253634e94687804f9416d5aa4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Tue, 26 Feb 2019 09:20:38 -0800 Subject: [PATCH 224/855] Copy lintified proto files (via synth). (#7450) --- .../cloud/logging_v2/proto/log_entry.proto | 1 - .../cloud/logging_v2/proto/logging.proto | 39 ++----- .../logging_v2/proto/logging_config.proto | 107 +++++------------- .../logging_v2/proto/logging_metrics.proto | 1 - packages/google-cloud-logging/synth.metadata | 10 +- 5 files changed, 46 insertions(+), 112 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto index 2f1530e23f69..de9786daf733 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto @@ -33,7 +33,6 @@ option java_outer_classname = "LogEntryProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; - // An individual entry in a log. message LogEntry { // Required. The resource name of the log to which this log entry belongs: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto index b1812e6f82d2..d04cd5c03dd0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -33,7 +33,6 @@ option java_outer_classname = "LoggingProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; - // Service for ingesting and querying logs. 
service LoggingServiceV2 { // Deletes all the log entries in a log. @@ -43,15 +42,9 @@ service LoggingServiceV2 { rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { - delete: "/v2/{log_name=organizations/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=folders/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=billingAccounts/*/logs/*}" - } + additional_bindings { delete: "/v2/{log_name=organizations/*/logs/*}" } + additional_bindings { delete: "/v2/{log_name=folders/*/logs/*}" } + additional_bindings { delete: "/v2/{log_name=billingAccounts/*/logs/*}" } }; } @@ -62,7 +55,8 @@ service LoggingServiceV2 { // A single request may contain log entries for a maximum of 1000 // different resources (projects, organizations, billing accounts or // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { + rpc WriteLogEntries(WriteLogEntriesRequest) + returns (WriteLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:write" body: "*" @@ -80,7 +74,8 @@ service LoggingServiceV2 { } // Lists the descriptors for monitored resource types used by Logging. - rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { + rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) + returns (ListMonitoredResourceDescriptorsResponse) { option (google.api.http) = { get: "/v2/monitoredResourceDescriptors" }; @@ -91,18 +86,10 @@ service LoggingServiceV2 { rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/logs" - additional_bindings { - get: "/v2/{parent=projects/*}/logs" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/logs" - } - additional_bindings { - get: "/v2/{parent=folders/*}/logs" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/logs" - } + additional_bindings { get: "/v2/{parent=projects/*}/logs" } + additional_bindings { get: "/v2/{parent=organizations/*}/logs" } + additional_bindings { get: "/v2/{parent=folders/*}/logs" } + additional_bindings { get: "/v2/{parent=billingAccounts/*}/logs" } }; } } @@ -201,9 +188,7 @@ message WriteLogEntriesRequest { // Result returned from WriteLogEntries. // empty -message WriteLogEntriesResponse { - -} +message WriteLogEntriesResponse {} // Error details for WriteLogEntries with partial success. message WriteLogEntriesPartialErrors { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index 8803ace8180e..2afea1062df5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -30,7 +30,6 @@ option java_outer_classname = "LoggingConfigProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; - // Service for configuring sinks used to export log entries out of // Logging. 
service ConfigServiceV2 { @@ -38,18 +37,10 @@ service ConfigServiceV2 { rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/sinks" - additional_bindings { - get: "/v2/{parent=projects/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=folders/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/sinks" - } + additional_bindings { get: "/v2/{parent=projects/*}/sinks" } + additional_bindings { get: "/v2/{parent=organizations/*}/sinks" } + additional_bindings { get: "/v2/{parent=folders/*}/sinks" } + additional_bindings { get: "/v2/{parent=billingAccounts/*}/sinks" } }; } @@ -57,18 +48,10 @@ service ConfigServiceV2 { rpc GetSink(GetSinkRequest) returns (LogSink) { option (google.api.http) = { get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - get: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } + additional_bindings { get: "/v2/{sink_name=projects/*/sinks/*}" } + additional_bindings { get: "/v2/{sink_name=organizations/*/sinks/*}" } + additional_bindings { get: "/v2/{sink_name=folders/*/sinks/*}" } + additional_bindings { get: "/v2/{sink_name=billingAccounts/*/sinks/*}" } }; } @@ -80,18 +63,12 @@ service ConfigServiceV2 { option (google.api.http) = { post: "/v2/{parent=*/*}/sinks" body: "sink" - additional_bindings { - post: "/v2/{parent=projects/*}/sinks" - body: "sink" - } + additional_bindings { post: "/v2/{parent=projects/*}/sinks" body: "sink" } additional_bindings { post: "/v2/{parent=organizations/*}/sinks" body: "sink" } - additional_bindings { - post: "/v2/{parent=folders/*}/sinks" - body: "sink" - } + additional_bindings { post: "/v2/{parent=folders/*}/sinks" body: "sink" } additional_bindings { post: "/v2/{parent=billingAccounts/*}/sinks" body: "sink" @@ -147,15 +124,9 @@ service ConfigServiceV2 { rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - delete: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=folders/*/sinks/*}" - } + additional_bindings { delete: "/v2/{sink_name=projects/*/sinks/*}" } + additional_bindings { delete: "/v2/{sink_name=organizations/*/sinks/*}" } + additional_bindings { delete: "/v2/{sink_name=folders/*/sinks/*}" } additional_bindings { delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" } @@ -166,18 +137,10 @@ service ConfigServiceV2 { rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/exclusions" - additional_bindings { - get: "/v2/{parent=projects/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=folders/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/exclusions" - } + additional_bindings { get: "/v2/{parent=projects/*}/exclusions" } + additional_bindings { get: "/v2/{parent=organizations/*}/exclusions" } + additional_bindings { get: "/v2/{parent=folders/*}/exclusions" } + additional_bindings { get: "/v2/{parent=billingAccounts/*}/exclusions" 
} }; } @@ -185,18 +148,10 @@ service ConfigServiceV2 { rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { option (google.api.http) = { get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - get: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/exclusions/*}" - } + additional_bindings { get: "/v2/{name=projects/*/exclusions/*}" } + additional_bindings { get: "/v2/{name=organizations/*/exclusions/*}" } + additional_bindings { get: "/v2/{name=folders/*/exclusions/*}" } + additional_bindings { get: "/v2/{name=billingAccounts/*/exclusions/*}" } }; } @@ -254,15 +209,9 @@ service ConfigServiceV2 { rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - delete: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/exclusions/*}" - } + additional_bindings { delete: "/v2/{name=projects/*/exclusions/*}" } + additional_bindings { delete: "/v2/{name=organizations/*/exclusions/*}" } + additional_bindings { delete: "/v2/{name=folders/*/exclusions/*}" } additional_bindings { delete: "/v2/{name=billingAccounts/*/exclusions/*}" } @@ -431,7 +380,8 @@ message CreateSinkRequest { // If this field is set to true, or if the sink is owned by a non-project // resource such as an organization, then the value of `writer_identity` will // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. + // more information, see `writer_identity` in + // [LogSink][google.logging.v2.LogSink]. bool unique_writer_identity = 3; } @@ -611,8 +561,9 @@ message UpdateExclusionRequest { // Required. A nonempty list of fields to change in the existing exclusion. // New values for the fields are taken from the corresponding fields in the - // [LogExclusion][google.logging.v2.LogExclusion] included in this request. Fields not mentioned in - // `update_mask` are not changed and are ignored in the request. + // [LogExclusion][google.logging.v2.LogExclusion] included in this request. + // Fields not mentioned in `update_mask` are not changed and are ignored in + // the request. // // For example, to change the filter and description of an exclusion, // specify an `update_mask` of `"filter,description"`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto index 229fb7ca5ccf..dd3fa87821dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto @@ -32,7 +32,6 @@ option java_outer_classname = "LoggingMetricsProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; - // Service for configuring logs-based metrics. service MetricsServiceV2 { // Lists logs-based metrics. 
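For orientation (a hedged illustration, not part of this patch): the
`google.api.http` bindings reformatted above map each RPC onto REST endpoints
under the connection's base URL (`https://logging.googleapis.com`, per the
package's `_http.py`), so listing a project's sinks corresponds to a request
such as the sketch below, where "my-project" and ACCESS_TOKEN are placeholders:

    import requests

    # Issue the REST form of ConfigServiceV2.ListSinks for one project.
    resp = requests.get(
        "https://logging.googleapis.com/v2/projects/my-project/sinks",
        headers={"Authorization": "Bearer ACCESS_TOKEN"},
    )
    sinks = resp.json().get("sinks", [])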
diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 385891134f85..d46efbb6bebe 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-30T13:24:12.640581Z", + "updateTime": "2019-02-26T13:20:43.903128Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.7", - "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" + "version": "0.16.14", + "dockerImage": "googleapis/artman@sha256:f3d61ae45abaeefb6be5f228cda22732c2f1b00fb687c79c4bd4f2c42bb1e1a7" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f0195b40fd5f5f412e6819ac01df08cedb8e5fd7", - "internalRef": "231440809" + "sha": "29f098cb03a9983cc9cb15993de5da64419046f2", + "internalRef": "235621085" } }, { From c3818d6918f70cde461ec75ad85f3a43333e19f7 Mon Sep 17 00:00:00 2001 From: rob salmond Date: Tue, 19 Mar 2019 11:30:23 -0600 Subject: [PATCH 225/855] Use FQDN for GCE metadata endpoint. (#7520) Closes #7517. --- packages/google-cloud-logging/google/cloud/logging/_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index 837028f716d7..ed7a0ba8cf7b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -21,7 +21,7 @@ from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry -METADATA_URL = "http://metadata/computeMetadata/v1/" +METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/" METADATA_HEADERS = {"Metadata-Flavor": "Google"} From b71e3eb2b7ae8983877c4d7341094de46fbb8a05 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 20 Mar 2019 13:41:12 -0700 Subject: [PATCH 226/855] Remove classifier for Python 3.4 for end-of-life. (#7535) * Remove classifier for Python 3.4 for end-of-life. * Update supported versions in Client README, Contributing Guide --- packages/google-cloud-logging/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 379dd813c249..e7687dc74bb1 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -48,7 +48,7 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.4 +Python >= 3.5 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ From 49d9ff4ee502105276358c07a3ee00fe00d7faef Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 1 Apr 2019 09:14:19 -0700 Subject: [PATCH 227/855] Add routing header to method metadata (via synth). 
(#7598) --- .../gapic/config_service_v2_client.py | 131 ++++++++++++++++++ .../gapic/logging_service_v2_client.py | 27 ++++ .../gapic/metrics_service_v2_client.py | 66 +++++++++ packages/google-cloud-logging/synth.metadata | 12 +- 4 files changed, 230 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index a24f9c544c77..a2bb6dbbc587 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -23,6 +23,7 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template @@ -280,6 +281,19 @@ def list_sinks( request = logging_config_pb2.ListSinksRequest( parent=parent, page_size=page_size ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -356,6 +370,19 @@ def get_sink( ) request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("sink_name", sink_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_sink"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -448,6 +475,19 @@ def create_sink( request = logging_config_pb2.CreateSinkRequest( parent=parent, sink=sink, unique_writer_identity=unique_writer_identity ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_sink"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -561,6 +601,19 @@ def update_sink( unique_writer_identity=unique_writer_identity, update_mask=update_mask, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("sink_name", sink_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["update_sink"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -625,6 +678,19 @@ def delete_sink( ) request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("sink_name", sink_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_sink"]( request, retry=retry, 
timeout=timeout, metadata=metadata ) @@ -711,6 +777,19 @@ def list_exclusions( request = logging_config_pb2.ListExclusionsRequest( parent=parent, page_size=page_size ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -787,6 +866,19 @@ def get_exclusion( ) request = logging_config_pb2.GetExclusionRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_exclusion"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -866,6 +958,19 @@ def create_exclusion( request = logging_config_pb2.CreateExclusionRequest( parent=parent, exclusion=exclusion ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_exclusion"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -956,6 +1061,19 @@ def update_exclusion( request = logging_config_pb2.UpdateExclusionRequest( name=name, exclusion=exclusion, update_mask=update_mask ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["update_exclusion"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1018,6 +1136,19 @@ def delete_exclusion( ) request = logging_config_pb2.DeleteExclusionRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_exclusion"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 1add4e7046c8..1f04984b82d7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -23,6 +23,7 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template @@ -247,6 +248,19 @@ def delete_log( ) request = logging_pb2.DeleteLogRequest(log_name=log_name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = 
[("log_name", log_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_log"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -700,6 +714,19 @@ def list_logs( ) request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index faca01a595fe..4a987ed47d2d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -23,6 +23,7 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template @@ -267,6 +268,19 @@ def list_log_metrics( request = logging_metrics_pb2.ListLogMetricsRequest( parent=parent, page_size=page_size ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -338,6 +352,19 @@ def get_log_metric( ) request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("metric_name", metric_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_log_metric"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -411,6 +438,19 @@ def create_log_metric( request = logging_metrics_pb2.CreateLogMetricRequest( parent=parent, metric=metric ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_log_metric"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -485,6 +525,19 @@ def update_log_metric( request = logging_metrics_pb2.UpdateLogMetricRequest( metric_name=metric_name, metric=metric ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("metric_name", metric_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return 
self._inner_api_calls["update_log_metric"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -542,6 +595,19 @@ def delete_log_metric( ) request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("metric_name", metric_name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_log_metric"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index d46efbb6bebe..9680da28f7c2 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-02-26T13:20:43.903128Z", + "updateTime": "2019-03-28T12:19:01.156618Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.14", - "dockerImage": "googleapis/artman@sha256:f3d61ae45abaeefb6be5f228cda22732c2f1b00fb687c79c4bd4f2c42bb1e1a7" + "version": "0.16.20", + "dockerImage": "googleapis/artman@sha256:e3c054a2fb85a12481c722af616c7fb6f1d02d862248385eecbec3e4240ebd1e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "29f098cb03a9983cc9cb15993de5da64419046f2", - "internalRef": "235621085" + "sha": "6a84b3267b0a95e922608b9891219075047eee29", + "internalRef": "240640999" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.1.16" + "version": "2019.2.26" } } ], From db1472b392ac783b96943899e35b18f693a56f43 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Tue, 30 Apr 2019 09:37:35 -0700 Subject: [PATCH 228/855] Reorder methods in file (via synth). 
(#7810) --- .../gapic/config_service_v2_client.py | 18 +++++++++--------- .../gapic/metrics_service_v2_client.py | 12 ++++++------ packages/google-cloud-logging/synth.metadata | 12 ++++++------ 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index a2bb6dbbc587..da19b8c4da5f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -77,6 +77,15 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def exclusion_path(cls, project, exclusion): + """Return a fully-qualified exclusion string.""" + return google.api_core.path_template.expand( + "projects/{project}/exclusions/{exclusion}", + project=project, + exclusion=exclusion, + ) + @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" @@ -91,15 +100,6 @@ def sink_path(cls, project, sink): "projects/{project}/sinks/{sink}", project=project, sink=sink ) - @classmethod - def exclusion_path(cls, project, exclusion): - """Return a fully-qualified exclusion string.""" - return google.api_core.path_template.expand( - "projects/{project}/exclusions/{exclusion}", - project=project, - exclusion=exclusion, - ) - def __init__( self, transport=None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 4a987ed47d2d..99cfc44991b8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -77,17 +77,17 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" + def metric_path(cls, project, metric): + """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}/metrics/{metric}", project=project, metric=metric ) @classmethod - def metric_path(cls, project, metric): - """Return a fully-qualified metric string.""" + def project_path(cls, project): + """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric + "projects/{project}", project=project ) def __init__( diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 9680da28f7c2..938a0c325ed6 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-03-28T12:19:01.156618Z", + "updateTime": "2019-04-27T12:12:27.695009Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.20", - "dockerImage": "googleapis/artman@sha256:e3c054a2fb85a12481c722af616c7fb6f1d02d862248385eecbec3e4240ebd1e" + "version": "0.17.1", + "dockerImage": "googleapis/artman@sha256:a40ca4dd4ef031c0ded4df4909ffdf7b3f20d29b23e682ef991eb60ba0ca6025" } }, { "git": { "name": "googleapis", "remote": 
"https://github.com/googleapis/googleapis.git", - "sha": "6a84b3267b0a95e922608b9891219075047eee29", - "internalRef": "240640999" + "sha": "808110e242c682d7ac2bab6d9c49fc3bf72d7604", + "internalRef": "245313728" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.2.26" + "version": "2019.4.10" } } ], From bef16925e59ad9895104deb3497805c16f06943e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 8 May 2019 11:50:28 -0400 Subject: [PATCH 229/855] Add client_info support to client / connection. (#7874) --- .../google/cloud/logging/_http.py | 14 ++++++++----- .../google/cloud/logging/client.py | 18 +++++++++++++++-- .../tests/unit/test__http.py | 6 ++---- .../tests/unit/test_client.py | 20 ++++++++++++++++++- 4 files changed, 46 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index d13baa175c1e..aa6d511f0106 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -25,16 +25,22 @@ from google.cloud.logging.metric import Metric -_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) - - class Connection(_http.JSONConnection): """A connection to Google Stackdriver Logging via the JSON REST API. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns the current connection. + + :type client_info: :class:`~google.api_core.client_info.ClientInfo` + :param client_info: (Optional) instance used to generate user agent. """ + def __init__(self, client, client_info=None): + super(Connection, self).__init__(client, client_info) + + self._client_info.gapic_version = __version__ + self._client_info.client_library_version = __version__ + API_BASE_URL = "https://logging.googleapis.com" """The base of the API call URL.""" @@ -44,8 +50,6 @@ class Connection(_http.JSONConnection): API_URL_TEMPLATE = "{api_base_url}/{api_version}{path}" """A template for the URL of a particular API call.""" - _EXTRA_HEADERS = {_http.CLIENT_INFO_HEADER: _CLIENT_INFO} - class _LoggingAPI(object): """Helper mapping logging-related APIs. diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index b5f0b02daaf9..af65a83f1c9b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -86,6 +86,13 @@ class Client(ClientWithProject): environment variable This parameter should be considered private, and could change in the future. + + :type client_info: :class:`~google.api_core.client_info.ClientInfo` + :param client_info: + The client info used to send a user-agent string along with API + requests. If ``None``, then default info will be used. Generally, + you only need to set this if you're developing your own library + or partner tool. 
""" _logging_api = None @@ -100,11 +107,18 @@ class Client(ClientWithProject): ) """The scopes required for authenticating as a Logging consumer.""" - def __init__(self, project=None, credentials=None, _http=None, _use_grpc=None): + def __init__( + self, + project=None, + credentials=None, + _http=None, + _use_grpc=None, + client_info=None, + ): super(Client, self).__init__( project=project, credentials=credentials, _http=_http ) - self._connection = Connection(self) + self._connection = Connection(self, client_info=client_info) if _use_grpc is None: self._use_grpc = _USE_GRPC else: diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index e4c4ecb279ef..49abeafdcb77 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -44,9 +44,7 @@ def test_default_url(self): def test_extra_headers(self): import requests - from google.cloud import _http as base_http - from google.cloud.logging import _http as MUT http = mock.create_autospec(requests.Session, instance=True) response = requests.Response() @@ -63,8 +61,8 @@ def test_extra_headers(self): expected_headers = { "Accept-Encoding": "gzip", - base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - "User-Agent": conn.USER_AGENT, + base_http.CLIENT_INFO_HEADER: conn.user_agent, + "User-Agent": conn.user_agent, } expected_uri = conn.build_api_url("/rainbow") http.request.assert_called_once_with( diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 5ea17eb78f0b..740feb8d5a17 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -43,10 +43,28 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor(self): + def test_ctor_defaults(self): + from google.cloud._http import ClientInfo + from google.cloud.logging._http import Connection + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) + self.assertIsInstance(client._connection, Connection) + self.assertIsInstance(client._connection._client_info, ClientInfo) + + def test_ctor_explicit(self): + from google.cloud._http import ClientInfo + from google.cloud.logging._http import Connection + + creds = _make_credentials() + client_info = ClientInfo() + client = self._make_one( + project=self.PROJECT, credentials=creds, client_info=client_info + ) + self.assertEqual(client.project, self.PROJECT) + self.assertIsInstance(client._connection, Connection) + self.assertIs(client._connection._client_info, client_info) def test_logging_api_wo_gapic(self): from google.cloud.logging._http import _LoggingAPI From 544726dc7a8ebe3ecacd228c8aabd96f7f4b096c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 8 May 2019 16:16:04 -0400 Subject: [PATCH 230/855] Plumb 'Client._client_info' through to GAPIC API wrapper. 
(#7901) --- .../google/cloud/logging/_gapic.py | 11 +++-------- .../google/cloud/logging/client.py | 5 ++++- .../google-cloud-logging/tests/unit/test__gapic.py | 12 ++++++------ .../google-cloud-logging/tests/unit/test_client.py | 1 + 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gapic.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py index 00e9f5f2ee15..32897c088142 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging/_gapic.py @@ -17,7 +17,6 @@ import functools -from google.api_core.gapic_v1 import client_info from google.cloud.logging_v2.gapic.config_service_v2_client import ConfigServiceV2Client from google.cloud.logging_v2.gapic.logging_service_v2_client import ( LoggingServiceV2Client, @@ -31,15 +30,11 @@ from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict -from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric -_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) - - class _LoggingAPI(object): """Helper mapping logging-related APIs. @@ -544,7 +539,7 @@ def make_logging_api(client): :returns: A metrics API instance with the proper credentials. """ generated = LoggingServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) return _LoggingAPI(generated, client) @@ -559,7 +554,7 @@ def make_metrics_api(client): :returns: A metrics API instance with the proper credentials. """ generated = MetricsServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) return _MetricsAPI(generated, client) @@ -574,6 +569,6 @@ def make_sinks_api(client): :returns: A metrics API instance with the proper credentials. """ generated = ConfigServiceV2Client( - credentials=client._credentials, client_info=_CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index af65a83f1c9b..18bd8a335dcd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -87,7 +87,9 @@ class Client(ClientWithProject): This parameter should be considered private, and could change in the future. - :type client_info: :class:`~google.api_core.client_info.ClientInfo` + :type client_info: + :class:`google.api_core.client_info.ClientInfo` or + :class:`google.api_core.gapic_v1.client_info.ClientInfo` :param client_info: The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
Generally, @@ -119,6 +121,7 @@ def __init__( project=project, credentials=credentials, _http=_http ) self._connection = Connection(self, client_info=client_info) + self._client_info = client_info if _use_grpc is None: self._use_grpc = _USE_GRPC else: diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 03ff0a7a14f6..ad6ded2bd1f7 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -584,32 +584,32 @@ def test_registered_type(self): @mock.patch("google.cloud.logging._gapic.LoggingServiceV2Client", autospec=True) def test_make_logging_api(gapic_client): - client = mock.Mock(spec=["_credentials"]) + client = mock.Mock(spec=["_credentials", "_client_info"]) api = _gapic.make_logging_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) @mock.patch("google.cloud.logging._gapic.MetricsServiceV2Client", autospec=True) def test_make_metrics_api(gapic_client): - client = mock.Mock(spec=["_credentials"]) + client = mock.Mock(spec=["_credentials", "_client_info"]) api = _gapic.make_metrics_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) @mock.patch("google.cloud.logging._gapic.ConfigServiceV2Client", autospec=True) def test_make_sinks_api(gapic_client): - client = mock.Mock(spec=["_credentials"]) + client = mock.Mock(spec=["_credentials", "_client_info"]) api = _gapic.make_sinks_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=_gapic._CLIENT_INFO + credentials=client._credentials, client_info=client._client_info ) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 740feb8d5a17..e750df7de454 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -63,6 +63,7 @@ def test_ctor_explicit(self): project=self.PROJECT, credentials=creds, client_info=client_info ) self.assertEqual(client.project, self.PROJECT) + self.assertIs(client._client_info, client_info) self.assertIsInstance(client._connection, Connection) self.assertIs(client._connection._client_info, client_info) From 9ee6c9550d8592d863452dcf2f48ef7a5dd82fc2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 16 May 2019 13:22:32 -0400 Subject: [PATCH 231/855] Pin 'google-cloud-core >= 1.0.0, < 2.0dev'. 
(#7993) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6ca5ce279550..f162ac72430c 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', - 'google-cloud-core >= 0.29.0, < 0.30dev', + "google-cloud-core >= 1.0.0, < 2.0dev", ] extras = { } From cd88541e767fa26563adc740319d6437c9c77ac5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 16 May 2019 17:11:15 -0400 Subject: [PATCH 232/855] Release logging-1.11.0 (#8006) --- packages/google-cloud-logging/CHANGELOG.md | 27 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index ca0d1a6af370..d1b92bd471d1 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,33 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.11.0 + +05-16-2019 12:27 PDT + + +### Implementation Changes +- Add routing header to method metadata (via synth). ([#7598](https://github.com/googleapis/google-cloud-python/pull/7598)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) +- Use FQDN for GCE metadata endpoint. ([#7520](https://github.com/googleapis/google-cloud-python/pull/7520)) + +### New Features +- Add `client_info` support to client. ([#7874](https://github.com/googleapis/google-cloud-python/pull/7874)) and ([#7901](https://github.com/googleapis/google-cloud-python/pull/7901)) + +### Dependencies +- Pin `google-cloud-core >= 1.0.0, < 2.0dev`. ([#7993](https://github.com/googleapis/google-cloud-python/pull/7993)) + +### Documentation +- Update client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Reformat snippet (via synth). ([#7216](https://github.com/googleapis/google-cloud-python/pull/7216)) +- Add snippet for logging a resource. ([#7212](https://github.com/googleapis/google-cloud-python/pull/7212)) + +### Internal / Testing Changes +- Reorder methods in file (via synth). ([#7810](https://github.com/googleapis/google-cloud-python/pull/7810)) +- Copy lintified proto files (via synth). ([#7450](https://github.com/googleapis/google-cloud-python/pull/7450)) +- Trivial gapic-generator change. ([#7230](https://github.com/googleapis/google-cloud-python/pull/7230)) +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) + ## 1.10.0 01-17-2019 15:37 PST diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index f162ac72430c..c2109a604435 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.10.0' +version = '1.11.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ From ffd37259ef609d57ba3e0370495f098fe21e82b0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 17 May 2019 15:21:09 -0400 Subject: [PATCH 233/855] Use alabaster theme everywhere.
(#8021) 'sphinx_rtd_theme' is no longer installed by default. --- packages/google-cloud-logging/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 79ef19f4a583..ec7c3ac73fee 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -120,7 +120,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "sphinx_rtd_theme" +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the From 8f8fbfe89574925139c0acd1822b9df444e4d05e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 22 May 2019 09:19:27 -0700 Subject: [PATCH 234/855] [CHANGE ME] Re-generated logging to pick up changes in the API or client library generator. (#8064) --- .../logging_v2/gapic/config_service_v2_client.py | 2 ++ .../google/cloud/logging_v2/gapic/enums.py | 1 + .../logging_v2/gapic/logging_service_v2_client.py | 2 ++ .../logging_v2/gapic/metrics_service_v2_client.py | 2 ++ .../transports/config_service_v2_grpc_transport.py | 1 + .../transports/logging_service_v2_grpc_transport.py | 1 + .../transports/metrics_service_v2_grpc_transport.py | 1 + packages/google-cloud-logging/synth.metadata | 12 ++++++------ .../gapic/v2/test_config_service_v2_client_v2.py | 1 + .../gapic/v2/test_logging_service_v2_client_v2.py | 1 + .../gapic/v2/test_metrics_service_v2_client_v2.py | 1 + 11 files changed, 19 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index da19b8c4da5f..3e52190f050e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Accesses the google.logging.v2 ConfigServiceV2 API.""" import functools @@ -41,6 +42,7 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index 98ac090418ef..e677017ccbd2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ """Wrappers for protocol buffer enum types.""" import enum diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 1f04984b82d7..13a4ad4cfcd9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Accesses the google.logging.v2 LoggingServiceV2 API.""" import functools @@ -38,6 +39,7 @@ from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 99cfc44991b8..0ee7a871f5e3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Accesses the google.logging.v2 MetricsServiceV2 API.""" import functools @@ -43,6 +44,7 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 91102e9d6420..3ef9f6949eab 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import google.api_core.grpc_helpers from google.cloud.logging_v2.proto import logging_config_pb2_grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 4e3a9e82a06c..388b9e644431 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ import google.api_core.grpc_helpers from google.cloud.logging_v2.proto import logging_pb2_grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index f4f953f6a617..4b1e18149f24 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import google.api_core.grpc_helpers from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 938a0c325ed6..ecd32783bf21 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-04-27T12:12:27.695009Z", + "updateTime": "2019-05-21T12:25:22.738575Z", "sources": [ { "generator": { "name": "artman", - "version": "0.17.1", - "dockerImage": "googleapis/artman@sha256:a40ca4dd4ef031c0ded4df4909ffdf7b3f20d29b23e682ef991eb60ba0ca6025" + "version": "0.20.0", + "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "808110e242c682d7ac2bab6d9c49fc3bf72d7604", - "internalRef": "245313728" + "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", + "internalRef": "249058354" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.4.10" + "version": "2019.5.2" } } ], diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py index 9c299abaccf5..5ac89493cd7e 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Unit tests.""" import mock diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py index b43ac02f0068..30aa9b807329 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ """Unit tests.""" import mock diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py index ce00373b102b..e9dd3e348d48 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Unit tests.""" import mock From 6a17ce6f1fdc2cfba859c3256d464bd1346d7a07 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Jun 2019 14:33:19 -0400 Subject: [PATCH 235/855] Clarify worker thread implementation. (#8228) --- .../handlers/transports/background_thread.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 1eb6d212af5d..9b629a66e863 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -25,7 +25,6 @@ import threading import time -from six.moves import range from six.moves import queue from google.cloud.logging.handlers.transports.base import Transport @@ -56,8 +55,8 @@ def _get_many(queue_, max_items=None, max_latency=0): item from a queue. This number includes the time required to retrieve the first item. - :rtype: Sequence - :returns: A sequence of items retrieved from the queue. + :rtype: list + :returns: items retrieved from the queue. """ start = time.time() # Always return at least one item. @@ -132,8 +131,8 @@ def _thread_main(self): """ _LOGGER.debug("Background thread started.") - quit_ = False - while True: + done = False + while not done: batch = self._cloud_logger.batch() items = _get_many( self._queue, @@ -143,20 +142,15 @@ def _thread_main(self): for item in items: if item is _WORKER_TERMINATOR: - quit_ = True - # Continue processing items, don't break, try to process - # all items we got back before quitting. + done = True # Continue processing items. else: batch.log_struct(**item) self._safely_commit_batch(batch) - for _ in range(len(items)): + for _ in items: self._queue.task_done() - if quit_: - break - _LOGGER.debug("Background thread exited gracefully.") def start(self): From 54f7162f2a02c1c15f7b391c1d45469006d5acc7 Mon Sep 17 00:00:00 2001 From: Paul Adams Date: Thu, 6 Jun 2019 19:42:54 +0100 Subject: [PATCH 236/855] Set the 'timestamp' on log records created by handler. (#8227) Closes #8222. 
--- .../cloud/logging/handlers/transports/background_thread.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 9b629a66e863..7a743c23f458 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -261,6 +261,7 @@ def enqueue( "labels": labels, "trace": trace, "span_id": span_id, + "timestamp": datetime.utcfromtimestamp(record.created), } ) From c81cb2014be3c033749724b62bb457ee58e21522 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 24 Jun 2019 14:53:10 -0700 Subject: [PATCH 237/855] Fix tests broken in PR #8227. (#8273) --- .../logging/handlers/transports/background_thread.py | 3 ++- .../unit/handlers/transports/test_background_thread.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index 7a743c23f458..be7710ab1479 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -20,6 +20,7 @@ from __future__ import print_function import atexit +import datetime import logging import sys import threading @@ -261,7 +262,7 @@ def enqueue( "labels": labels, "trace": trace, "span_id": span_id, - "timestamp": datetime.utcfromtimestamp(record.created), + "timestamp": datetime.datetime.utcfromtimestamp(record.created), } ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 6842bc455968..717fb1ffc9d9 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time import logging import unittest @@ -243,7 +244,9 @@ def test__main_thread_terminated_non_empty_queue(self): worker = self._make_one(_Logger(self.NAME)) self._start_with_thread_patch(worker) - worker.enqueue(mock.Mock(), "") + record = mock.Mock() + record.created = time.time() + worker.enqueue(record, "") worker._main_thread_terminated() self.assertFalse(worker.is_alive) @@ -253,7 +256,9 @@ def test__main_thread_terminated_did_not_join(self): self._start_with_thread_patch(worker) worker._thread._terminate_on_join = False - worker.enqueue(mock.Mock(), "") + record = mock.Mock() + record.created = time.time() + worker.enqueue(record, "") worker._main_thread_terminated() self.assertFalse(worker.is_alive) @@ -431,6 +436,7 @@ def log_struct( labels=None, trace=None, span_id=None, + timestamp=None, ): from google.cloud.logging.logger import _GLOBAL_RESOURCE From 9b044a81575513c076ceb65fbafb6e4502327c17 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 25 Jun 2019 12:44:16 -0700 Subject: [PATCH 238/855] All: Add docs job to publish to googleapis.dev. 
(#8464) --- packages/google-cloud-logging/.repo-metadata.json | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 packages/google-cloud-logging/.repo-metadata.json diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json new file mode 100644 index 000000000000..33d1b1f819ee --- /dev/null +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -0,0 +1,12 @@ +{ + "name": "logging", + "name_pretty": "Stackdriver Logging", + "product_documentation": "https://cloud.google.com/logging/docs", + "client_documentation": "https://googleapis.dev/python/logging/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", + "release_level": "ga", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-logging", + "api_id": "logging.googleapis.com" +} \ No newline at end of file From b4ea0ce763f9f9f6baa2a3a57cac10f1a1803225 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 27 Jun 2019 14:29:17 -0700 Subject: [PATCH 239/855] Add nox session 'docs' to remaining manual clients. (#8478) --- packages/google-cloud-logging/docs/README.rst | 1 + packages/google-cloud-logging/docs/conf.py | 49 ++++++++++++++++--- packages/google-cloud-logging/docs/index.rst | 2 +- packages/google-cloud-logging/noxfile.py | 22 +++++++++ 4 files changed, 67 insertions(+), 7 deletions(-) create mode 120000 packages/google-cloud-logging/docs/README.rst diff --git a/packages/google-cloud-logging/docs/README.rst b/packages/google-cloud-logging/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-logging/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index ec7c3ac73fee..4095e0431c07 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -20,12 +20,12 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.91.4" +__version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' +needs_sphinx = "1.6.3" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -36,6 +36,7 @@ "sphinx.ext.intersphinx", "sphinx.ext.coverage", "sphinx.ext.napoleon", + "sphinx.ext.todo", "sphinx.ext.viewcode", ] @@ -44,13 +45,18 @@ autodoc_default_flags = ["members"] autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = ".rst" +source_suffix = [".rst", ".md"] # The encoding of source files. # source_encoding = 'utf-8-sig' @@ -116,6 +122,7 @@ # If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True + # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -125,7 +132,15 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# html_theme_options = {} +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -214,6 +229,18 @@ # Output file base name for HTML help builder. htmlhelp_basename = "google-cloud-logging-doc" +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + # -- Options for LaTeX output --------------------------------------------- latex_elements = { @@ -260,6 +287,7 @@ # If false, no module index is generated. # latex_domain_indices = True + # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples @@ -277,6 +305,7 @@ # If true, show URL addresses after external links. # man_show_urls = False + # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -289,7 +318,7 @@ u"google-cloud-logging Documentation", author, "google-cloud-logging", - "GAPIC library for the {metadata.shortName} v2 service", + "GAPIC library for the logging API", "APIs", ) ] @@ -306,12 +335,20 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False + # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google.api_core": ( + "https://googleapis.github.io/google-cloud-python/latest", + None, + ), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://docs.python-requests.org/en/master/", None), } + # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 0b0c45a7fe01..67ad362dfc69 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -1,4 +1,4 @@ -.. include:: /../logging/README.rst +.. 
include:: README.rst Usage Documentation ------------------- diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 089aeb588cfa..d2a53d2de4fb 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import os +import shutil import sys import nox @@ -165,3 +166,24 @@ def cover(session): session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) \ No newline at end of file From 7636df4bf01ee70aef9773f03b0edc4b0c78c77f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 28 Jun 2019 14:01:53 -0400 Subject: [PATCH 240/855] Add 'client_options' support, update list method docstrings (via synth). (#8535) Supersedes #8320. --- .../gapic/config_service_v2_client.py | 33 +++++++++++---- .../gapic/logging_service_v2_client.py | 41 +++++++++++++------ .../gapic/metrics_service_v2_client.py | 25 ++++++++--- .../config_service_v2_grpc_transport.py | 8 +++- .../logging_service_v2_grpc_transport.py | 8 +++- .../metrics_service_v2_grpc_transport.py | 8 +++- .../cloud/logging_v2/proto/log_entry_pb2.py | 1 + .../logging_v2/proto/logging_config_pb2.py | 1 + .../logging_v2/proto/logging_metrics_pb2.py | 1 + .../cloud/logging_v2/proto/logging_pb2.py | 1 + packages/google-cloud-logging/synth.metadata | 10 ++--- 11 files changed, 99 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 3e52190f050e..7102218ca68c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -21,6 +21,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -109,6 +110,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. @@ -139,6 +141,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ # Raise deprecation warnings for things we want to go away. 
if client_config is not None: @@ -157,6 +162,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. @@ -165,6 +179,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -175,7 +190,7 @@ def __init__( self.transport = transport else: self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: @@ -257,10 +272,10 @@ def list_sinks( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -753,10 +768,10 @@ def list_exclusions( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 13a4ad4cfcd9..4c24acbb2bed 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -21,6 +21,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -94,6 +95,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. @@ -124,6 +126,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ # Raise deprecation warnings for things we want to go away. 
if client_config is not None: @@ -142,6 +147,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. @@ -150,6 +164,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -160,7 +175,7 @@ def __init__( self.transport = transport else: self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: @@ -500,10 +515,10 @@ def list_log_entries( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -590,10 +605,10 @@ def list_monitored_resource_descriptors( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -692,10 +707,10 @@ def list_logs( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`str` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`str` instances. + You can also iterate over the pages of the response + using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 0ee7a871f5e3..22a9a32aef55 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -21,6 +21,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -99,6 +100,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. @@ -129,6 +131,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ # Raise deprecation warnings for things we want to go away. if client_config is not None: @@ -147,6 +152,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. @@ -155,6 +169,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -165,7 +180,7 @@ def __init__( self.transport = transport else: self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: @@ -244,10 +259,10 @@ def list_log_metrics( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. + You can also iterate over the pages of the response + using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 3ef9f6949eab..61f6a0fd514b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -77,7 +77,9 @@ def __init__( } @classmethod - def create_channel(cls, address="logging.googleapis.com:443", credentials=None): + def create_channel( + cls, address="logging.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. Args: @@ -87,12 +89,14 @@ def create_channel(cls, address="logging.googleapis.com:443", credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 388b9e644431..c09932918fb9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -75,7 +75,9 @@ def __init__( } @classmethod - def create_channel(cls, address="logging.googleapis.com:443", credentials=None): + def create_channel( + cls, address="logging.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. Args: @@ -85,12 +87,14 @@ def create_channel(cls, address="logging.googleapis.com:443", credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 4b1e18149f24..badaea4b3c98 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -77,7 +77,9 @@ def __init__( } @classmethod - def create_channel(cls, address="logging.googleapis.com:443", credentials=None): + def create_channel( + cls, address="logging.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. 
Args: @@ -87,12 +89,14 @@ def create_channel(cls, address="logging.googleapis.com:443", credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 3aa17006be30..6dc9ec5817f1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/log_entry.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index d62ef9c95eec..cf50eedc6090 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/logging_config.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 54dc8f703c20..fc50185871d2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/logging_metrics.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index a37710bcf382..2438534d754b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging.proto diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index ecd32783bf21..7be7882c8885 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-05-21T12:25:22.738575Z", + "updateTime": "2019-06-28T17:38:07.047804Z", "sources": [ { "generator": { "name": "artman", - "version": "0.20.0", - "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" + "version": "0.29.2", + "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", - "internalRef": "249058354" + "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", + "internalRef": "255474859" } }, { From f58bf676d58ca5334bf3b785552ed504e0193b1d Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 1 Jul 2019 12:39:55 -0700 Subject: [PATCH 241/855] Re-add "generated" markers (via synth). (#8538) --- packages/google-cloud-logging/.coveragerc | 1 + packages/google-cloud-logging/.flake8 | 1 + packages/google-cloud-logging/setup.cfg | 1 + packages/google-cloud-logging/synth.metadata | 2 +- 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index 6b9ab9da4a1b..b178b094aa1d 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [run] branch = True diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 61766fa84d02..0268ecc9c55c 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 exclude = diff --git a/packages/google-cloud-logging/setup.cfg b/packages/google-cloud-logging/setup.cfg index 2a9acf13daa9..3bd555500e37 100644 --- a/packages/google-cloud-logging/setup.cfg +++ b/packages/google-cloud-logging/setup.cfg @@ -1,2 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! [bdist_wheel] universal = 1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 7be7882c8885..b1a117da4b4c 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-06-28T17:38:07.047804Z", + "updateTime": "2019-06-29T12:26:54.494632Z", "sources": [ { "generator": { From 4a9d897b43d7001dc6c2fdef0b80e11074345afb Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 10 Jul 2019 10:18:01 -0700 Subject: [PATCH 242/855] Add path-construction helpers to GAPIC clients (via synth). 
(#8631) --- .../gapic/config_service_v2_client.py | 71 +++++++++++++++++++ .../gapic/logging_service_v2_client.py | 44 ++++++++++++ .../gapic/metrics_service_v2_client.py | 19 +++++ packages/google-cloud-logging/synth.metadata | 10 +-- 4 files changed, 139 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 7102218ca68c..307e4a75917a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -80,6 +80,31 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def billing_path(cls, billing_account): + """Return a fully-qualified billing string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}", billing_account=billing_account + ) + + @classmethod + def billing_exclusion_path(cls, billing_account, exclusion): + """Return a fully-qualified billing_exclusion string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}/exclusions/{exclusion}", + billing_account=billing_account, + exclusion=exclusion, + ) + + @classmethod + def billing_sink_path(cls, billing_account, sink): + """Return a fully-qualified billing_sink string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}/sinks/{sink}", + billing_account=billing_account, + sink=sink, + ) + @classmethod def exclusion_path(cls, project, exclusion): """Return a fully-qualified exclusion string.""" @@ -89,6 +114,52 @@ def exclusion_path(cls, project, exclusion): exclusion=exclusion, ) + @classmethod + def folder_path(cls, folder): + """Return a fully-qualified folder string.""" + return google.api_core.path_template.expand("folders/{folder}", folder=folder) + + @classmethod + def folder_exclusion_path(cls, folder, exclusion): + """Return a fully-qualified folder_exclusion string.""" + return google.api_core.path_template.expand( + "folders/{folder}/exclusions/{exclusion}", + folder=folder, + exclusion=exclusion, + ) + + @classmethod + def folder_sink_path(cls, folder, sink): + """Return a fully-qualified folder_sink string.""" + return google.api_core.path_template.expand( + "folders/{folder}/sinks/{sink}", folder=folder, sink=sink + ) + + @classmethod + def organization_path(cls, organization): + """Return a fully-qualified organization string.""" + return google.api_core.path_template.expand( + "organizations/{organization}", organization=organization + ) + + @classmethod + def organization_exclusion_path(cls, organization, exclusion): + """Return a fully-qualified organization_exclusion string.""" + return google.api_core.path_template.expand( + "organizations/{organization}/exclusions/{exclusion}", + organization=organization, + exclusion=exclusion, + ) + + @classmethod + def organization_sink_path(cls, organization, sink): + """Return a fully-qualified organization_sink string.""" + return google.api_core.path_template.expand( + "organizations/{organization}/sinks/{sink}", + organization=organization, + sink=sink, + ) + @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 4c24acbb2bed..6b1634c8ce02 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -74,6 +74,34 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def billing_path(cls, billing_account): + """Return a fully-qualified billing string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}", billing_account=billing_account + ) + + @classmethod + def billing_log_path(cls, billing_account, log): + """Return a fully-qualified billing_log string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}/logs/{log}", + billing_account=billing_account, + log=log, + ) + + @classmethod + def folder_path(cls, folder): + """Return a fully-qualified folder string.""" + return google.api_core.path_template.expand("folders/{folder}", folder=folder) + + @classmethod + def folder_log_path(cls, folder, log): + """Return a fully-qualified folder_log string.""" + return google.api_core.path_template.expand( + "folders/{folder}/logs/{log}", folder=folder, log=log + ) + @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" @@ -81,6 +109,22 @@ def log_path(cls, project, log): "projects/{project}/logs/{log}", project=project, log=log ) + @classmethod + def organization_path(cls, organization): + """Return a fully-qualified organization string.""" + return google.api_core.path_template.expand( + "organizations/{organization}", organization=organization + ) + + @classmethod + def organization_log_path(cls, organization, log): + """Return a fully-qualified organization_log string.""" + return google.api_core.path_template.expand( + "organizations/{organization}/logs/{log}", + organization=organization, + log=log, + ) + @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 22a9a32aef55..d9970710c3f7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -79,6 +79,18 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def billing_path(cls, billing_account): + """Return a fully-qualified billing string.""" + return google.api_core.path_template.expand( + "billingAccounts/{billing_account}", billing_account=billing_account + ) + + @classmethod + def folder_path(cls, folder): + """Return a fully-qualified folder string.""" + return google.api_core.path_template.expand("folders/{folder}", folder=folder) + @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" @@ -86,6 +98,13 @@ def metric_path(cls, project, metric): "projects/{project}/metrics/{metric}", project=project, metric=metric ) + @classmethod + def organization_path(cls, organization): + """Return a fully-qualified organization string.""" + return google.api_core.path_template.expand( + "organizations/{organization}", organization=organization + ) + 
@classmethod def project_path(cls, project): """Return a fully-qualified project string.""" diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index b1a117da4b4c..e5b76fc5e00b 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-29T12:26:54.494632Z", + "updateTime": "2019-07-10T12:23:32.614992Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.2", - "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" + "version": "0.29.4", + "dockerImage": "googleapis/artman@sha256:63f21e83cb92680b7001dc381069e962c9e6dee314fd8365ac554c07c89221fb" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", - "internalRef": "255474859" + "sha": "16c0ea3cde17a897ba04b7b94d9bf4dd57e3227e", + "internalRef": "257239177" } }, { From 72aa1adfd8fd5c7beab6af3125d01f9b02ff4260 Mon Sep 17 00:00:00 2001 From: ylil93 Date: Mon, 15 Jul 2019 12:12:29 -0700 Subject: [PATCH 243/855] Add compatibility check badges to READMEs. (#8288) --- packages/google-cloud-logging/README.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index e7687dc74bb1..a06077102e76 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -1,7 +1,7 @@ Python Client for Stackdriver Logging ===================================== -|pypi| |versions| +|pypi| |versions| |compat_check_pypi| |compat_check_github| `Stackdriver Logging API`_: Writes log entries and manages your Stackdriver Logging configuration. @@ -13,6 +13,10 @@ Logging configuration. :target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ +.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-logging + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-logging +.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging .. _Stackdriver Logging API: https://cloud.google.com/logging .. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/logging/usage.html .. _Product Documentation: https://cloud.google.com/logging/docs From df8854f1101179c5c2686be3ea3b518e1a04cdcd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 19 Jul 2019 13:31:47 -0400 Subject: [PATCH 244/855] Bump minimum version for google-api-core to 1.14.0. 
(#8709) --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c2109a604435..75f0f3675f5b 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', "google-cloud-core >= 1.0.0, < 2.0dev", ] extras = { From 91e56686de4f30d1bd07616f39e8ce2be8e3dcc5 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 19 Jul 2019 14:45:47 -0700 Subject: [PATCH 245/855] Link to googleapis.dev documentation in READMEs. (#8705) --- packages/google-cloud-logging/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a06077102e76..dd60385f136f 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -18,7 +18,7 @@ Logging configuration. .. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging .. _Stackdriver Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/logging/usage.html +.. _Client Library Documentation: https://googleapis.dev/python/logging/latest .. _Product Documentation: https://cloud.google.com/logging/docs Quick Start @@ -34,7 +34,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging -.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ From 0658e2d8e2d62ed90572a63d44fa23e16716a10d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 26 Jul 2019 10:28:27 -0700 Subject: [PATCH 246/855] Release 1.12.0 (#8758) --- packages/google-cloud-logging/CHANGELOG.md | 28 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index d1b92bd471d1..173f7c81025a 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,34 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.12.0 + +07-24-2019 16:47 PDT + + +### Implementation Changes +- Set the 'timestamp' on log records created by handler. ([#8227](https://github.com/googleapis/google-cloud-python/pull/8227)) +- Clarify worker thread implementation. 
([#8228](https://github.com/googleapis/google-cloud-python/pull/8228)) + +### New Features +- Add path-construction helpers to GAPIC clients (via synth). ([#8631](https://github.com/googleapis/google-cloud-python/pull/8631)) +- Add 'client_options' support, update list method docstrings (via synth). ([#8535](https://github.com/googleapis/google-cloud-python/pull/8535)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +### Internal / Testing Changes +- Re-add "generated" markers (via synth). ([#8538](https://github.com/googleapis/google-cloud-python/pull/8538)) +- Add nox session 'docs' to remaining manual clients. ([#8478](https://github.com/googleapis/google-cloud-python/pull/8478)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Fix tests broken in PR [#8227](https://github.com/googleapis/google-cloud-python/pull/8227). ([#8273](https://github.com/googleapis/google-cloud-python/pull/8273)) +- Add empty lines. ([#8064](https://github.com/googleapis/google-cloud-python/pull/8064)) +- Use alabaster theme everwhere. ([#8021](https://github.com/googleapis/google-cloud-python/pull/8021)) + ## 1.11.0 05-16-2019 12:27 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 75f0f3675f5b..25fa8ebde55f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.11.0' +version = '1.12.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 41e54c8bc7766bc721554ab45f29dff751ce8a06 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 29 Jul 2019 12:53:23 -0700 Subject: [PATCH 247/855] Update intersphinx mapping for requests. (#8805) --- packages/google-cloud-logging/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 4095e0431c07..7c979b311598 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -345,7 +345,7 @@ None, ), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://docs.python-requests.org/en/master/", None), + "requests": ("https://2.python-requests.org/en/master/", None), } From 4e91e7e378a471efaaac67b60e03f10574cb719b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 31 Jul 2019 17:04:46 -0400 Subject: [PATCH 248/855] Map stdlib loglevels to Stackdriver severity enum values. (#8837) Closes #7213. 
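
A rough sketch of the resulting behavior, for illustration only (the
helper names below are the internal ones this patch adds to
google.cloud.logging._helpers; enum values follow the Stackdriver
LogSeverity API):

    import logging

    from google.cloud.logging._helpers import LogSeverity, _normalize_severity

    # Standard stdlib levels map to the corresponding enum values.
    assert _normalize_severity(logging.WARNING) == LogSeverity.WARNING  # 400
    assert _normalize_severity(logging.NOTSET) == LogSeverity.DEFAULT   # 0

    # Non-standard levels pass through unchanged.
    assert _normalize_severity(35) == 35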
--- .../google/cloud/logging/_helpers.py | 41 +++++++++++++ .../handlers/transports/background_thread.py | 22 +++---- .../cloud/logging/handlers/transports/sync.py | 3 +- .../tests/system/test_system.py | 9 ++- .../transports/test_background_thread.py | 59 +++++++++++++++++-- .../unit/handlers/transports/test_sync.py | 10 +++- .../tests/unit/test__helpers.py | 42 +++++++++++++ 7 files changed, 166 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index ed7a0ba8cf7b..4df8b12736bc 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -14,6 +14,8 @@ """Common logging helpers.""" +import logging + import requests from google.cloud.logging.entries import LogEntry @@ -21,6 +23,33 @@ from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry +try: + from google.cloud.logging_v2.gapic.enums import LogSeverity +except ImportError: # pragma: NO COVER + + class LogSeverity(object): + """Map severities for non-GAPIC usage.""" + + DEFAULT = 0 + DEBUG = 100 + INFO = 200 + NOTICE = 300 + WARNING = 400 + ERROR = 500 + CRITICAL = 600 + ALERT = 700 + EMERGENCY = 800 + + +_NORMALIZED_SEVERITIES = { + logging.CRITICAL: LogSeverity.CRITICAL, + logging.ERROR: LogSeverity.ERROR, + logging.WARNING: LogSeverity.WARNING, + logging.INFO: LogSeverity.INFO, + logging.DEBUG: LogSeverity.DEBUG, + logging.NOTSET: LogSeverity.DEFAULT, +} + METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/" METADATA_HEADERS = {"Metadata-Flavor": "Google"} @@ -82,3 +111,15 @@ def retrieve_metadata_server(metadata_key): pass return None + + +def _normalize_severity(stdlib_level): + """Normalize a Python stdlib severity to LogSeverity enum. + + :type stdlib_level: int + :param stdlib_level: 'levelno' from a :class:`logging.LogRecord` + + :rtype: int + :returns: Corresponding Stackdriver severity. + """ + return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py index be7710ab1479..812b733cff92 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py @@ -28,6 +28,7 @@ from six.moves import queue +from google.cloud.logging import _helpers from google.cloud.logging.handlers.transports.base import Transport _DEFAULT_GRACE_PERIOD = 5.0 # Seconds @@ -254,17 +255,16 @@ def enqueue( :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. 
""" - self._queue.put_nowait( - { - "info": {"message": message, "python_logger": record.name}, - "severity": record.levelname, - "resource": resource, - "labels": labels, - "trace": trace, - "span_id": span_id, - "timestamp": datetime.datetime.utcfromtimestamp(record.created), - } - ) + queue_entry = { + "info": {"message": message, "python_logger": record.name}, + "severity": _helpers._normalize_severity(record.levelno), + "resource": resource, + "labels": labels, + "trace": trace, + "span_id": span_id, + "timestamp": datetime.datetime.utcfromtimestamp(record.created), + } + self._queue.put_nowait(queue_entry) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py index 861f1ab3fdf7..e87eb4885fbf 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py @@ -17,6 +17,7 @@ Logs directly to the the Stackdriver Logging API with a synchronous call. """ +from google.cloud.logging import _helpers from google.cloud.logging.handlers.transports.base import Transport @@ -50,7 +51,7 @@ def send( info = {"message": message, "python_logger": record.name} self.logger.log_struct( info, - severity=record.levelname, + severity=_helpers._normalize_severity(record.levelno), resource=resource, labels=labels, trace=trace, diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 63add9f26fcf..8e2d694081a1 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -105,13 +105,16 @@ def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] def tearDown(self): - retry = RetryErrors((NotFound, TooManyRequests, RetryError), max_tries=9) + retry_not_found = RetryErrors((NotFound), max_tries=4) + retry_other = RetryErrors((TooManyRequests, RetryError)) for doomed in self.to_delete: try: - retry(doomed.delete)() + retry_not_found(retry_other(doomed.delete))() except AttributeError: client, dataset = doomed - retry(client.delete_dataset)(dataset) + retry_not_found(retry_other(client.delete_dataset))(dataset) + except NotFound: + pass logging.getLogger().handlers = self._handlers_cache[:] @staticmethod diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 717fb1ffc9d9..7de912560aa7 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -264,11 +264,62 @@ def test__main_thread_terminated_did_not_join(self): self.assertFalse(worker.is_alive) @staticmethod - def _enqueue_record(worker, message): - record = logging.LogRecord( - "python_logger", logging.INFO, None, None, message, None, None + def _enqueue_record(worker, message, levelno=logging.INFO, **kw): + record = logging.LogRecord("testing", levelno, None, None, message, None, None) + worker.enqueue(record, message, **kw) + + def test_enqueue_defaults(self): + import datetime + from google.cloud.logging._helpers import LogSeverity + + worker = self._make_one(_Logger(self.NAME)) + self.assertTrue(worker._queue.empty()) + message = "TEST 
SEVERITY" + + self._enqueue_record(worker, message) + + entry = worker._queue.get_nowait() + expected_info = {"message": message, "python_logger": "testing"} + self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["severity"], LogSeverity.INFO) + self.assertIsNone(entry["resource"]) + self.assertIsNone(entry["labels"]) + self.assertIsNone(entry["trace"]) + self.assertIsNone(entry["span_id"]) + self.assertIsInstance(entry["timestamp"], datetime.datetime) + + def test_enqueue_explicit(self): + import datetime + from google.cloud.logging._helpers import LogSeverity + + worker = self._make_one(_Logger(self.NAME)) + self.assertTrue(worker._queue.empty()) + message = "TEST SEVERITY" + resource = object() + labels = {"foo": "bar"} + trace = "TRACE" + span_id = "SPAN_ID" + + self._enqueue_record( + worker, + message, + levelno=logging.ERROR, + resource=resource, + labels=labels, + trace=trace, + span_id=span_id, ) - worker.enqueue(record, message) + + entry = worker._queue.get_nowait() + + expected_info = {"message": message, "python_logger": "testing"} + self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["severity"], LogSeverity.ERROR) + self.assertIs(entry["resource"], resource) + self.assertIs(entry["labels"], labels) + self.assertIs(entry["trace"], trace) + self.assertIs(entry["span_id"], span_id) + self.assertIsInstance(entry["timestamp"], datetime.datetime) def test__thread_main(self): from google.cloud.logging.handlers.transports import background_thread diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index ca6e9260c7a1..f2ff67d59d82 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -37,6 +37,7 @@ def test_ctor(self): def test_send(self): from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging._helpers import LogSeverity client = _Client(self.PROJECT) @@ -50,7 +51,14 @@ def test_send(self): transport.send(record, message, _GLOBAL_RESOURCE) EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} - EXPECTED_SENT = (EXPECTED_STRUCT, "INFO", _GLOBAL_RESOURCE, None, None, None) + EXPECTED_SENT = ( + EXPECTED_STRUCT, + LogSeverity.INFO, + _GLOBAL_RESOURCE, + None, + None, + None, + ) self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index 7f11988f5275..db0804e66638 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -13,6 +13,7 @@ # limitations under the License. 
+import logging import unittest import mock @@ -121,6 +122,47 @@ def test_request_exception(self): self.assertIsNone(metadata) +class Test__normalize_severity(unittest.TestCase): + @staticmethod + def _stackdriver_severity(): + from google.cloud.logging._helpers import LogSeverity + + return LogSeverity + + def _normalize_severity_helper(self, stdlib_level, enum_level): + from google.cloud.logging._helpers import _normalize_severity + + self.assertEqual(_normalize_severity(stdlib_level), enum_level) + + def test__normalize_severity_critical(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.CRITICAL, severity.CRITICAL) + + def test__normalize_severity_error(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.ERROR, severity.ERROR) + + def test__normalize_severity_warning(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.WARNING, severity.WARNING) + + def test__normalize_severity_info(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.INFO, severity.INFO) + + def test__normalize_severity_debug(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.DEBUG, severity.DEBUG) + + def test__normalize_severity_notset(self): + severity = self._stackdriver_severity() + self._normalize_severity_helper(logging.NOTSET, severity.DEFAULT) + + def test__normalize_severity_non_standard(self): + unknown_level = 35 + self._normalize_severity_helper(unknown_level, unknown_level) + + class EntryMock(object): def __init__(self): self.sentinel = object() From 11c890c78ef8f4d79e4dd5989e501196b45f3681 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 31 Jul 2019 17:46:55 -0400 Subject: [PATCH 249/855] Fix 'list_entries' example with projects. (#8858) Closes #7916. 
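
For reference, the corrected usage looks like the following sketch
(mirroring docs/snippets.py as changed below; do_something_with is the
snippet's placeholder callback, not a library function):

    from google.cloud import logging

    client = logging.Client()
    PROJECT_IDS = ["one-project", "another-project"]
    for entry in client.list_entries(projects=PROJECT_IDS):  # API call(s)
        do_something_with(entry)

Client.list_entries accepts 'projects', not the 'project_ids' keyword
the snippet previously used.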
--- packages/google-cloud-logging/docs/snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index 1a823994dc7c..778327989b0f 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -108,7 +108,7 @@ def client_list_entries_multi_project( # [START client_list_entries_multi_project] PROJECT_IDS = ["one-project", "another-project"] - for entry in client.list_entries(project_ids=PROJECT_IDS): # API call(s) + for entry in client.list_entries(projects=PROJECT_IDS): # API call(s) do_something_with(entry) # [END client_list_entries_multi_project] From 6a1a81e2c542f975f563b9ff08d6a632511ee120 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 1 Aug 2019 09:44:19 -0700 Subject: [PATCH 250/855] Remove gRPC size restrictions (4MB default) (#8860) --- .../transports/config_service_v2_grpc_transport.py | 9 ++++++++- .../transports/logging_service_v2_grpc_transport.py | 9 ++++++++- .../transports/metrics_service_v2_grpc_transport.py | 9 ++++++++- packages/google-cloud-logging/synth.metadata | 10 +++++----- packages/google-cloud-logging/synth.py | 8 ++++++++ 5 files changed, 37 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 61f6a0fd514b..8157764ec872 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -64,7 +64,14 @@ def __init__( # Create the channel. if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index c09932918fb9..caa20c480225 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -64,7 +64,14 @@ def __init__( # Create the channel. 
if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index badaea4b3c98..426edce6edd7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -64,7 +64,14 @@ def __init__( # Create the channel. if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index e5b76fc5e00b..7b675cf58427 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-07-10T12:23:32.614992Z", + "updateTime": "2019-08-01T00:00:23.364082Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.4", - "dockerImage": "googleapis/artman@sha256:63f21e83cb92680b7001dc381069e962c9e6dee314fd8365ac554c07c89221fb" + "version": "0.31.0", + "dockerImage": "googleapis/artman@sha256:9aed6bbde54e26d2fcde7aa86d9f64c0278f741e58808c46573e488cbf6098f0" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "16c0ea3cde17a897ba04b7b94d9bf4dd57e3227e", - "internalRef": "257239177" + "sha": "e3b51e761db720974bc153a380a2f7ae5baee43d", + "internalRef": "260992170" } }, { diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index c24382ce4d72..ec4788fc35f4 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -38,6 +38,14 @@ # https://github.com/googleapis/gapic-generator/issues/2097 s.replace("google/**/proto/*_pb2.py", r"(^.*$\n)*", r"# -*- coding: utf-8 -*-\n\g<0>") +# the logging service grpc transport channel shouldn't limit the size of a grpc message at the default 4mb +s.replace("google/cloud/logging_v2/gapic/transports/*_service_v2_grpc_transport.py", + "channel =.*\n(\s+)address=.*\n\s+credentials=.*,\n", + "\g<0>\g<1>options={\n" + "\g<1> 'grpc.max_send_message_length': -1,\n" + "\g<1> 'grpc.max_receive_message_length': -1,\n" + "\g<1>}.items(),\n") + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From 9c0213bc67f7cdb16c79236c86aade2e57115420 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 1 Aug 2019 10:12:52 -0700 Subject: [PATCH 251/855] Release 1.12.1 (#8881) --- packages/google-cloud-logging/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md 
b/packages/google-cloud-logging/CHANGELOG.md index 173f7c81025a..f7ad1b7451ed 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.12.1 + +08-01-2019 09:45 PDT + + +### Implementation Changes +- Remove gRPC size restrictions (4MB default) ([#8860](https://github.com/googleapis/google-cloud-python/pull/8860)) +- Map stdlib loglevels to Stackdriver severity enum values. ([#8837](https://github.com/googleapis/google-cloud-python/pull/8837)) + +### Documentation +- Fix 'list_entries' example with projects. ([#8858](https://github.com/googleapis/google-cloud-python/pull/8858)) + +### Internal / Testing Changes +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.12.0 07-24-2019 16:47 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 25fa8ebde55f..6d3da8a110f9 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.12.0' +version = '1.12.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From f32bf6194ea367a0d3dac761cac65eaff0ff2a8a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 5 Aug 2019 15:35:37 -0400 Subject: [PATCH 252/855] Delete custom synth removing gRPC send/recv msg size limits. (#8939) Now handled via gapic-generator. Closes #8931. --- .../gapic/config_service_v2_client.py | 40 +++++++++---------- .../gapic/logging_service_v2_client.py | 20 +++++----- .../gapic/metrics_service_v2_client.py | 20 +++++----- packages/google-cloud-logging/synth.metadata | 10 ++--- packages/google-cloud-logging/synth.py | 8 ---- 5 files changed, 45 insertions(+), 53 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 307e4a75917a..df942566a397 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -334,8 +334,8 @@ def list_sinks( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -428,8 +428,8 @@ def get_sink( Example: ``"projects/my-project-id/sinks/my-sink-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
@@ -531,8 +531,8 @@ def create_sink( will be a unique service account used only for exports from the new sink. For more information, see ``writer_identity`` in ``LogSink``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -654,8 +654,8 @@ def update_sink( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -739,8 +739,8 @@ def delete_sink( Example: ``"projects/my-project-id/sinks/my-sink-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -830,8 +830,8 @@ def list_exclusions( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -924,8 +924,8 @@ def get_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1014,8 +1014,8 @@ def create_exclusion( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogExclusion` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
@@ -1117,8 +1117,8 @@ def update_exclusion( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1197,8 +1197,8 @@ def delete_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 6b1634c8ce02..22aa07c592ae 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -282,8 +282,8 @@ def delete_log( ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. For more information about log names, see ``LogEntry``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -433,8 +433,8 @@ def write_log_entries( entries won't be persisted nor exported. Useful for checking whether the logging API endpoints are working properly before sending valuable data. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -550,8 +550,8 @@ def list_log_entries( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -640,8 +640,8 @@ def list_monitored_resource_descriptors( streaming is performed per-page, this determines the maximum number of resources in a page. 
retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -742,8 +742,8 @@ def list_logs( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index d9970710c3f7..59dae9c7a78f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -269,8 +269,8 @@ def list_log_metrics( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -358,8 +358,8 @@ def get_log_metric( "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -442,8 +442,8 @@ def create_log_metric( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogMetric` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -529,8 +529,8 @@ def update_log_metric( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogMetric` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -604,8 +604,8 @@ def delete_log_metric( "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 7b675cf58427..8d3384c18458 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-01T00:00:23.364082Z", + "updateTime": "2019-08-05T19:11:10.264038Z", "sources": [ { "generator": { "name": "artman", - "version": "0.31.0", - "dockerImage": "googleapis/artman@sha256:9aed6bbde54e26d2fcde7aa86d9f64c0278f741e58808c46573e488cbf6098f0" + "version": "0.32.1", + "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e3b51e761db720974bc153a380a2f7ae5baee43d", - "internalRef": "260992170" + "sha": "f94d89f4b75b4bdf6a254da44eb0f70d34bcca14", + "internalRef": "261701508" } }, { diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index ec4788fc35f4..c24382ce4d72 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -38,14 +38,6 @@ # https://github.com/googleapis/gapic-generator/issues/2097 s.replace("google/**/proto/*_pb2.py", r"(^.*$\n)*", r"# -*- coding: utf-8 -*-\n\g<0>") -# the logging service grpc transport channel shouldn't limit the size of a grpc message at the default 4mb -s.replace("google/cloud/logging_v2/gapic/transports/*_service_v2_grpc_transport.py", - "channel =.*\n(\s+)address=.*\n\s+credentials=.*,\n", - "\g<0>\g<1>options={\n" - "\g<1> 'grpc.max_send_message_length': -1,\n" - "\g<1> 'grpc.max_receive_message_length': -1,\n" - "\g<1>}.items(),\n") - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From 6e615c9754ab22998f683e3dc78f4732c6418787 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 16 Aug 2019 13:25:32 -0700 Subject: [PATCH 253/855] Remove compatability badges from READMEs. (#9035) --- packages/google-cloud-logging/README.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index dd60385f136f..220a6cf17be2 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -1,7 +1,7 @@ Python Client for Stackdriver Logging ===================================== -|pypi| |versions| |compat_check_pypi| |compat_check_github| +|pypi| |versions| `Stackdriver Logging API`_: Writes log entries and manages your Stackdriver Logging configuration. @@ -13,10 +13,6 @@ Logging configuration. 
:target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ -.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-logging - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-logging -.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dlogging .. _Stackdriver Logging API: https://cloud.google.com/logging .. _Client Library Documentation: https://googleapis.dev/python/logging/latest .. _Product Documentation: https://cloud.google.com/logging/docs From 3bb55f8184b9c74a9eb1143746db3e4e7b35bc88 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 27 Aug 2019 16:35:22 -0700 Subject: [PATCH 254/855] Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. (#9085) --- packages/google-cloud-logging/docs/conf.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 7c979b311598..da2faa63ec4f 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -340,10 +340,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ( - "https://googleapis.github.io/google-cloud-python/latest", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), "requests": ("https://2.python-requests.org/en/master/", None), } From bc24550bb7f4642bbdf10a8acb91f67d6a20e526 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 28 Aug 2019 09:34:16 -0700 Subject: [PATCH 255/855] Add client_options to logging v1 (#9046) --- .../google/cloud/logging/_http.py | 13 ++++--- .../google/cloud/logging/client.py | 20 ++++++++++- .../tests/unit/test__http.py | 6 ++++ .../tests/unit/test_client.py | 36 +++++++++++++++++++ 4 files changed, 69 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py index aa6d511f0106..deb6b394f49d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging/_http.py @@ -33,17 +33,20 @@ class Connection(_http.JSONConnection): :type client_info: :class:`~google.api_core.client_info.ClientInfo` :param client_info: (Optional) instance used to generate user agent. + + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + :param client_options (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
""" - def __init__(self, client, client_info=None): - super(Connection, self).__init__(client, client_info) + DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" + def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): + super(Connection, self).__init__(client, client_info) + self.API_BASE_URL = api_endpoint self._client_info.gapic_version = __version__ self._client_info.client_library_version = __version__ - API_BASE_URL = "https://logging.googleapis.com" - """The base of the API call URL.""" - API_VERSION = "v2" """The version of the API, used in building the API call's URL.""" diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 18bd8a335dcd..680c29c8a9dd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -25,6 +25,7 @@ else: _HAVE_GRPC = True +import google.api_core.client_options from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.logging._helpers import retrieve_metadata_server @@ -95,6 +96,10 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. """ _logging_api = None @@ -116,11 +121,24 @@ def __init__( _http=None, _use_grpc=None, client_info=None, + client_options=None, ): super(Client, self).__init__( project=project, credentials=credentials, _http=_http ) - self._connection = Connection(self, client_info=client_info) + + kw_args = {"client_info": client_info} + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + kw_args["api_endpoint"] = api_endpoint + + self._connection = Connection(self, **kw_args) + self._client_info = client_info if _use_grpc is None: self._use_grpc = _USE_GRPC diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 49abeafdcb77..30fef253af0c 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -42,6 +42,12 @@ def test_default_url(self): conn = self._make_one(client) self.assertIs(conn._client, client) + def test_build_api_url_w_custom_endpoint(self): + custom_endpoint = "https://foo-logging.googleapis.com" + conn = self._make_one(object(), api_endpoint=custom_endpoint) + URI = "/".join([custom_endpoint, conn.API_VERSION, "foo"]) + self.assertEqual(conn.build_api_url("/foo"), URI) + def test_extra_headers(self): import requests from google.cloud import _http as base_http diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index e750df7de454..5acd736185fc 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -67,6 +67,42 @@ def test_ctor_explicit(self): self.assertIsInstance(client._connection, Connection) self.assertIs(client._connection._client_info, 
client_info) + def test_ctor_w_empty_client_options(self): + from google.api_core.client_options import ClientOptions + + creds = _make_credentials() + client_options = ClientOptions() + client = self._make_one( + project=self.PROJECT, credentials=creds, client_options=client_options + ) + self.assertEqual( + client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT + ) + + def test_ctor_w_client_options_object(self): + from google.api_core.client_options import ClientOptions + + creds = _make_credentials() + client_options = ClientOptions( + api_endpoint="https://foo-logging.googleapis.com" + ) + client = self._make_one( + project=self.PROJECT, credentials=creds, client_options=client_options + ) + self.assertEqual( + client._connection.API_BASE_URL, "https://foo-logging.googleapis.com" + ) + + def test_ctor_w_client_options_dict(self): + creds = _make_credentials() + client_options = {"api_endpoint": "https://foo-logging.googleapis.com"} + client = self._make_one( + project=self.PROJECT, credentials=creds, client_options=client_options + ) + self.assertEqual( + client._connection.API_BASE_URL, "https://foo-logging.googleapis.com" + ) + def test_logging_api_wo_gapic(self): from google.cloud.logging._http import _LoggingAPI From ac9bd35f3d1c7ecc6dd45c41d298a8df98270bd3 Mon Sep 17 00:00:00 2001 From: Alexandre Viau Date: Wed, 4 Sep 2019 10:21:21 -0400 Subject: [PATCH 256/855] ContainerEngineHandler: don't swallow stream (#9166) --- .../google/cloud/logging/handlers/container_engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py index 3e80b7650de1..9fe460889232 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py @@ -38,7 +38,7 @@ class ContainerEngineHandler(logging.StreamHandler): """ def __init__(self, name=None, stream=None): - super(ContainerEngineHandler, self).__init__() + super(ContainerEngineHandler, self).__init__(stream=stream) self.name = name def format(self, record): From ab778de243d0301c394b28f078b635d3b080e1ed Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 23 Sep 2019 09:58:37 -0700 Subject: [PATCH 257/855] Logging: Add LoggingV2Servicer, LogSinks, logging_metrics, and log_entry. Add LogSeverity and HttpRequest types (via synth). 
(#9262) --- .../gapic/config_service_v2_client.py | 33 +- .../gapic/logging_service_v2_client.py | 16 +- .../config_service_v2_grpc_transport.py | 6 +- .../logging_service_v2_grpc_transport.py | 5 +- .../proto/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry_pb2.py | 873 ++++++++ .../logging_v2/proto/log_entry_pb2_grpc.py | 2 + .../logging_v2/proto/logging_config_pb2.py | 1857 +++++++++++++++++ .../proto/logging_config_pb2_grpc.py | 213 ++ .../logging_v2/proto/logging_metrics_pb2.py | 1018 +++++++++ .../proto/logging_metrics_pb2_grpc.py | 118 ++ .../cloud/logging_v2/proto/logging_pb2.py | 1312 ++++++++++++ .../logging_v2/proto/logging_pb2_grpc.py | 130 ++ .../cloud/logging_v2/proto/log_entry.proto | 97 +- .../cloud/logging_v2/proto/log_entry_pb2.py | 1 + .../cloud/logging_v2/proto/logging.proto | 74 +- .../logging_v2/proto/logging/type/__init__.py | 0 .../proto/logging/type/http_request_pb2.py | 405 ++++ .../logging/type/http_request_pb2_grpc.py | 2 + .../proto/logging/type/log_severity_pb2.py | 93 + .../logging/type/log_severity_pb2_grpc.py | 2 + .../logging_v2/proto/logging_config.proto | 264 ++- .../logging_v2/proto/logging_config_pb2.py | 1 + .../logging_v2/proto/logging_metrics.proto | 52 +- .../logging_v2/proto/logging_metrics_pb2.py | 1 + .../cloud/logging_v2/proto/logging_pb2.py | 1 + packages/google-cloud-logging/synth.metadata | 10 +- packages/google-cloud-logging/synth.py | 4 - 28 files changed, 6380 insertions(+), 210 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index df942566a397..2942e2207000 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -31,15 +31,11 @@ import google.api_core.path_template import grpc -from google.api import monitored_resource_pb2 from google.cloud.logging_v2.gapic import config_service_v2_client_config from google.cloud.logging_v2.gapic import enums from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 from google.cloud.logging_v2.proto import logging_config_pb2 from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 @@ -48,10 +44,7 @@ class ConfigServiceV2Client(object): - """ - Service for configuring sinks used to export log entries out of - Logging. - """ + """Service for configuring sinks used to route log entries.""" SERVICE_ADDRESS = "logging.googleapis.com:443" """The default address of the service.""" @@ -593,8 +586,10 @@ def update_sink( """ Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and - ``filter``. The updated sink might also have a new ``writer_identity``; - see the ``unique_writer_identity`` field. + ``filter``. + + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. Example: >>> from google.cloud import logging_v2 @@ -625,11 +620,10 @@ def update_sink( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. See - `sinks.create `__ - for a description of this field. When updating a sink, the effect of - this field on the value of ``writer_identity`` in the updated sink - depends on both the old and new values of this field: + unique_writer_identity (bool): Optional. See ``sinks.create`` for a description of this field. When + updating a sink, the effect of this field on the value of + ``writer_identity`` in the updated sink depends on both the old and new + values of this field: - If the old and new values of this field are both false or both true, then there is no change to the sink's ``writer_identity``. @@ -1106,10 +1100,11 @@ def update_exclusion( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogExclusion` - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A nonempty list of fields to change in the existing exclusion. - New values for the fields are taken from the corresponding fields in the - ``LogExclusion`` included in this request. Fields not mentioned in - ``update_mask`` are not changed and are ignored in the request. + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A non-empty list of fields to change in the existing + exclusion. New values for the fields are taken from the corresponding + fields in the ``LogExclusion`` included in this request. Fields not + mentioned in ``update_mask`` are not changed and are ignored in the + request. For example, to change the filter and description of an exclusion, specify an ``update_mask`` of ``"filter,description"``. 
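The two API-surface changes above are easiest to see side by side in use. What follows is a minimal sketch, not part of any patch in this series; it assumes default application credentials, and the project ID ``my-project``, the exclusion ID ``no-debug``, and the endpoint ``https://foo-logging.googleapis.com`` are illustrative placeholders (the endpoint mirrors the one used in the unit tests above). First, the ``client_options`` patch earlier in the series, which routes a custom ``api_endpoint`` into ``Connection.API_BASE_URL``:

    >>> from google.cloud import logging
    >>> client = logging.Client(
    ...     project="my-project",
    ...     client_options={"api_endpoint": "https://foo-logging.googleapis.com"},
    ... )
    >>> client._connection.API_BASE_URL
    'https://foo-logging.googleapis.com'

Second, the ``update_mask`` semantics described in the ``update_exclusion`` docstring just above: only the fields named in the mask are changed, and fields not mentioned are ignored. ``exclusion_path`` is assumed here to be the generated resource-path helper on the GAPIC client:

    >>> from google.cloud import logging_v2
    >>> config_client = logging_v2.ConfigServiceV2Client()
    >>> name = config_client.exclusion_path("my-project", "no-debug")
    >>> exclusion = {
    ...     "filter": "severity < WARNING",
    ...     "description": "Drop sub-WARNING entries",
    ... }
    >>> # Equivalent to the mask string "filter,description" in the docstring.
    >>> update_mask = {"paths": ["filter", "description"]}
    >>> updated = config_client.update_exclusion(name, exclusion, update_mask)

A note on the design: passing ``client_options`` as a plain dict works because the handwritten client converts it with ``google.api_core.client_options.from_dict`` before reading ``api_endpoint``, so the dict and ``ClientOptions`` forms exercise the same code path, as the three ``test_ctor_w_client_options_*`` tests above verify.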
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 22aa07c592ae..09509318a0a6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -36,9 +36,12 @@ from google.cloud.logging_v2.gapic import logging_service_v2_client_config from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2_grpc from google.cloud.logging_v2.proto import logging_pb2 from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version @@ -374,8 +377,8 @@ def write_log_entries( Log entries with timestamps that are more than the `logs retention period `__ in the past or more than 24 hours in the future will not be available when calling - ``entries.list``. However, those log entries can still be exported with - `LogSinks `__. + ``entries.list``. However, those log entries can still be `exported with + LogSinks `__. To improve throughput and to avoid exceeding the `quota limit `__ for calls to @@ -486,8 +489,9 @@ def list_log_entries( metadata=None, ): """ - Lists log entries. Use this method to retrieve log entries from Logging. - For ways to export log entries, see `Exporting + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. For ways + to export log entries, see `Exporting Logs `__. Example: @@ -526,9 +530,7 @@ def list_log_entries( Projects listed in the ``project_ids`` field are added to this list. project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project identifiers or project numbers from which to retrieve log entries. - Example: ``"my-project-1A"``. If present, these project identifiers are - converted to resource name format and added to the list of resources in - ``resource_names``. + Example: ``"my-project-1A"``. filter_ (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs Filters `__. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 8157764ec872..533895087231 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -163,8 +163,10 @@ def update_sink(self): Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and - ``filter``. The updated sink might also have a new ``writer_identity``; - see the ``unique_writer_identity`` field. + ``filter``. + + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. 
Returns: Callable: A callable which accepts the appropriate diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index caa20c480225..4477ad701b5e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -152,8 +152,9 @@ def write_log_entries(self): def list_log_entries(self): """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_log_entries`. - Lists log entries. Use this method to retrieve log entries from Logging. - For ways to export log entries, see `Exporting + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. For ways + to export log entries, see `Exporting Logs `__. Returns: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py new file mode 100644 index 000000000000..1f2b1ca3b64d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py @@ -0,0 +1,873 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/log_entry.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import ( + monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, +) +from google.logging.type import ( + http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, +) +from google.logging.type import ( + log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, +) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/logging_v2/proto/log_entry.proto", + package="google.logging.v2", + syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), + serialized_pb=_b( + '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 
\x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, + google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, + google_dot_protobuf_dot_any__pb2.DESCRIPTOR, + google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], +) + + +_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.logging.v2.LogEntry.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.LogEntry.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.LogEntry.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1057, + serialized_end=1102, +) + +_LOGENTRY = _descriptor.Descriptor( + name="LogEntry", + full_name="google.logging.v2.LogEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.LogEntry.log_name", + index=0, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.logging.v2.LogEntry.resource", + index=1, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="proto_payload", + full_name="google.logging.v2.LogEntry.proto_payload", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text_payload", + full_name="google.logging.v2.LogEntry.text_payload", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_payload", + full_name="google.logging.v2.LogEntry.json_payload", + index=4, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timestamp", + full_name="google.logging.v2.LogEntry.timestamp", + index=5, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="receive_timestamp", + full_name="google.logging.v2.LogEntry.receive_timestamp", + index=6, + number=24, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="severity", + full_name="google.logging.v2.LogEntry.severity", + index=7, + number=10, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="insert_id", + full_name="google.logging.v2.LogEntry.insert_id", + index=8, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="http_request", + full_name="google.logging.v2.LogEntry.http_request", + index=9, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.logging.v2.LogEntry.labels", + index=10, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metadata", + full_name="google.logging.v2.LogEntry.metadata", + index=11, + number=25, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="operation", + full_name="google.logging.v2.LogEntry.operation", + index=12, + number=15, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trace", + full_name="google.logging.v2.LogEntry.trace", + index=13, + 
number=22, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="span_id", + full_name="google.logging.v2.LogEntry.span_id", + index=14, + number=27, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trace_sampled", + full_name="google.logging.v2.LogEntry.trace_sampled", + index=15, + number=30, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_location", + full_name="google.logging.v2.LogEntry.source_location", + index=16, + number=23, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_LOGENTRY_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="payload", + full_name="google.logging.v2.LogEntry.payload", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=331, + serialized_end=1113, +) + + +_LOGENTRYOPERATION = _descriptor.Descriptor( + name="LogEntryOperation", + full_name="google.logging.v2.LogEntryOperation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="id", + full_name="google.logging.v2.LogEntryOperation.id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="producer", + full_name="google.logging.v2.LogEntryOperation.producer", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="first", + full_name="google.logging.v2.LogEntryOperation.first", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="last", + full_name="google.logging.v2.LogEntryOperation.last", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1115, + serialized_end=1193, +) + + +_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( + name="LogEntrySourceLocation", + full_name="google.logging.v2.LogEntrySourceLocation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file", + full_name="google.logging.v2.LogEntrySourceLocation.file", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="line", + full_name="google.logging.v2.LogEntrySourceLocation.line", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="function", + full_name="google.logging.v2.LogEntrySourceLocation.function", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1195, + serialized_end=1265, +) + +_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY +_LOGENTRY.fields_by_name[ + "resource" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_LOGENTRY.fields_by_name[ + "proto_payload" +].message_type = google_dot_protobuf_dot_any__pb2._ANY +_LOGENTRY.fields_by_name[ + "json_payload" +].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_LOGENTRY.fields_by_name[ + "timestamp" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name[ + "receive_timestamp" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name[ + "severity" +].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY +_LOGENTRY.fields_by_name[ + "http_request" +].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST +_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY +_LOGENTRY.fields_by_name[ + "metadata" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA +_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION +_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["proto_payload"] +) +_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + "payload" +] +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["text_payload"] +) +_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + "payload" +] +_LOGENTRY.oneofs_by_name["payload"].fields.append( + _LOGENTRY.fields_by_name["json_payload"] +) 
+_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ + "payload" +] +DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY +DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION +DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +LogEntry = _reflection.GeneratedProtocolMessageType( + "LogEntry", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRY_LABELSENTRY, + __module__="google.cloud.logging_v2.proto.log_entry_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) + ), + ), + DESCRIPTOR=_LOGENTRY, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""An individual entry in a log. + + + Attributes: + log_name: + Required. The resource name of the log to which this log entry + belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may + optionally be used in place of PROJECT\_ID. The project number + is translated to its corresponding PROJECT\_ID internally and + the ``log_name`` field will contain PROJECT\_ID in queries and + exports. ``[LOG_ID]`` must be URL-encoded within + ``log_name``. Example: ``"organizations/1234567890/logs/cloudr + esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must + be less than 512 characters long and can only include the + following characters: upper and lower case alphanumeric + characters, forward-slash, underscore, hyphen, and period. + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. Listing + the log entry will not show the leading slash and filtering + for a log name with a leading slash will never return any + results. + resource: + Required. The monitored resource that produced this log entry. + Example: a log entry that reports a database error would be + associated with the monitored resource designating the + particular database that reported the error. + payload: + Optional. The log entry payload, which can be one of multiple + types. + proto_payload: + The log entry payload, represented as a protocol buffer. Some + Google Cloud Platform services use this field for their log + entry payloads. The following protocol buffer types are + supported; user-defined types are not supported: + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog" + text_payload: + The log entry payload, represented as a Unicode string + (UTF-8). + json_payload: + The log entry payload, represented as a structure that is + expressed as a JSON object. + timestamp: + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age and + to enforce the logs retention period. If this field is omitted + in a new log entry, then Logging assigns it the current time. + Timestamps have nanosecond accuracy, but trailing zeros in the + fractional seconds might be omitted when the timestamp is + displayed. Incoming log entries should have timestamps that + are no more than the `logs retention period + `__ in the past, and no more than 24 hours in + the future. 
Log entries outside those time boundaries will not + be available when calling ``entries.list``, but those log + entries can still be `exported with LogSinks + `__. + receive_timestamp: + Output only. The time the log entry was received by Logging. + severity: + Optional. The severity of the log entry. The default value is + ``LogSeverity.DEFAULT``. + insert_id: + Optional. A unique identifier for the log entry. If you + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which can be removed. If + omitted in new log entries, then Logging assigns its own + unique identifier. The ``insert_id`` is also used to order log + entries that have the same ``timestamp`` value. + http_request: + Optional. Information about the HTTP request associated with + this log entry, if applicable. + labels: + Optional. A set of user-defined (key, value) data that + provides additional information about the log entry. + metadata: + Deprecated. Output only. Additional metadata about the + monitored resource. Only ``k8s_container``, ``k8s_pod``, and + ``k8s_node`` MonitoredResources have this field populated for + GKE versions older than 1.12.6. For GKE versions 1.12.6 and + above, the ``metadata`` field has been deprecated. The + Kubernetes pod labels that used to be in + ``metadata.userLabels`` will now be present in the ``labels`` + field with a key prefix of ``k8s-pod/``. The Stackdriver + system labels that were present in the + ``metadata.systemLabels`` field will no longer be available in + the LogEntry. + operation: + Optional. Information about an operation associated with the + log entry, if applicable. + trace: + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: ``projects/my- + projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id: + Optional. The span ID within the trace associated with the log + entry. For Trace spans, this is the same format that the + Trace API v2 uses: a 16-character hexadecimal encoding of an + 8-byte array, such as "000000000000004a". + trace_sampled: + Optional. The sampling decision of the trace associated with + the log entry. True means that the trace resource name in the + ``trace`` field was sampled for storage in a trace backend. + False means that the trace was not sampled for storage when + this log entry was written, or the sampling decision was + unknown at the time. A non-sampled ``trace`` value is still + useful as a request correlation identifier. The default is + False. + source_location: + Optional. Source code location information associated with the + log entry, if any. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) + ), +) +_sym_db.RegisterMessage(LogEntry) +_sym_db.RegisterMessage(LogEntry.LabelsEntry) + +LogEntryOperation = _reflection.GeneratedProtocolMessageType( + "LogEntryOperation", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRYOPERATION, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""Additional information about a potentially long-running operation with + which a log entry is associated. + + + Attributes: + id: + Optional. An arbitrary operation identifier. Log entries with + the same identifier are assumed to be part of the same + operation. + producer: + Optional. An arbitrary producer identifier. 
The combination of + ``id`` and ``producer`` must be globally unique. Examples for + ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first: + Optional. Set this to True if this is the first log entry in + the operation. + last: + Optional. Set this to True if this is the last log entry in + the operation. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) + ), +) +_sym_db.RegisterMessage(LogEntryOperation) + +LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( + "LogEntrySourceLocation", + (_message.Message,), + dict( + DESCRIPTOR=_LOGENTRYSOURCELOCATION, + __module__="google.cloud.logging_v2.proto.log_entry_pb2", + __doc__="""Additional information about the source code location that produced the + log entry. + + + Attributes: + file: + Optional. Source file name. Depending on the runtime + environment, this might be a simple name or a fully-qualified + name. + line: + Optional. Line within the source file. 1-based; 0 indicates no + line number available. + function: + Optional. Human-readable name of the function or method being + invoked, with optional context such as the class or package + name. This information may be used in contexts such as the + logs viewer, where a file and line number are less meaningful. + The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) + ), +) +_sym_db.RegisterMessage(LogEntrySourceLocation) + + +DESCRIPTOR._options = None +_LOGENTRY_LABELSENTRY._options = None +_LOGENTRY.fields_by_name["metadata"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py new file mode 100644 index 000000000000..144591e49189 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py @@ -0,0 +1,1857 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/logging_config.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/logging_v2/proto/logging_config.proto", + package="google.logging.v2", + syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), + serialized_pb=_b( + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\xaa\x03\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusion
s/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + ], +) + + +_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( + name="VersionFormat", + full_name="google.logging.v2.LogSink.VersionFormat", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="VERSION_FORMAT_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="V2", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="V1", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=609, + serialized_end=672, +) +_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) + + +_LOGSINK = _descriptor.Descriptor( + name="LogSink", + full_name="google.logging.v2.LogSink", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogSink.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="destination", + full_name="google.logging.v2.LogSink.destination", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogSink.filter", + index=2, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_version_format", + full_name="google.logging.v2.LogSink.output_version_format", + index=3, + number=6, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="writer_identity", + full_name="google.logging.v2.LogSink.writer_identity", + index=4, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="include_children", + full_name="google.logging.v2.LogSink.include_children", + index=5, + number=9, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bigquery_options", + full_name="google.logging.v2.LogSink.bigquery_options", + index=6, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogSink.create_time", + index=7, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogSink.update_time", + index=8, + number=14, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_LOGSINK_VERSIONFORMAT], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="options", + full_name="google.logging.v2.LogSink.options", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=257, + serialized_end=683, +) + + +_BIGQUERYOPTIONS = _descriptor.Descriptor( + name="BigQueryOptions", + full_name="google.logging.v2.BigQueryOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="use_partitioned_tables", + 
full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=685, + serialized_end=734, +) + + +_LISTSINKSREQUEST = _descriptor.Descriptor( + name="ListSinksRequest", + full_name="google.logging.v2.ListSinksRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListSinksRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListSinksRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListSinksRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=736, + serialized_end=809, +) + + +_LISTSINKSRESPONSE = _descriptor.Descriptor( + name="ListSinksResponse", + full_name="google.logging.v2.ListSinksResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sinks", + full_name="google.logging.v2.ListSinksResponse.sinks", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListSinksResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=811, + serialized_end=898, +) + + +_GETSINKREQUEST = _descriptor.Descriptor( + name="GetSinkRequest", + full_name="google.logging.v2.GetSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", 
+ full_name="google.logging.v2.GetSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=900, + serialized_end=935, +) + + +_CREATESINKREQUEST = _descriptor.Descriptor( + name="CreateSinkRequest", + full_name="google.logging.v2.CreateSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateSinkRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="sink", + full_name="google.logging.v2.CreateSinkRequest.sink", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unique_writer_identity", + full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=937, + serialized_end=1046, +) + + +_UPDATESINKREQUEST = _descriptor.Descriptor( + name="UpdateSinkRequest", + full_name="google.logging.v2.UpdateSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", + full_name="google.logging.v2.UpdateSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="sink", + full_name="google.logging.v2.UpdateSinkRequest.sink", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unique_writer_identity", + full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="update_mask", + full_name="google.logging.v2.UpdateSinkRequest.update_mask", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1049, + serialized_end=1210, +) + + +_DELETESINKREQUEST = _descriptor.Descriptor( + name="DeleteSinkRequest", + full_name="google.logging.v2.DeleteSinkRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="sink_name", + full_name="google.logging.v2.DeleteSinkRequest.sink_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1212, + serialized_end=1250, +) + + +_LOGEXCLUSION = _descriptor.Descriptor( + name="LogExclusion", + full_name="google.logging.v2.LogExclusion", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogExclusion.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.logging.v2.LogExclusion.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogExclusion.filter", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disabled", + full_name="google.logging.v2.LogExclusion.disabled", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogExclusion.create_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + 
full_name="google.logging.v2.LogExclusion.update_time", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1253, + serialized_end=1434, +) + + +_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( + name="ListExclusionsRequest", + full_name="google.logging.v2.ListExclusionsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListExclusionsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListExclusionsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListExclusionsRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1436, + serialized_end=1514, +) + + +_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( + name="ListExclusionsResponse", + full_name="google.logging.v2.ListExclusionsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="exclusions", + full_name="google.logging.v2.ListExclusionsResponse.exclusions", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListExclusionsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1516, + serialized_end=1618, +) + + +_GETEXCLUSIONREQUEST = _descriptor.Descriptor( + name="GetExclusionRequest", + full_name="google.logging.v2.GetExclusionRequest", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.GetExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1620, + serialized_end=1655, +) + + +_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name="CreateExclusionRequest", + full_name="google.logging.v2.CreateExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateExclusionRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="exclusion", + full_name="google.logging.v2.CreateExclusionRequest.exclusion", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1657, + serialized_end=1749, +) + + +_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name="UpdateExclusionRequest", + full_name="google.logging.v2.UpdateExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.UpdateExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="exclusion", + full_name="google.logging.v2.UpdateExclusionRequest.exclusion", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.logging.v2.UpdateExclusionRequest.update_mask", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1752, + serialized_end=1891, +) + + +_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( + name="DeleteExclusionRequest", + 
full_name="google.logging.v2.DeleteExclusionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.DeleteExclusionRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1893, + serialized_end=1931, +) + +_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS +_LOGSINK.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK +_LOGSINK.oneofs_by_name["options"].fields.append( + _LOGSINK.fields_by_name["bigquery_options"] +) +_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ + "options" +] +_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK +_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LOGEXCLUSION.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGEXCLUSION.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION +_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK +DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS +DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST +DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE +DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST +DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST +DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST +DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST +DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION +DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST +DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE +DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +LogSink = _reflection.GeneratedProtocolMessageType( + 
"LogSink", + (_message.Message,), + dict( + DESCRIPTOR=_LOGSINK, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Describes a sink used to export log entries to one of the following + destinations in any project: a Cloud Storage bucket, a BigQuery dataset, + or a Cloud Pub/Sub topic. A logs filter controls which log entries are + exported. The sink must be created within a project, organization, + billing account, or folder. + + + Attributes: + name: + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include only + the following characters: upper and lower-case alphanumeric + characters, underscores, hyphens, and periods. + destination: + Required. The export destination: :: + "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis + .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo + gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The + sink's ``writer_identity``, set when the sink is created, must + have permission to write to the destination or else the log + entries are not exported. For more information, see `Exporting + Logs with Sinks `__. + filter: + Optional. An `advanced logs filter + `__. The only exported + log entries are those that are in the resource owning the sink + and that match the filter. For example: :: + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND + severity>=ERROR + output_version_format: + Deprecated. The log entry format to use for this sink's + exported log entries. The v2 format is used by default and + cannot be changed. + writer_identity: + Output only. An IAM identity—a service account or group—under + which Logging writes the exported log entries to the sink's + destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a Resource + `__. Consult the + destination service's documentation to determine the + appropriate IAM roles to assign to the identity. + include_children: + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the default, + only the logs owned by the sink's parent resource are + available for export. If the field is true, then logs from all + the projects, folders, and billing accounts contained in the + sink's parent resource are also available for export. Whether + a particular log entry from the children is exported depends + on the sink's filter expression. For example, if this field is + true, then the filter ``resource.type=gce_instance`` would + export all Compute Engine VM instance log entries from all + projects in the sink's parent. To only export entries from + certain child projects, filter on the project part of the log + name: :: logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance + options: + Optional. Destination dependent options. + bigquery_options: + Optional. Options that affect sinks exporting data to + BigQuery. + create_time: + Output only. The creation timestamp of the sink. This field + may not be present for older sinks. + update_time: + Output only. The last update timestamp of the sink. 
This + field may not be present for older sinks. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) + ), +) +_sym_db.RegisterMessage(LogSink) + +BigQueryOptions = _reflection.GeneratedProtocolMessageType( + "BigQueryOptions", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYOPTIONS, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Options that change functionality of a sink exporting data to BigQuery. + + + Attributes: + use_partitioned_tables: + Optional. Whether to use `BigQuery's partition tables + `__. By default, Logging + creates dated tables based on the log entries' timestamps, + e.g. syslog\_20170523. With partitioned tables the date suffix + is no longer present and `special query syntax + `__ has to be used + instead. In both cases, tables are sharded based on UTC + timezone. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) + ), +) +_sym_db.RegisterMessage(BigQueryOptions) + +ListSinksRequest = _reflection.GeneratedProtocolMessageType( + "ListSinksRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSINKSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``ListSinks``. + + + Attributes: + parent: + Required. The parent resource whose sinks are to be listed: + :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) + ), +) +_sym_db.RegisterMessage(ListSinksRequest) + +ListSinksResponse = _reflection.GeneratedProtocolMessageType( + "ListSinksResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSINKSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Result returned from ``ListSinks``. + + + Attributes: + sinks: + A list of sinks. + next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) + ), +) +_sym_db.RegisterMessage(ListSinksResponse) + +GetSinkRequest = _reflection.GeneratedProtocolMessageType( + "GetSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``GetSink``. + + + Attributes: + sink_name: + Required. The resource name of the sink: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. 
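+ Example of constructing this request (an illustrative
+ sketch, not emitted by the code generator; the import
+ path follows this message's ``__module__``): ::
+
+ from google.cloud.logging_v2.proto import logging_config_pb2
+ request = logging_config_pb2.GetSinkRequest(
+ sink_name="projects/my-project-id/sinks/my-sink-id")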
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) + ), +) +_sym_db.RegisterMessage(GetSinkRequest) + +CreateSinkRequest = _reflection.GeneratedProtocolMessageType( + "CreateSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``CreateSink``. + + + Attributes: + parent: + Required. The resource in which to create the sink: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- + project"``, ``"organizations/123456789"``. + sink: + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity: + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is omitted + or set to false, and if the sink's parent is a project, then + the value returned as ``writer_identity`` is the same group or + service account used by Logging before the addition of writer + identities to this API. The sink's destination must be in the + same project as the sink itself. If this field is set to + true, or if the sink is owned by a non-project resource such + as an organization, then the value of ``writer_identity`` will + be a unique service account used only for exports from the new + sink. For more information, see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) + ), +) +_sym_db.RegisterMessage(CreateSinkRequest) + +UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( + "UpdateSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``UpdateSink``. + + + Attributes: + sink_name: + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + sink: + Required. The updated sink, whose name is the same identifier + that appears as part of ``sink_name``. + unique_writer_identity: + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: - If the old and new values of this field are + both false or both true, then there is no change to the + sink's ``writer_identity``. - If the old value is false and + the new value is true, then ``writer_identity`` is changed + to a unique service account. - It is an error if the old + value is true and the new value is set to false or + defaulted to false. + update_mask: + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. 
An empty updateMask is temporarily + treated as using the following mask for backwards + compatibility purposes: destination,filter,includeChildren At + some point in the future, behavior will be removed and + specifying an empty updateMask will be an error. For a + detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/ + google.protobuf#google.protobuf.FieldMask Example: + ``updateMask=filter``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) + ), +) +_sym_db.RegisterMessage(UpdateSinkRequest) + +DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( + "DeleteSinkRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETESINKREQUEST, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""The parameters to ``DeleteSink``. + + + Attributes: + sink_name: + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) + ), +) +_sym_db.RegisterMessage(DeleteSinkRequest) + +LogExclusion = _reflection.GeneratedProtocolMessageType( + "LogExclusion", + (_message.Message,), + dict( + DESCRIPTOR=_LOGEXCLUSION, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Specifies a set of log entries that are not to be stored in Logging. If + your GCP resource receives a large volume of logs, you can use + exclusions to reduce your chargeable logs. Exclusions are processed + after log sinks, so you can export log entries before they are excluded. + Note that organization-level and folder-level exclusions don't apply to + child resources, and that you can't exclude audit log entries. + + + Attributes: + name: + Required. A client-assigned identifier, such as ``"load- + balancer-exclusion"``. Identifiers are limited to 100 + characters and can include only letters, digits, underscores, + hyphens, and periods. + description: + Optional. A description of this exclusion. + filter: + Required. An `advanced logs filter + `__ that matches the log + entries to be excluded. By using the `sample function + `__, you can + exclude less than 100% of the matching log entries. 
For + example, the following query matches 99% of low-severity log + entries from Google Cloud Storage buckets: + ``"resource.type=gcs_bucket severity\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_distribution__pb2.DESCRIPTOR, + google_dot_api_dot_metric__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + ], +) + + +_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( + name="ApiVersion", + full_name="google.logging.v2.LogMetric.ApiVersion", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="V2", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + 
name="V1", index=1, number=1, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=785, + serialized_end=813, +) +_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) + + +_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( + name="LabelExtractorsEntry", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=729, + serialized_end=783, +) + +_LOGMETRIC = _descriptor.Descriptor( + name="LogMetric", + full_name="google.logging.v2.LogMetric", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.LogMetric.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.logging.v2.LogMetric.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.LogMetric.filter", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric_descriptor", + full_name="google.logging.v2.LogMetric.metric_descriptor", + index=3, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value_extractor", + full_name="google.logging.v2.LogMetric.value_extractor", + index=4, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="label_extractors", + full_name="google.logging.v2.LogMetric.label_extractors", + index=5, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bucket_options", + full_name="google.logging.v2.LogMetric.bucket_options", + index=6, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogMetric.create_time", + index=7, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogMetric.update_time", + index=8, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="version", + full_name="google.logging.v2.LogMetric.version", + index=9, + number=4, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], + enum_types=[_LOGMETRIC_APIVERSION], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=282, + serialized_end=813, +) + + +_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( + name="ListLogMetricsRequest", + full_name="google.logging.v2.ListLogMetricsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListLogMetricsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogMetricsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogMetricsRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=815, + serialized_end=893, +) + + +_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( + name="ListLogMetricsResponse", + full_name="google.logging.v2.ListLogMetricsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metrics", + full_name="google.logging.v2.ListLogMetricsResponse.metrics", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=895, + serialized_end=991, +) + + +_GETLOGMETRICREQUEST = _descriptor.Descriptor( + name="GetLogMetricRequest", + full_name="google.logging.v2.GetLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.GetLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=993, + serialized_end=1035, +) + + +_CREATELOGMETRICREQUEST = _descriptor.Descriptor( + name="CreateLogMetricRequest", + full_name="google.logging.v2.CreateLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.CreateLogMetricRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric", + full_name="google.logging.v2.CreateLogMetricRequest.metric", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1037, + serialized_end=1123, +) + + +_UPDATELOGMETRICREQUEST = 
_descriptor.Descriptor( + name="UpdateLogMetricRequest", + full_name="google.logging.v2.UpdateLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="metric", + full_name="google.logging.v2.UpdateLogMetricRequest.metric", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1125, + serialized_end=1216, +) + + +_DELETELOGMETRICREQUEST = _descriptor.Descriptor( + name="DeleteLogMetricRequest", + full_name="google.logging.v2.DeleteLogMetricRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="metric_name", + full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1218, + serialized_end=1263, +) + +_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC +_LOGMETRIC.fields_by_name[ + "metric_descriptor" +].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR +_LOGMETRIC.fields_by_name[ + "label_extractors" +].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY +_LOGMETRIC.fields_by_name[ + "bucket_options" +].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGMETRIC.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION +_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC +_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC +_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC +_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC +DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC +DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST +DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE +DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = 
_DELETELOGMETRICREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +LogMetric = _reflection.GeneratedProtocolMessageType( + "LogMetric", + (_message.Message,), + dict( + LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( + "LabelExtractorsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) + ), + ), + DESCRIPTOR=_LOGMETRIC, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""Describes a logs-based metric. The value of the metric is the number of + log entries that match a logs filter in a given time interval. + + A logs-based metric can also be used to extract values from logs and + create a distribution of the values. The distribution records the + statistics of the extracted values along with an optional histogram of + the values as specified by the bucket options. + + + Attributes: + name: + Required. The client-assigned metric identifier. Examples: + ``"error_count"``, ``"nginx/requests"``. Metric identifiers + are limited to 100 characters and can include only the + following characters: ``A-Z``, ``a-z``, ``0-9``, and the + special characters ``_-.,+!*',()%/``. The forward-slash + character (``/``) denotes a hierarchy of name pieces, and it + cannot be the first character of the name. The metric + identifier in this field must not be `URL-encoded + `__. However, + when the metric identifier appears as the ``[METRIC_ID]`` part + of a ``metric_name`` API parameter, then the metric identifier + must be URL-encoded. Example: ``"projects/my- + project/metrics/nginx%2Frequests"``. + description: + Optional. A description of this metric, which is used in + documentation. The maximum length of the description is 8000 + characters. + filter: + Required. An `advanced logs filter + `__ which is used to + match log entries. Example: :: "resource.type=gae_app + AND severity>=ERROR" The maximum length of the filter is + 20000 characters. + metric_descriptor: + Optional. The metric descriptor associated with the logs-based + metric. If unspecified, it uses a default metric descriptor + with a DELTA metric kind, INT64 value type, with no labels and + a unit of "1". Such a metric counts the number of log entries + matching the ``filter`` expression. The ``name``, ``type``, + and ``description`` fields in the ``metric_descriptor`` are + output only, and are constructed using the ``name`` and + ``description`` fields in the LogMetric. To create a logs- + based metric that records a distribution of log values, a + DELTA metric kind with a DISTRIBUTION value type must be used + along with a ``value_extractor`` expression in the LogMetric. + Each label in the metric descriptor must have a matching label + name as the key and an extractor expression as the value in + the ``label_extractors`` map. The ``metric_kind`` and + ``value_type`` fields in the ``metric_descriptor`` cannot be + updated once initially configured. New labels can be added in + the ``metric_descriptor``, but existing labels cannot be + modified except for their description. + value_extractor: + Optional. A ``value_extractor`` is required when using a + distribution logs-based metric to extract the values to record + from a log entry. Two functions are supported for value + extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, + regex)``. The arguments are: 1.
field: The name of the log + entry field from which the value is to be extracted. 2. regex: + A regular expression using the Google RE2 syntax + (https://github.com/google/re2/wiki/Syntax) with a single + capture group to extract data from the specified log entry + field. The value of the field is converted to a string before + applying the regex. It is an error to specify a regex that + does not include exactly one capture group. The result of the + extraction must be convertible to a double type, as the + distribution always records double values. If either the + extraction or the conversion to double fails, then those + values are not recorded in the distribution. Example: + ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` + label_extractors: + Optional. A map from a label key string to an extractor + expression which is used to extract data from a log entry + field and assign as the label value. Each label key specified + in the LabelDescriptor must have an associated extractor + expression in this map. The syntax of the extractor expression + is the same as for the ``value_extractor`` field. The + extracted value is converted to the type defined in the label + descriptor. If either the extraction or the type + conversion fails, the label will have a default value. The + default value for a string label is an empty string, for an + integer label it is 0, and for a boolean label it is ``false``. + Note that there are upper bounds on the maximum number of + labels and the number of active time series that are allowed + in a project. + bucket_options: + Optional. The ``bucket_options`` are required when the logs- + based metric is using a DISTRIBUTION value type and it + describes the bucket boundaries used to create a histogram of + the extracted values. + create_time: + Output only. The creation timestamp of the metric. This field + may not be present for older metrics. + update_time: + Output only. The last update timestamp of the metric. This + field may not be present for older metrics. + version: + Deprecated. The API version that created or updated this + metric. The v2 format is used by default and cannot be + changed. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) + ), +) +_sym_db.RegisterMessage(LogMetric) +_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) + +ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( + "ListLogMetricsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGMETRICSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to ListLogMetrics. + + + Attributes: + parent: + Required. The name of the project containing the metrics: :: + "projects/[PROJECT_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available.
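+ Example of paging with these parameters (a hypothetical
+ sketch; ``stub`` is assumed to be a ``MetricsServiceV2Stub``
+ from the companion ``logging_metrics_pb2_grpc`` module,
+ bound to an authenticated gRPC channel): ::
+
+ request = logging_metrics_pb2.ListLogMetricsRequest(
+ parent="projects/my-project", page_size=100)
+ response = stub.ListLogMetrics(request)
+ # fetch a further batch only while a token is advertised
+ if response.next_page_token: request.page_token = response.next_page_token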
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) + ), +) +_sym_db.RegisterMessage(ListLogMetricsRequest) + +ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( + "ListLogMetricsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGMETRICSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""Result returned from ListLogMetrics. + + + Attributes: + metrics: + A list of logs-based metrics. + next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) + ), +) +_sym_db.RegisterMessage(ListLogMetricsResponse) + +GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "GetLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETLOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to GetLogMetric. + + + Attributes: + metric_name: + The resource name of the desired metric: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) + ), +) +_sym_db.RegisterMessage(GetLogMetricRequest) + +CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "CreateLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to CreateLogMetric. + + + Attributes: + parent: + The resource name of the project in which to create the + metric: :: "projects/[PROJECT_ID]" The new metric must + be provided in the request. + metric: + The new logs-based metric, which must not have an identifier + that already exists. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) + ), +) +_sym_db.RegisterMessage(CreateLogMetricRequest) + +UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "UpdateLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to UpdateLogMetric. + + + Attributes: + metric_name: + The resource name of the metric to update: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated + metric must be provided in the request and it's ``name`` field + must be the same as ``[METRIC_ID]`` If the metric does not + exist in ``[PROJECT_ID]``, then a new metric is created. + metric: + The updated metric. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) + ), +) +_sym_db.RegisterMessage(UpdateLogMetricRequest) + +DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( + "DeleteLogMetricRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETELOGMETRICREQUEST, + __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", + __doc__="""The parameters to DeleteLogMetric. 
+ + + Attributes: + metric_name: + The resource name of the metric to delete: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) + ), +) +_sym_db.RegisterMessage(DeleteLogMetricRequest) + + +DESCRIPTOR._options = None +_LOGMETRIC_LABELEXTRACTORSENTRY._options = None +_LOGMETRIC.fields_by_name["version"]._options = None + +_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( + name="MetricsServiceV2", + full_name="google.logging.v2.MetricsServiceV2", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1266, + serialized_end=2262, + methods=[ + _descriptor.MethodDescriptor( + name="ListLogMetrics", + full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", + index=0, + containing_service=None, + input_type=_LISTLOGMETRICSREQUEST, + output_type=_LISTLOGMETRICSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" + ), + ), + _descriptor.MethodDescriptor( + name="GetLogMetric", + full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", + index=1, + containing_service=None, + input_type=_GETLOGMETRICREQUEST, + output_type=_LOGMETRIC, + serialized_options=_b( + "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" + ), + ), + _descriptor.MethodDescriptor( + name="CreateLogMetric", + full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", + index=2, + containing_service=None, + input_type=_CREATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + serialized_options=_b( + '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' + ), + ), + _descriptor.MethodDescriptor( + name="UpdateLogMetric", + full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", + index=3, + containing_service=None, + input_type=_UPDATELOGMETRICREQUEST, + output_type=_LOGMETRIC, + serialized_options=_b( + "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteLogMetric", + full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", + index=4, + containing_service=None, + input_type=_DELETELOGMETRICREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) + +DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 + +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py new file mode 100644 index 000000000000..09f84e038a1b --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -0,0 +1,118 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
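+
+# Editorial usage sketch, not compiler output: a minimal client call
+# against the stub defined below. It assumes ``channel`` is an
+# authenticated ``grpc.Channel`` and that the project and metric IDs
+# are placeholders.
+def _example_get_log_metric(channel):
+    """Fetch a single logs-based metric over an existing channel."""
+    from google.cloud.logging_v2.proto import logging_metrics_pb2
+
+    stub = MetricsServiceV2Stub(channel)  # defined later in this module
+    request = logging_metrics_pb2.GetLogMetricRequest(
+        metric_name="projects/my-project/metrics/my-metric"
+    )
+    return stub.GetLogMetric(request)
+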
+import grpc + +from google.cloud.logging_v2.proto import ( + logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, +) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class MetricsServiceV2Stub(object): + """Service for configuring logs-based metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListLogMetrics = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, + ) + self.GetLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.CreateLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.UpdateLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.DeleteLogMetric = channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class MetricsServiceV2Servicer(object): + """Service for configuring logs-based metrics. + """ + + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. 
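+
+ A concrete service overrides this method; a minimal editorial
+ sketch follows (``metric_store`` is an imaginary in-memory
+ backend, not part of this API, and ``empty_pb2`` is
+ ``google.protobuf.empty_pb2``): ::
+
+     def DeleteLogMetric(self, request, context):
+         metric_store.pop(request.metric_name, None)
+         return empty_pb2.Empty()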
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_MetricsServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + "ListLogMetrics": grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, + ), + "GetLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "CreateLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.logging.v2.MetricsServiceV2", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py new file mode 100644 index 000000000000..04bd84375901 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py @@ -0,0 +1,1312 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/logging.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import ( + monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, +) +from google.cloud.logging_v2.proto import ( + log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, +) +from google.cloud.logging_v2.proto import ( + logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, +) +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/logging_v2/proto/logging.proto", + package="google.logging.v2", + syntax="proto3", + serialized_options=_b( + "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" + ), + serialized_pb=_b( + '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 
\x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + ], +) + + +_DELETELOGREQUEST = _descriptor.Descriptor( + name="DeleteLogRequest", + full_name="google.logging.v2.DeleteLogRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.DeleteLogRequest.log_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + 
extension_ranges=[], + oneofs=[], + serialized_start=376, + serialized_end=412, +) + + +_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=667, + serialized_end=712, +) + +_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( + name="WriteLogEntriesRequest", + full_name="google.logging.v2.WriteLogEntriesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_name", + full_name="google.logging.v2.WriteLogEntriesRequest.log_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.logging.v2.WriteLogEntriesRequest.resource", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.logging.v2.WriteLogEntriesRequest.labels", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entries", + full_name="google.logging.v2.WriteLogEntriesRequest.entries", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="partial_success", + full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="dry_run", + 
full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=415, + serialized_end=712, +) + + +_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( + name="WriteLogEntriesResponse", + full_name="google.logging.v2.WriteLogEntriesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=714, + serialized_end=739, +) + + +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( + name="LogEntryErrorsEntry", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=869, + serialized_end=942, +) + +_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( + name="WriteLogEntriesPartialErrors", + full_name="google.logging.v2.WriteLogEntriesPartialErrors", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_entry_errors", + full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=742, + serialized_end=942, +) + + +_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( + name="ListLogEntriesRequest", + full_name="google.logging.v2.ListLogEntriesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_ids", + full_name="google.logging.v2.ListLogEntriesRequest.project_ids", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource_names", + full_name="google.logging.v2.ListLogEntriesRequest.resource_names", + index=1, + number=8, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.logging.v2.ListLogEntriesRequest.filter", + index=2, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="google.logging.v2.ListLogEntriesRequest.order_by", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogEntriesRequest.page_size", + index=4, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogEntriesRequest.page_token", + index=5, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=945, + serialized_end=1090, +) + + +_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( + name="ListLogEntriesResponse", + full_name="google.logging.v2.ListLogEntriesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="entries", + full_name="google.logging.v2.ListLogEntriesResponse.entries", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1092, + serialized_end=1187, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( + name="ListMonitoredResourceDescriptorsRequest", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1189, + serialized_end=1269, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( + name="ListMonitoredResourceDescriptorsResponse", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="resource_descriptors", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1272, + serialized_end=1410, +) + + +_LISTLOGSREQUEST = _descriptor.Descriptor( + name="ListLogsRequest", + full_name="google.logging.v2.ListLogsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.logging.v2.ListLogsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.logging.v2.ListLogsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.logging.v2.ListLogsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1412, + serialized_end=1484, +) + + +_LISTLOGSRESPONSE = _descriptor.Descriptor( + name="ListLogsResponse", + full_name="google.logging.v2.ListLogsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="log_names", + full_name="google.logging.v2.ListLogsResponse.log_names", + index=0, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.logging.v2.ListLogsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1486, + serialized_end=1548, +) + +_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST +_WRITELOGENTRIESREQUEST.fields_by_name[ + "resource" +].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_WRITELOGENTRIESREQUEST.fields_by_name[ + "labels" +].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY +_WRITELOGENTRIESREQUEST.fields_by_name[ + "entries" +].message_type = ( + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +) +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ + "value" +].message_type = google_dot_rpc_dot_status__pb2._STATUS +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( + _WRITELOGENTRIESPARTIALERRORS +) +_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ + "log_entry_errors" +].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY +_LISTLOGENTRIESRESPONSE.fields_by_name[ + "entries" +].message_type = ( + google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +) +_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ + "resource_descriptors" +].message_type = ( + google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR +) +DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST +DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name[ + "WriteLogEntriesPartialErrors" +] = _WRITELOGENTRIESPARTIALERRORS 
+DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name[ + "ListMonitoredResourceDescriptorsRequest" +] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListMonitoredResourceDescriptorsResponse" +] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE +DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST +DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +DeleteLogRequest = _reflection.GeneratedProtocolMessageType( + "DeleteLogRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETELOGREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to DeleteLog. + + + Attributes: + log_name: + Required. The resource name of the log to delete: :: + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example, ``"projects/my-project- + id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. For more information + about log names, see [LogEntry][google.logging.v2.LogEntry]. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) + ), +) +_sym_db.RegisterMessage(DeleteLogRequest) + +WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( + "WriteLogEntriesRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, + __module__="google.cloud.logging_v2.proto.logging_pb2" + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_WRITELOGENTRIESREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to WriteLogEntries. + + + Attributes: + log_name: + Optional. A default log resource name that is assigned to all + log entries in ``entries`` that do not specify a value for + ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example: :: "projects/my-project- + id/logs/syslog" "organizations/1234567890/logs/cloudresour + cemanager.googleapis.com%2Factivity" The permission + logging.logEntries.create is needed on each project, + organization, billing account, or folder that is receiving new + log entries, whether the resource is specified in logName or + in an individual log entry. + resource: + Optional. A default monitored resource object that is assigned + to all log entries in ``entries`` that do not specify a value + for ``resource``. Example: :: { "type": "gce_instance", + "labels": { "zone": "us-central1-a", "instance_id": + "00000000000000000000" }} See + [LogEntry][google.logging.v2.LogEntry]. + labels: + Optional. Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries: + Required. The log entries to send to Logging. 
The order of log
+ entries in this list does not matter. Values supplied in this
+ method's ``log_name``, ``resource``, and ``labels`` fields are
+ copied into those log entries in this list that do not include
+ values for their corresponding fields. For more information,
+ see the [LogEntry][google.logging.v2.LogEntry] type. If the
+ ``timestamp`` or ``insert_id`` fields are missing in log
+ entries, then this method supplies the current time or a
+ unique identifier, respectively. The supplied values are
+ chosen so that, among the log entries that did not supply
+ their own values, the entries earlier in the list will sort
+ before the entries later in the list. See the ``entries.list``
+ method. Log entries with timestamps that are more than the
+ logs retention period in the past
+ or more than 24 hours in the future will not be available when
+ calling ``entries.list``. However, those log entries can still
+ be exported with LogSinks. To improve throughput and to avoid exceeding the
+ quota limit for calls to
+ ``entries.write``, you should try to include several log
+ entries in this list, rather than calling this method for each
+ individual log entry.
+ partial_success:
+ Optional. Whether valid entries should be written even if some
+ other entries fail due to INVALID\_ARGUMENT or
+ PERMISSION\_DENIED errors. If any entry is not written, then
+ the response status is the error associated with one of the
+ failed entries and the response includes error details keyed
+ by the entries' zero-based index in the ``entries.write``
+ method.
+ dry_run:
+ Optional. If true, the request should expect a normal response,
+ but the entries won't be persisted or exported. Useful for
+ checking whether the logging API endpoints are working
+ properly before sending valuable data.
+ """,
+ # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest)
+ ),
+)
+_sym_db.RegisterMessage(WriteLogEntriesRequest)
+_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry)
+
+WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType(
+ "WriteLogEntriesResponse",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_WRITELOGENTRIESRESPONSE,
+ __module__="google.cloud.logging_v2.proto.logging_pb2",
+ __doc__="""Result returned from WriteLogEntries.
+ """,
+ # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse)
+ ),
+)
+_sym_db.RegisterMessage(WriteLogEntriesResponse)
+
+WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType(
+ "WriteLogEntriesPartialErrors",
+ (_message.Message,),
+ dict(
+ LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType(
+ "LogEntryErrorsEntry",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,
+ __module__="google.cloud.logging_v2.proto.logging_pb2"
+ # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry)
+ ),
+ ),
+ DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS,
+ __module__="google.cloud.logging_v2.proto.logging_pb2",
+ __doc__="""Error details for WriteLogEntries with partial success.
+
+
+ Attributes:
+ log_entry_errors:
+ When ``WriteLogEntriesRequest.partial_success`` is true,
+ records the error status for entries that were not written due
+ to a permanent error, keyed by the entry's zero-based index in
+ ``WriteLogEntriesRequest.entries``. Failed requests for which
+ no entries are written will not include per-entry errors.
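+
+ Example:
+ An editorial sketch of inspecting these errors, not compiler
+ output; it assumes ``partial_errors`` is a decoded
+ ``WriteLogEntriesPartialErrors`` message and ``request`` is
+ the originating ``WriteLogEntriesRequest``: ::
+
+     for index, status in partial_errors.log_entry_errors.items():
+         failed_entry = request.entries[index]
+         print(failed_entry.log_name, status.code, status.message)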
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) + ), +) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + +ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( + "ListLogEntriesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGENTRIESREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ``ListLogEntries``. + + + Attributes: + project_ids: + Deprecated. Use ``resource_names`` instead. One or more + project identifiers or project numbers from which to retrieve + log entries. Example: ``"my-project-1A"``. + resource_names: + Required. Names of one or more parent resources from which to + retrieve log entries: :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` + field are added to this list. + filter: + Optional. A filter that chooses which log entries to return. + See `Advanced Logs Filters + `__. Only log entries + that match the filter are returned. An empty filter matches + all log entries in the resources listed in ``resource_names``. + Referencing a parent resource that is not listed in + ``resource_names`` will cause the filter to return no results. + The maximum length of the filter is 20000 characters. + order_by: + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``page_token`` must be + the value of ``next_page_token`` from the previous response. + The values of other method parameters should be identical to + those in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) + ), +) +_sym_db.RegisterMessage(ListLogEntriesRequest) + +ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( + "ListLogEntriesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGENTRIESRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ``ListLogEntries``. + + + Attributes: + entries: + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that more + entries may exist. See ``nextPageToken`` for more information. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. If a value for + ``next_page_token`` appears and the ``entries`` field is + empty, it means that the search found no log entries so far + but it did not have time to search all the possible log + entries. 
Retry the method with this value for ``page_token`` + to continue the search. Alternatively, consider speeding up + the search by changing your filter to specify a single log + name or resource type, or to narrow the time range of the + search. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) + ), +) +_sym_db.RegisterMessage(ListLogEntriesResponse) + +ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( + "ListMonitoredResourceDescriptorsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ListMonitoredResourceDescriptors + + + Attributes: + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) + ), +) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) + +ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( + "ListMonitoredResourceDescriptorsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ListMonitoredResourceDescriptors. + + + Attributes: + resource_descriptors: + A list of resource descriptors. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) + ), +) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) + +ListLogsRequest = _reflection.GeneratedProtocolMessageType( + "ListLogsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGSREQUEST, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""The parameters to ListLogs. + + + Attributes: + parent: + Required. The resource name that owns the logs: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. 
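+
+ Example:
+ An editorial sketch of collecting every log name under a
+ parent, not compiler output; it assumes ``ListLogsRequest``
+ is imported from this module, ``stub`` is a
+ ``LoggingServiceV2Stub``, and the project ID is a
+ placeholder: ::
+
+     request = ListLogsRequest(parent="projects/my-project")
+     log_names = []
+     while True:
+         response = stub.ListLogs(request)
+         log_names.extend(response.log_names)
+         if not response.next_page_token:
+             break
+         request.page_token = response.next_page_token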
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) + ), +) +_sym_db.RegisterMessage(ListLogsRequest) + +ListLogsResponse = _reflection.GeneratedProtocolMessageType( + "ListLogsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTLOGSRESPONSE, + __module__="google.cloud.logging_v2.proto.logging_pb2", + __doc__="""Result returned from ListLogs. + + + Attributes: + log_names: + A list of log names. For example, ``"projects/my- + project/logs/syslog"`` or ``"organizations/123/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) + ), +) +_sym_db.RegisterMessage(ListLogsResponse) + + +DESCRIPTOR._options = None +_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None +_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None + +_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( + name="LoggingServiceV2", + full_name="google.logging.v2.LoggingServiceV2", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1551, + serialized_end=2836, + methods=[ + _descriptor.MethodDescriptor( + name="DeleteLog", + full_name="google.logging.v2.LoggingServiceV2.DeleteLog", + index=0, + containing_service=None, + input_type=_DELETELOGREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + ), + ), + _descriptor.MethodDescriptor( + name="WriteLogEntries", + full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", + index=1, + containing_service=None, + input_type=_WRITELOGENTRIESREQUEST, + output_type=_WRITELOGENTRIESRESPONSE, + serialized_options=_b( + '\202\323\344\223\002\026"\021/v2/entries:write:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="ListLogEntries", + full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", + index=2, + containing_service=None, + input_type=_LISTLOGENTRIESREQUEST, + output_type=_LISTLOGENTRIESRESPONSE, + serialized_options=_b( + '\202\323\344\223\002\025"\020/v2/entries:list:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="ListMonitoredResourceDescriptors", + full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", + index=3, + containing_service=None, + input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + serialized_options=_b( + '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' + ), + ), + _descriptor.MethodDescriptor( + name="ListLogs", + full_name="google.logging.v2.LoggingServiceV2.ListLogs", + index=4, + containing_service=None, + input_type=_LISTLOGSREQUEST, + output_type=_LISTLOGSRESPONSE, + serialized_options=_b( + 
"\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) + +DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 + +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py new file mode 100644 index 000000000000..2a2b3656925c --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -0,0 +1,130 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.logging_v2.proto import ( + logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, +) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class LoggingServiceV2Stub(object): + """Service for ingesting and querying logs. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.DeleteLog = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.WriteLogEntries = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, + ) + self.ListLogEntries = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, + ) + self.ListMonitoredResourceDescriptors = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, + ) + self.ListLogs = channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, + ) + + +class LoggingServiceV2Servicer(object): + """Service for ingesting and querying logs. + """ + + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def WriteLogEntries(self, request, context): + """Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. + A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries that originated + from a project/folder/organization/billing account. For ways to export log + entries, see [Exporting Logs](/logging/docs/export). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_LoggingServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + "DeleteLog": grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "WriteLogEntries": grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, + ), + "ListLogEntries": grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, + ), + "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + "ListLogs": grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, + ), + } + generic_handler = 
grpc.method_handlers_generic_handler( + "google.logging.v2.LoggingServiceV2", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto index de9786daf733..f0b037545199 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,13 +17,14 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/monitored_resource.proto"; import "google/logging/type/http_request.proto"; import "google/logging/type/log_severity.proto"; import "google/protobuf/any.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -34,6 +35,7 @@ option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; // An individual entry in a log. +// message LogEntry { // Required. The resource name of the log to which this log entry belongs: // @@ -42,9 +44,9 @@ message LogEntry { // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" // "folders/[FOLDER_ID]/logs/[LOG_ID]" // - // A project number may optionally be used in place of PROJECT_ID. The - // project number is translated to its corresponding PROJECT_ID internally - // and the `log_name` field will contain PROJECT_ID in queries and exports. + // A project number may optionally be used in place of PROJECT_ID. The project + // number is translated to its corresponding PROJECT_ID internally and the + // `log_name` field will contain PROJECT_ID in queries and exports. // // `[LOG_ID]` must be URL-encoded within `log_name`. Example: // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. @@ -59,17 +61,23 @@ message LogEntry { // any results. string log_name = 12; - // Required. The primary monitored resource associated with this log entry. - // Example: a log entry that reports a database error would be - // associated with the monitored resource designating the particular - // database that reported the error. + // Required. The monitored resource that produced this log entry. + // + // Example: a log entry that reports a database error would be associated with + // the monitored resource designating the particular database that reported + // the error. google.api.MonitoredResource resource = 8; // Optional. The log entry payload, which can be one of multiple types. oneof payload { - // The log entry payload, represented as a protocol buffer. Some - // Google Cloud Platform services use this field for their log - // entry payloads. + // The log entry payload, represented as a protocol buffer. Some Google + // Cloud Platform services use this field for their log entry payloads. 
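The module above follows the standard grpcio layout: a client stub whose RPCs are plain callables, a base servicer that answers UNIMPLEMENTED, and a registration helper. A minimal sketch of wiring the two together, assuming a local insecure endpoint (the address is a placeholder; real clients go through the authenticated channel built by the GAPIC transport):

from concurrent import futures

import grpc

from google.cloud.logging_v2.proto import logging_pb2, logging_pb2_grpc

# Server side: register the (unimplemented) base servicer.
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
logging_pb2_grpc.add_LoggingServiceV2Servicer_to_server(
    logging_pb2_grpc.LoggingServiceV2Servicer(), server
)
server.add_insecure_port("localhost:50051")  # placeholder address
server.start()

# Client side: each RPC on the stub is invoked like a function.
channel = grpc.insecure_channel("localhost:50051")
stub = logging_pb2_grpc.LoggingServiceV2Stub(channel)
try:
    stub.ListLogs(logging_pb2.ListLogsRequest(parent="projects/my-project"))
except grpc.RpcError as exc:
    print(exc.code())  # StatusCode.UNIMPLEMENTED from the base servicer

server.stop(None)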
+ // + // The following protocol buffer types are supported; user-defined types + // are not supported: + // + // "type.googleapis.com/google.cloud.audit.AuditLog" + // "type.googleapis.com/google.appengine.logging.v1.RequestLog" google.protobuf.Any proto_payload = 2; // The log entry payload, represented as a Unicode string (UTF-8). @@ -80,19 +88,18 @@ message LogEntry { google.protobuf.Struct json_payload = 6; } - // Optional. The time the event described by the log entry occurred. - // This time is used to compute the log entry's age and to enforce - // the logs retention period. If this field is omitted in a new log - // entry, then Logging assigns it the current time. - // Timestamps have nanosecond accuracy, but trailing zeros in the fractional - // seconds might be omitted when the timestamp is displayed. + // Optional. The time the event described by the log entry occurred. This + // time is used to compute the log entry's age and to enforce the logs + // retention period. If this field is omitted in a new log entry, then Logging + // assigns it the current time. Timestamps have nanosecond accuracy, but + // trailing zeros in the fractional seconds might be omitted when the + // timestamp is displayed. // - // Incoming log entries should have timestamps that are no more than - // the [logs retention period](/logging/quotas) in the past, - // and no more than 24 hours in the future. Log entries outside those time - // boundaries will not be available when calling `entries.list`, but - // those log entries can still be exported with - // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // Incoming log entries should have timestamps that are no more than the [logs + // retention period](/logging/quotas) in the past, and no more than 24 hours + // in the future. Log entries outside those time boundaries will not be + // available when calling `entries.list`, but those log entries can still be + // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). google.protobuf.Timestamp timestamp = 9; // Output only. The time the log entry was received by Logging. @@ -103,25 +110,31 @@ message LogEntry { google.logging.type.LogSeverity severity = 10; // Optional. A unique identifier for the log entry. If you provide a value, - // then Logging considers other log entries in the same project, - // with the same `timestamp`, and with the same `insert_id` to be duplicates - // which can be removed. If omitted in new log entries, then - // Logging assigns its own unique identifier. The `insert_id` is also used - // to order log entries that have the same `timestamp` value. + // then Logging considers other log entries in the same project, with the same + // `timestamp`, and with the same `insert_id` to be duplicates which can be + // removed. If omitted in new log entries, then Logging assigns its own unique + // identifier. The `insert_id` is also used to order log entries that have the + // same `timestamp` value. string insert_id = 4; - // Optional. Information about the HTTP request associated with this - // log entry, if applicable. + // Optional. Information about the HTTP request associated with this log + // entry, if applicable. google.logging.type.HttpRequest http_request = 7; // Optional. A set of user-defined (key, value) data that provides additional // information about the log entry. map labels = 11; - // Output only. Additional metadata about the monitored resource. + // Deprecated. Output only. Additional metadata about the monitored resource. 
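The `payload` oneof above carries exactly one of a protobuf, text, or JSON payload per entry. A short sketch of setting the text and JSON variants on the generated message (log names are placeholders; assigning one member of the oneof clears the others):

from google.cloud.logging_v2.proto import log_entry_pb2

text_entry = log_entry_pb2.LogEntry(log_name="projects/my-project/logs/app")
text_entry.text_payload = "plain UTF-8 message"  # selects the text_payload member

json_entry = log_entry_pb2.LogEntry(log_name="projects/my-project/logs/app")
# json_payload is a google.protobuf.Struct; update() copies in a plain dict.
json_entry.json_payload.update({"event": "signup", "ok": True})

assert json_entry.WhichOneof("payload") == "json_payload"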
+ // // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have - // this field populated. - google.api.MonitoredResourceMetadata metadata = 25; + // this field populated for GKE versions older than 1.12.6. For GKE versions + // 1.12.6 and above, the `metadata` field has been deprecated. The Kubernetes + // pod labels that used to be in `metadata.userLabels` will now be present in + // the `labels` field with a key prefix of `k8s-pod/`. The Stackdriver system + // labels that were present in the `metadata.systemLabels` field will no + // longer be available in the LogEntry. + google.api.MonitoredResourceMetadata metadata = 25 [deprecated = true]; // Optional. Information about an operation associated with the log entry, if // applicable. @@ -134,12 +147,14 @@ message LogEntry { string trace = 22; // Optional. The span ID within the trace associated with the log entry. - // For Trace spans, this is the same format that the Trace - // API v2 uses: a 16-character hexadecimal encoding of an 8-byte array, such - // as "000000000000004a". + // + // For Trace spans, this is the same format that the Trace API v2 uses: a + // 16-character hexadecimal encoding of an 8-byte array, such as + // "000000000000004a". string span_id = 27; // Optional. The sampling decision of the trace associated with the log entry. + // // True means that the trace resource name in the `trace` field was sampled // for storage in a trace backend. False means that the trace was not sampled // for storage when this log entry was written, or the sampling decision was @@ -155,12 +170,12 @@ message LogEntry { // Additional information about a potentially long-running operation with which // a log entry is associated. message LogEntryOperation { - // Optional. An arbitrary operation identifier. Log entries with the - // same identifier are assumed to be part of the same operation. + // Optional. An arbitrary operation identifier. Log entries with the same + // identifier are assumed to be part of the same operation. string id = 1; - // Optional. An arbitrary producer identifier. The combination of - // `id` and `producer` must be globally unique. Examples for `producer`: + // Optional. An arbitrary producer identifier. The combination of `id` and + // `producer` must be globally unique. Examples for `producer`: // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. string producer = 2; diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 6dc9ec5817f1..1d3af3c42416 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/log_entry.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto index d04cd5c03dd0..fc4217593770 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,13 +17,15 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/monitored_resource.proto"; import "google/logging/v2/log_entry.proto"; +import "google/logging/v2/logging_config.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -35,6 +37,14 @@ option php_namespace = "Google\\Cloud\\Logging\\V2"; // Service for ingesting and querying logs. service LoggingServiceV2 { + option (google.api.default_host) = "logging.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/logging.admin," + "https://www.googleapis.com/auth/logging.read," + "https://www.googleapis.com/auth/logging.write"; + // Deletes all the log entries in a log. // The log reappears if it receives new entries. // Log entries written shortly before the delete operation might not be @@ -42,9 +52,18 @@ service LoggingServiceV2 { rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { delete: "/v2/{log_name=organizations/*/logs/*}" } - additional_bindings { delete: "/v2/{log_name=folders/*/logs/*}" } - additional_bindings { delete: "/v2/{log_name=billingAccounts/*/logs/*}" } + additional_bindings { + delete: "/v2/{log_name=*/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=organizations/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=folders/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=billingAccounts/*/logs/*}" + } }; } @@ -55,17 +74,16 @@ service LoggingServiceV2 { // A single request may contain log entries for a maximum of 1000 // different resources (projects, organizations, billing accounts or // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) - returns (WriteLogEntriesResponse) { + rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:write" body: "*" }; } - // Lists log entries. Use this method to retrieve log entries from - // Logging. For ways to export log entries, see - // [Exporting Logs](/logging/docs/export). + // Lists log entries. Use this method to retrieve log entries that originated + // from a project/folder/organization/billing account. For ways to export log + // entries, see [Exporting Logs](/logging/docs/export). rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:list" @@ -74,8 +92,7 @@ service LoggingServiceV2 { } // Lists the descriptors for monitored resource types used by Logging. 
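The `oauth_scopes` option above lists every scope the service accepts. A sketch of building credentials limited to the narrowest of them with google-auth (the key-file path is a placeholder):

from google.oauth2 import service_account

# Scope the credentials down to write-only access; the broader scopes from
# the list above work the same way.
credentials = service_account.Credentials.from_service_account_file(
    "/path/to/key.json",  # placeholder path
    scopes=["https://www.googleapis.com/auth/logging.write"],
)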
- rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) - returns (ListMonitoredResourceDescriptorsResponse) { + rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { option (google.api.http) = { get: "/v2/monitoredResourceDescriptors" }; @@ -86,10 +103,18 @@ service LoggingServiceV2 { rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/logs" - additional_bindings { get: "/v2/{parent=projects/*}/logs" } - additional_bindings { get: "/v2/{parent=organizations/*}/logs" } - additional_bindings { get: "/v2/{parent=folders/*}/logs" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/logs" } + additional_bindings { + get: "/v2/{parent=projects/*}/logs" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/logs" + } + additional_bindings { + get: "/v2/{parent=folders/*}/logs" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/logs" + } }; } } @@ -164,8 +189,8 @@ message WriteLogEntriesRequest { // Log entries with timestamps that are more than the // [logs retention period](/logging/quota-policy) in the past or more than // 24 hours in the future will not be available when calling `entries.list`. - // However, those log entries can still be exported with - // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // However, those log entries can still be + // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). // // To improve throughput and to avoid exceeding the // [quota limit](/logging/quota-policy) for calls to `entries.write`, @@ -188,7 +213,9 @@ message WriteLogEntriesRequest { // Result returned from WriteLogEntries. // empty -message WriteLogEntriesResponse {} +message WriteLogEntriesResponse { + +} // Error details for WriteLogEntries with partial success. message WriteLogEntriesPartialErrors { @@ -205,9 +232,7 @@ message WriteLogEntriesPartialErrors { message ListLogEntriesRequest { // Deprecated. Use `resource_names` instead. One or more project identifiers // or project numbers from which to retrieve log entries. Example: - // `"my-project-1A"`. If present, these project identifiers are converted to - // resource name format and added to the list of resources in - // `resource_names`. + // `"my-project-1A"`. repeated string project_ids = 1 [deprecated = true]; // Required. Names of one or more parent resources from which to @@ -218,6 +243,7 @@ message ListLogEntriesRequest { // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" // + // // Projects listed in the `project_ids` field are added to this list. repeated string resource_names = 8; @@ -320,8 +346,8 @@ message ListLogsRequest { // Result returned from ListLogs. message ListLogsResponse { // A list of log names. For example, - // `"projects/my-project/syslog"` or - // `"organizations/123/cloudresourcemanager.googleapis.com%2Factivity"`. + // `"projects/my-project/logs/syslog"` or + // `"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"`. 
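`ListLogs` pages its results through `page_token` and `next_page_token` like the other list methods in this service. A hypothetical pagination loop over the generated stub (`stub` is assumed to be a LoggingServiceV2Stub created as sketched earlier):

from google.cloud.logging_v2.proto import logging_pb2


def iter_log_names(stub, parent):
    """Yield every log name under ``parent``, following page tokens."""
    token = ""
    while True:
        response = stub.ListLogs(
            logging_pb2.ListLogsRequest(parent=parent, page_token=token)
        )
        for log_name in response.log_names:
            yield log_name
        token = response.next_page_token
        if not token:  # an empty token means the last page was reached
            return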
repeated string log_names = 3; // If there might be more results than those appearing in this response, then diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py new file mode 100644 index 000000000000..cd065d8a9311 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/logging/type/http_request.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/logging/type/http_request.proto", + package="google.logging.type", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.logging.typeB\020HttpRequestProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" + ), + serialized_pb=_b( + '\n&google/logging/type/http_request.proto\x12\x13google.logging.type\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/api/annotations.proto"\xef\x02\n\x0bHttpRequest\x12\x16\n\x0erequest_method\x18\x01 \x01(\t\x12\x13\n\x0brequest_url\x18\x02 \x01(\t\x12\x14\n\x0crequest_size\x18\x03 \x01(\x03\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x15\n\rresponse_size\x18\x05 \x01(\x03\x12\x12\n\nuser_agent\x18\x06 \x01(\t\x12\x11\n\tremote_ip\x18\x07 \x01(\t\x12\x11\n\tserver_ip\x18\r \x01(\t\x12\x0f\n\x07referer\x18\x08 \x01(\t\x12*\n\x07latency\x18\x0e \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x63\x61\x63he_lookup\x18\x0b \x01(\x08\x12\x11\n\tcache_hit\x18\t \x01(\x08\x12*\n"cache_validated_with_origin_server\x18\n \x01(\x08\x12\x18\n\x10\x63\x61\x63he_fill_bytes\x18\x0c \x01(\x03\x12\x10\n\x08protocol\x18\x0f \x01(\tB\x9f\x01\n\x17\x63om.google.logging.typeB\x10HttpRequestProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3' + ), + dependencies=[ + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], +) + + +_HTTPREQUEST = _descriptor.Descriptor( + name="HttpRequest", + full_name="google.logging.type.HttpRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="request_method", + full_name="google.logging.type.HttpRequest.request_method", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request_url", + full_name="google.logging.type.HttpRequest.request_url", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request_size", + full_name="google.logging.type.HttpRequest.request_size", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="status", + full_name="google.logging.type.HttpRequest.status", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="response_size", + full_name="google.logging.type.HttpRequest.response_size", + index=4, + number=5, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="user_agent", + full_name="google.logging.type.HttpRequest.user_agent", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="remote_ip", + full_name="google.logging.type.HttpRequest.remote_ip", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="server_ip", + full_name="google.logging.type.HttpRequest.server_ip", + index=7, + number=13, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="referer", + full_name="google.logging.type.HttpRequest.referer", + index=8, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="latency", + full_name="google.logging.type.HttpRequest.latency", + index=9, + number=14, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="cache_lookup", + full_name="google.logging.type.HttpRequest.cache_lookup", + index=10, + number=11, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cache_hit", + full_name="google.logging.type.HttpRequest.cache_hit", + index=11, + number=9, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cache_validated_with_origin_server", + full_name="google.logging.type.HttpRequest.cache_validated_with_origin_server", + index=12, + number=10, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cache_fill_bytes", + full_name="google.logging.type.HttpRequest.cache_fill_bytes", + index=13, + number=12, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="protocol", + full_name="google.logging.type.HttpRequest.protocol", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=126, + serialized_end=493, +) + +_HTTPREQUEST.fields_by_name[ + "latency" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +HttpRequest = _reflection.GeneratedProtocolMessageType( + "HttpRequest", + (_message.Message,), + dict( + DESCRIPTOR=_HTTPREQUEST, + __module__="google.logging.type.http_request_pb2", + __doc__="""A common proto for logging HTTP requests. Only contains semantics + defined by the HTTP specification. Product-specific logging information + MUST be defined in a separate message. + + + Attributes: + request_method: + The request method. Examples: ``"GET"``, ``"HEAD"``, + ``"PUT"``, ``"POST"``. + request_url: + The scheme (http, https), the host name, the path and the + query portion of the URL that was requested. Example: + ``"http://example.com/some/info?color=red"``. + request_size: + The size of the HTTP request message in bytes, including the + request headers and the request body. + status: + The response code indicating the status of response. Examples: + 200, 404. + response_size: + The size of the HTTP response message sent back to the client, + in bytes, including the response headers and the response + body. + user_agent: + The user agent sent by the client. Example: ``"Mozilla/4.0 + (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR + 1.0.3705)"``. 
+ remote_ip: + The IP address (IPv4 or IPv6) of the client that issued the + HTTP request. Examples: ``"192.168.1.1"``, + ``"FE80::0202:B3FF:FE1E:8329"``. + server_ip: + The IP address (IPv4 or IPv6) of the origin server that the + request was sent to. + referer: + The referer URL of the request, as defined in `HTTP/1.1 Header + Field Definitions + <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`__. + latency: + The request processing latency on the server, from the time + the request was received until the response was sent. + cache_lookup: + Whether or not a cache lookup was attempted. + cache_hit: + Whether or not an entity was served from cache (with or + without validation). + cache_validated_with_origin_server: + Whether or not the response was validated with the origin + server before being served from cache. This field is only + meaningful if ``cache_hit`` is True. + cache_fill_bytes: + The number of HTTP response bytes inserted into cache. Set + only when a cache fill was attempted. + protocol: + Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", + "websocket" + """, + # @@protoc_insertion_point(class_scope:google.logging.type.HttpRequest) + ), +) +_sym_db.RegisterMessage(HttpRequest) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py new file mode 100644 index 000000000000..bc429a3fca0b --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/logging/type/log_severity.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/logging/type/log_severity.proto", + package="google.logging.type", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.logging.typeB\020LogSeverityProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" + ), + serialized_pb=_b( + "\n&google/logging/type/log_severity.proto\x12\x13google.logging.type\x1a\x1cgoogle/api/annotations.proto*\x82\x01\n\x0bLogSeverity\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x64\x12\t\n\x04INFO\x10\xc8\x01\x12\x0b\n\x06NOTICE\x10\xac\x02\x12\x0c\n\x07WARNING\x10\x90\x03\x12\n\n\x05\x45RROR\x10\xf4\x03\x12\r\n\x08\x43RITICAL\x10\xd8\x04\x12\n\n\x05\x41LERT\x10\xbc\x05\x12\x0e\n\tEMERGENCY\x10\xa0\x06\x42\x9f\x01\n\x17\x63om.google.logging.typeB\x10LogSeverityProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3" + ), + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], +) + +_LOGSEVERITY = _descriptor.EnumDescriptor( + name="LogSeverity", + full_name="google.logging.type.LogSeverity", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="DEFAULT", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DEBUG", index=1, number=100, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="INFO", index=2, number=200, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="NOTICE", index=3, number=300, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WARNING", index=4, number=400, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ERROR", index=5, number=500, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CRITICAL", index=6, number=600, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ALERT", index=7, number=700, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="EMERGENCY", index=8, number=800, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=94, + serialized_end=224, +) +_sym_db.RegisterEnumDescriptor(_LOGSEVERITY) + +LogSeverity = enum_type_wrapper.EnumTypeWrapper(_LOGSEVERITY) +DEFAULT = 0 +DEBUG = 100 +INFO = 200 +NOTICE = 300 +WARNING = 400 +ERROR = 500 +CRITICAL = 600 +ALERT = 700 +EMERGENCY = 800 + + +DESCRIPTOR.enum_types_by_name["LogSeverity"] = _LOGSEVERITY +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index 2afea1062df5..1e3c84d3f419 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,10 +17,12 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; +import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -30,17 +32,31 @@ option java_outer_classname = "LoggingConfigProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; -// Service for configuring sinks used to export log entries out of -// Logging. +// Service for configuring sinks used to route log entries. service ConfigServiceV2 { + option (google.api.default_host) = "logging.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/logging.admin," + "https://www.googleapis.com/auth/logging.read"; + // Lists sinks. 
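The log_severity module above exposes the severity values both through an enum wrapper and as module-level constants. A hedged sketch mapping stdlib logging levels onto them (the mapping is an illustrative choice, not part of the generated API; the import path mirrors where this patch places the file):

import logging

from google.cloud.logging_v2.proto.logging.type import log_severity_pb2

# Stdlib level numbers happen to be one tenth of the LogSeverity values,
# but an explicit table keeps the correspondence obvious.
_LEVEL_TO_SEVERITY = {
    logging.DEBUG: log_severity_pb2.DEBUG,
    logging.INFO: log_severity_pb2.INFO,
    logging.WARNING: log_severity_pb2.WARNING,
    logging.ERROR: log_severity_pb2.ERROR,
    logging.CRITICAL: log_severity_pb2.CRITICAL,
}


def to_severity(levelno):
    """Return the closest LogSeverity for a stdlib level number."""
    return _LEVEL_TO_SEVERITY.get(levelno, log_severity_pb2.DEFAULT)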
rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/sinks" - additional_bindings { get: "/v2/{parent=projects/*}/sinks" } - additional_bindings { get: "/v2/{parent=organizations/*}/sinks" } - additional_bindings { get: "/v2/{parent=folders/*}/sinks" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/sinks" } + additional_bindings { + get: "/v2/{parent=projects/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=folders/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/sinks" + } }; } @@ -48,27 +64,41 @@ service ConfigServiceV2 { rpc GetSink(GetSinkRequest) returns (LogSink) { option (google.api.http) = { get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { get: "/v2/{sink_name=projects/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=organizations/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=folders/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=billingAccounts/*/sinks/*}" } + additional_bindings { + get: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=folders/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=billingAccounts/*/sinks/*}" + } }; } - // Creates a sink that exports specified log entries to a destination. The + // Creates a sink that exports specified log entries to a destination. The // export of newly-ingested log entries begins immediately, unless the sink's - // `writer_identity` is not permitted to write to the destination. A sink can + // `writer_identity` is not permitted to write to the destination. A sink can // export log entries only from the resource owning the sink. rpc CreateSink(CreateSinkRequest) returns (LogSink) { option (google.api.http) = { post: "/v2/{parent=*/*}/sinks" body: "sink" - additional_bindings { post: "/v2/{parent=projects/*}/sinks" body: "sink" } + additional_bindings { + post: "/v2/{parent=projects/*}/sinks" + body: "sink" + } additional_bindings { post: "/v2/{parent=organizations/*}/sinks" body: "sink" } - additional_bindings { post: "/v2/{parent=folders/*}/sinks" body: "sink" } + additional_bindings { + post: "/v2/{parent=folders/*}/sinks" + body: "sink" + } additional_bindings { post: "/v2/{parent=billingAccounts/*}/sinks" body: "sink" @@ -76,8 +106,9 @@ service ConfigServiceV2 { }; } - // Updates a sink. This method replaces the following fields in the existing + // Updates a sink. This method replaces the following fields in the existing // sink with values from the new sink: `destination`, and `filter`. + // // The updated sink might also have a new `writer_identity`; see the // `unique_writer_identity` field. 
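As the comment above notes, `UpdateSink` replaces only the fields named in the request. A hypothetical `UpdateSinkRequest` that changes just the filter via a field mask (resource names are placeholders):

from google.protobuf import field_mask_pb2

from google.cloud.logging_v2.proto import logging_config_pb2

request = logging_config_pb2.UpdateSinkRequest(
    sink_name="projects/my-project/sinks/my-sink",  # placeholder
    sink=logging_config_pb2.LogSink(filter="severity>=WARNING"),
    # Only fields listed in the mask are overwritten on the server.
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
)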
rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { @@ -124,9 +155,15 @@ service ConfigServiceV2 { rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { delete: "/v2/{sink_name=projects/*/sinks/*}" } - additional_bindings { delete: "/v2/{sink_name=organizations/*/sinks/*}" } - additional_bindings { delete: "/v2/{sink_name=folders/*/sinks/*}" } + additional_bindings { + delete: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=folders/*/sinks/*}" + } additional_bindings { delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" } @@ -137,10 +174,18 @@ service ConfigServiceV2 { rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/exclusions" - additional_bindings { get: "/v2/{parent=projects/*}/exclusions" } - additional_bindings { get: "/v2/{parent=organizations/*}/exclusions" } - additional_bindings { get: "/v2/{parent=folders/*}/exclusions" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/exclusions" } + additional_bindings { + get: "/v2/{parent=projects/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=folders/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/exclusions" + } }; } @@ -148,10 +193,18 @@ service ConfigServiceV2 { rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { option (google.api.http) = { get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { get: "/v2/{name=projects/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=organizations/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=folders/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=billingAccounts/*/exclusions/*}" } + additional_bindings { + get: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/exclusions/*}" + } }; } @@ -209,9 +262,15 @@ service ConfigServiceV2 { rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { delete: "/v2/{name=projects/*/exclusions/*}" } - additional_bindings { delete: "/v2/{name=organizations/*/exclusions/*}" } - additional_bindings { delete: "/v2/{name=folders/*/exclusions/*}" } + additional_bindings { + delete: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/exclusions/*}" + } additional_bindings { delete: "/v2/{name=billingAccounts/*/exclusions/*}" } @@ -221,9 +280,9 @@ service ConfigServiceV2 { // Describes a sink used to export log entries to one of the following // destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a -// Cloud Pub/Sub topic. A logs filter controls which log entries are -// exported. The sink must be created within a project, organization, billing -// account, or folder. +// Cloud Pub/Sub topic. A logs filter controls which log entries are exported. +// The sink must be created within a project, organization, billing account, or +// folder. 
message LogSink { // Available log entry formats. Log entries can be written to // Logging in either format and can be exported in either format. @@ -240,7 +299,7 @@ message LogSink { } // Required. The client-assigned sink identifier, unique within the - // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are + // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are // limited to 100 characters and can include only the following characters: // upper and lower-case alphanumeric characters, underscores, hyphens, and // periods. @@ -254,34 +313,33 @@ message LogSink { // // The sink's `writer_identity`, set when the sink is created, must // have permission to write to the destination or else the log - // entries are not exported. For more information, see - // [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs). + // entries are not exported. For more information, see + // [Exporting Logs with Sinks](/logging/docs/api/tasks/exporting-logs). string destination = 3; - // Optional. - // An [advanced logs filter](/logging/docs/view/advanced_filters). The only + // Optional. An [advanced logs filter](/logging/docs/view/advanced-queries). The only // exported log entries are those that are in the resource owning the sink and - // that match the filter. For example: + // that match the filter. For example: // // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR string filter = 5; // Deprecated. The log entry format to use for this sink's exported log - // entries. The v2 format is used by default and cannot be changed. + // entries. The v2 format is used by default and cannot be changed. VersionFormat output_version_format = 6 [deprecated = true]; // Output only. An IAM identity—a service account or group—under - // which Logging writes the exported log entries to the sink's - // destination. This field is set by - // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) + // which Logging writes the exported log entries to the sink's destination. + // This field is set by + // [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] // and - // [sinks.update](/logging/docs/api/reference/rest/v2/projects.sinks/update), - // based on the setting of `unique_writer_identity` in those methods. + // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + // based on the value of `unique_writer_identity` in those methods. // // Until you grant this identity write-access to the destination, log entry // exports from this sink will fail. For more information, - // see [Granting access for a - // resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). + // see [Granting Access for a + // Resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). // Consult the destination service's documentation to determine the // appropriate IAM roles to assign to the identity. string writer_identity = 8; @@ -301,11 +359,33 @@ message LogSink { // resource.type=gce_instance bool include_children = 9; - // Deprecated. This field is ignored when creating or updating sinks. - google.protobuf.Timestamp start_time = 10 [deprecated = true]; + // Optional. Destination dependent options. + oneof options { + // Optional. Options that affect sinks exporting data to BigQuery. + BigQueryOptions bigquery_options = 12; + } + + // Output only. The creation timestamp of the sink. + // + // This field may not be present for older sinks. 
+ google.protobuf.Timestamp create_time = 13; + + // Output only. The last update timestamp of the sink. + // + // This field may not be present for older sinks. + google.protobuf.Timestamp update_time = 14; +} - // Deprecated. This field is ignored when creating or updating sinks. - google.protobuf.Timestamp end_time = 11 [deprecated = true]; +// Options that change functionality of a sink exporting data to BigQuery. +message BigQueryOptions { + // Optional. Whether to use [BigQuery's partition + // tables](/bigquery/docs/partitioned-tables). By default, Logging + // creates dated tables based on the log entries' timestamps, e.g. + // syslog_20170523. With partitioned tables the date suffix is no longer + // present and [special query + // syntax](/bigquery/docs/querying-partitioned-tables) has to be used instead. + // In both cases, tables are sharded based on UTC timezone. + bool use_partitioned_tables = 1; } // The parameters to `ListSinks`. @@ -319,13 +399,13 @@ message ListSinksRequest { string parent = 1; // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. string page_token = 2; // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the + // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. int32 page_size = 3; } @@ -336,7 +416,7 @@ message ListSinksResponse { repeated LogSink sinks = 1; // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same + // `nextPageToken` is included. To get the next set of results, call the same // method again using the value of `nextPageToken` as `pageToken`. string next_page_token = 2; } @@ -371,17 +451,16 @@ message CreateSinkRequest { LogSink sink = 2; // Optional. Determines the kind of IAM identity returned as `writer_identity` - // in the new sink. If this value is omitted or set to false, and if the + // in the new sink. If this value is omitted or set to false, and if the // sink's parent is a project, then the value returned as `writer_identity` is - // the same group or service account used by Logging before the - // addition of writer identities to this API. The sink's destination must be - // in the same project as the sink itself. + // the same group or service account used by Logging before the addition of + // writer identities to this API. The sink's destination must be in the same + // project as the sink itself. // // If this field is set to true, or if the sink is owned by a non-project // resource such as an organization, then the value of `writer_identity` will - // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in - // [LogSink][google.logging.v2.LogSink]. + // be a unique service account used only for exports from the new sink. For + // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. bool unique_writer_identity = 3; } @@ -402,9 +481,8 @@ message UpdateSinkRequest { // as part of `sink_name`. 
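With the new `options` oneof, a sink exporting to BigQuery can opt into partitioned tables. An illustrative construction of such a `LogSink` (the sink name, project, and dataset are placeholders):

from google.cloud.logging_v2.proto import logging_config_pb2

sink = logging_config_pb2.LogSink(
    name="my-bq-sink",  # placeholder
    destination="bigquery.googleapis.com/projects/my-project/datasets/my_dataset",
    filter="severity>=ERROR",
    # Dated table shards are replaced by a single partitioned table.
    bigquery_options=logging_config_pb2.BigQueryOptions(
        use_partitioned_tables=True
    ),
)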
LogSink sink = 2; - // Optional. See - // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) - // for a description of this field. When updating a sink, the effect of this + // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + // for a description of this field. When updating a sink, the effect of this // field on the value of `writer_identity` in the updated sink depends on both // the old and new values of this field: // @@ -418,7 +496,7 @@ message UpdateSinkRequest { // Optional. Field mask that specifies the fields in `sink` that need // an update. A sink field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. + // in the update mask. `name` and output only fields cannot be updated. // // An empty updateMask is temporarily treated as using the following mask // for backwards compatibility purposes: @@ -448,11 +526,11 @@ message DeleteSinkRequest { } // Specifies a set of log entries that are not to be stored in -// Logging. If your project receives a large volume of logs, you might be able -// to use exclusions to reduce your chargeable logs. Exclusions are processed -// after log sinks, so you can export log entries before they are excluded. -// Audit log entries and log entries from Amazon Web Services are never -// excluded. +// Logging. If your GCP resource receives a large volume of logs, you can +// use exclusions to reduce your chargeable logs. Exclusions are +// processed after log sinks, so you can export log entries before they are +// excluded. Note that organization-level and folder-level exclusions don't +// apply to child resources, and that you can't exclude audit log entries. message LogExclusion { // Required. A client-assigned identifier, such as // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and @@ -462,22 +540,31 @@ message LogExclusion { // Optional. A description of this exclusion. string description = 2; - // Required. - // An [advanced logs filter](/logging/docs/view/advanced_filters) + // Required. An [advanced logs filter](/logging/docs/view/advanced-queries) // that matches the log entries to be excluded. By using the - // [sample function](/logging/docs/view/advanced_filters#sample), + // [sample function](/logging/docs/view/advanced-queries#sample), // you can exclude less than 100% of the matching log entries. 
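A `LogExclusion` built from the fields above, using the `sample` function the comment mentions to exclude 99% of the matching entries (identifiers are placeholders; the filter example continues in the hunk below):

from google.cloud.logging_v2.proto import logging_config_pb2

exclusion = logging_config_pb2.LogExclusion(
    name="load-balancer-exclusion",  # placeholder
    description="Drop 99% of low-severity load balancer entries",
    filter=(
        "resource.type=http_load_balancer severity<ERROR "
        "sample(insertId, 0.99)"
    ),
)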
- // For example, the following filter matches 99% of low-severity log - // entries from load balancers: + // For example, the following query matches 99% of low-severity log + // entries from Google Cloud Storage buckets: + // - // `"resource.type=http_load_balancer severity<ERROR sample(insertId, 0.99)"` - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From a0bdcc525de78ac5403ba3b04de6ffe03d32310e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 25 Sep 2019 12:35:50 -0400 Subject: [PATCH 258/855] docs: fix intersphinx reference to requests (#9294) --- packages/google-cloud-logging/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index da2faa63ec4f..524c564a1698 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } From ed990ec6a78205b311e7efac8a709c122e9d1e03 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 26 Sep 2019 10:16:25 -0700 Subject: [PATCH 259/855] codegen(logging): add deprecated 'start_time' / 'end_time' fields to 'LogSink' (#9311) --- .../logging_v2/proto/logging_config_pb2.py | 116 +++++++++++++----- .../logging_v2/proto/logging_config.proto | 6 + packages/google-cloud-logging/synth.metadata | 10 +- 3 files changed, 93 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py index 144591e49189..ae561d708480 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py @@ -31,7 +31,7 @@ "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b(
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}
:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x90\x04\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 
\x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*
}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, @@ -66,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=609, - serialized_end=672, + serialized_start=711, + serialized_end=774, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -241,6 +241,42 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.logging.v2.LogSink.start_time", + index=9, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.logging.v2.LogSink.end_time", + index=10, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -259,7 +295,7 @@ ) ], serialized_start=257, - serialized_end=683, + serialized_end=785, ) @@ -297,8 +333,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=685, - serialized_end=734, + serialized_start=787, + serialized_end=836, ) @@ -372,8 +408,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=736, - serialized_end=809, + serialized_start=838, + serialized_end=911, ) @@ -429,8 +465,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=811, - serialized_end=898, + serialized_start=913, + serialized_end=1000, ) @@ -468,8 +504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=900, - serialized_end=935, + serialized_start=1002, + serialized_end=1037, ) @@ -543,8 +579,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=937, - serialized_end=1046, + serialized_start=1039, + serialized_end=1148, ) @@ -636,8 +672,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=1049, - serialized_end=1210, + serialized_start=1151, + serialized_end=1312, ) @@ -675,8 +711,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1212, - serialized_end=1250, + serialized_start=1314, + serialized_end=1352, ) @@ -804,8 +840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1253, - serialized_end=1434, + serialized_start=1355, + serialized_end=1536, ) @@ -879,8 +915,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1436, - serialized_end=1514, + serialized_start=1538, + serialized_end=1616, ) @@ -936,8 +972,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1516, - serialized_end=1618, + serialized_start=1618, + serialized_end=1720, ) @@ -975,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1620, - serialized_end=1655, + serialized_start=1722, + serialized_end=1757, ) @@ -1032,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1657, - serialized_end=1749, + serialized_start=1759, + serialized_end=1851, ) @@ -1107,8 +1143,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1752, - serialized_end=1891, + serialized_start=1854, + serialized_end=1993, ) @@ -1146,8 +1182,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1893, - serialized_end=1931, + serialized_start=1995, + serialized_end=2033, ) _LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT @@ -1158,6 +1194,12 @@ _LOGSINK.fields_by_name[ "update_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK _LOGSINK.oneofs_by_name["options"].fields.append( _LOGSINK.fields_by_name["bigquery_options"] @@ -1281,6 +1323,10 @@ update_time: Output only. The last update timestamp of the sink. This field may not be present for older sinks. + start_time: + Do not use. This field is ignored. + end_time: + Do not use. This field is ignored. 
""", # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) ), @@ -1726,6 +1772,8 @@ DESCRIPTOR._options = None _LOGSINK.fields_by_name["output_version_format"]._options = None +_LOGSINK.fields_by_name["start_time"]._options = None +_LOGSINK.fields_by_name["end_time"]._options = None _CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( name="ConfigServiceV2", @@ -1735,8 +1783,8 @@ serialized_options=_b( "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" ), - serialized_start=1934, - serialized_end=5380, + serialized_start=2036, + serialized_end=5482, methods=[ _descriptor.MethodDescriptor( name="ListSinks", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index 1e3c84d3f419..a9ccdf51cb19 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -374,6 +374,12 @@ message LogSink { // // This field may not be present for older sinks. google.protobuf.Timestamp update_time = 14; + + // Do not use. This field is ignored. + google.protobuf.Timestamp start_time = 10 [deprecated = true]; + + // Do not use. This field is ignored. + google.protobuf.Timestamp end_time = 11 [deprecated = true]; } // Options that change functionality of a sink exporting data to BigQuery. diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index d9ea35fc2dd8..89c4354a1db7 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-23T16:17:44.431083Z", + "updateTime": "2019-09-26T12:29:00.912920Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "999d0930cea7a7cb3147a7c5432e1f011060d549", - "internalRef": "270363949" + "sha": "4c2ca81a0c976d4d37a8999984b7894d9af22124", + "internalRef": "271130964" } }, { From 9f97c7bd236cb9f01f6f39b09bce30c0ce9595fe Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 30 Sep 2019 10:47:49 -0400 Subject: [PATCH 260/855] chore(logging): release logging 1.13.0 (#9269) --- packages/google-cloud-logging/CHANGELOG.md | 18 ++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index f7ad1b7451ed..2fab1ee705dd 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.13.0 + +09-23-2019 10:00 PDT + +### Implementation Changes +- Pass 'stream' argument to super in 'ContainerEngineHandler.__init__'. 
([#9166](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9166)) + +### New Features +- Add LoggingV2Servicer, LogSinks, logging_metrics, and log_entry. Add LogSeverity and HttpRequest types (via synth). ([#9262](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9262)) +- Add client_options to logging v1 ([#9046](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9046)) + +### Documentation +- Remove compatibility badges from READMEs. ([#9035](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9085)) +- Delete custom synth removing gRPC send/recv msg size limits. ([#8939](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8939)) + ## 1.12.1 08-01-2019 09:45 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6d3da8a110f9..8e88a95058de 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.12.1' +version = '1.13.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From aa78b3e82b19b26c49449cd2e562edb89e369ad3 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 10 Oct 2019 11:07:29 -0700 Subject: [PATCH 261/855] fix(logging): fix proto copy (#9420) --- .../docs/gapic/v2/api.rst | 6 + .../docs/gapic/v2/types.rst | 5 + packages/google-cloud-logging/docs/index.rst | 25 +- packages/google-cloud-logging/docs/usage.rst | 3 + packages/google-cloud-logging/docs/v1.rst | 18 + packages/google-cloud-logging/docs/v2.rst | 7 + .../proto/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry_pb2.py | 873 -------- .../logging_v2/proto/log_entry_pb2_grpc.py | 2 - .../logging_v2/proto/logging_config_pb2.py | 1905 ----------------- .../proto/logging_config_pb2_grpc.py | 213 -- .../logging_v2/proto/logging_metrics_pb2.py | 1018 --------- .../proto/logging_metrics_pb2_grpc.py | 118 - .../cloud/logging_v2/proto/logging_pb2.py | 1312 ------------ .../logging_v2/proto/logging_pb2_grpc.py | 130 -- .../cloud/logging_v2/proto/http_request.proto | 93 - .../cloud/logging_v2/proto/log_entry_pb2.py | 66 +- .../cloud/logging_v2/proto/log_severity.proto | 73 - .../logging_v2/proto/logging/type/__init__.py | 0 .../proto/logging/type/http_request_pb2.py | 405 ---- .../logging/type/http_request_pb2_grpc.py | 2 - .../proto/logging/type/log_severity_pb2.py | 93 - .../logging/type/log_severity_pb2_grpc.py | 2 - .../logging_v2/proto/logging_config_pb2.py | 346 ++- .../proto/logging_config_pb2_grpc.py | 13 +- .../logging_v2/proto/logging_metrics_pb2.py | 107 +- .../cloud/logging_v2/proto/logging_pb2.py | 82 +- .../logging_v2/proto/logging_pb2_grpc.py | 6 +- packages/google-cloud-logging/synth.metadata | 10 +- packages/google-cloud-logging/synth.py | 6 +- 30 files changed, 497 insertions(+), 6442 deletions(-) create mode 100644 packages/google-cloud-logging/docs/gapic/v2/api.rst create mode 100644 packages/google-cloud-logging/docs/gapic/v2/types.rst create mode 100644 packages/google-cloud-logging/docs/v1.rst create mode 100644 packages/google-cloud-logging/docs/v2.rst delete mode 100644
packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py diff --git a/packages/google-cloud-logging/docs/gapic/v2/api.rst b/packages/google-cloud-logging/docs/gapic/v2/api.rst new file mode 100644 index 000000000000..2dc6bf6fcc6b --- /dev/null +++ b/packages/google-cloud-logging/docs/gapic/v2/api.rst @@ -0,0 +1,6 @@ +Client for Stackdriver Logging API +================================== + +.. automodule:: google.cloud.logging_v2 + :members: + :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/gapic/v2/types.rst b/packages/google-cloud-logging/docs/gapic/v2/types.rst new file mode 100644 index 000000000000..5521d4f9bc12 --- /dev/null +++ b/packages/google-cloud-logging/docs/gapic/v2/types.rst @@ -0,0 +1,5 @@ +Types for Stackdriver Logging API Client +======================================== + +.. automodule:: google.cloud.logging_v2.types + :members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 67ad362dfc69..f617201a90ab 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -1,29 +1,12 @@ .. include:: README.rst -Usage Documentation +Documentation ------------------- .. toctree:: - :maxdepth: 2 - - usage - -Api Reference -------------- -.. 
toctree:: - :maxdepth: 2 + :maxdepth: 3 - client - logger - entries - metric - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base + v1 + v2 Changelog ~~~~~~~~~ diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 122a850fecba..f5662bcbaa08 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -1,3 +1,6 @@ +Usage Guide +=========== + Writing log entries ------------------- diff --git a/packages/google-cloud-logging/docs/v1.rst b/packages/google-cloud-logging/docs/v1.rst new file mode 100644 index 000000000000..f4f79d377a65 --- /dev/null +++ b/packages/google-cloud-logging/docs/v1.rst @@ -0,0 +1,18 @@ +v1 +============== +.. toctree:: + :maxdepth: 2 + + usage + client + logger + entries + metric + sink + stdlib-usage + handlers + handlers-app-engine + handlers-container-engine + transports-sync + transports-thread + transports-base \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/v2.rst b/packages/google-cloud-logging/docs/v2.rst new file mode 100644 index 000000000000..8dfc18b48171 --- /dev/null +++ b/packages/google-cloud-logging/docs/v2.rst @@ -0,0 +1,7 @@ +v2 +---------------- +.. toctree:: + :maxdepth: 2 + + gapic/v2/api + gapic/v2/types \ No newline at end of file diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py deleted file mode 100644 index 1f2b1ca3b64d..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py +++ /dev/null @@ -1,873 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
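# --- Editorial aside (illustrative sketch, not part of this patch) ---------
# This commit deletes an accidentally nested duplicate of the generated
# modules (google/cloud/logging_v2/proto/cloud/logging_v2/proto/...); the
# canonical copies under google/cloud/logging_v2/proto/ are kept and
# regenerated, per the diffstat above. A minimal sanity check against the
# retained module, assuming the package is installed; the project and log
# names are placeholders:
#
#     from google.cloud.logging_v2.proto import log_entry_pb2
#
#     entry = log_entry_pb2.LogEntry(log_name="projects/my-project/logs/app")
#     entry.text_payload = "hello"  # selects the "payload" oneof
#     assert entry.WhichOneof("payload") == "text_payload"
# ---------------------------------------------------------------------------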
-# source: google/cloud/logging_v2/proto/log_entry.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.logging.type import ( - http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, -) -from google.logging.type import ( - log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, -) -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/log_entry.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 
\x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.LogEntry.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogEntry.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogEntry.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1057, - serialized_end=1102, -) - -_LOGENTRY = _descriptor.Descriptor( - name="LogEntry", - full_name="google.logging.v2.LogEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.LogEntry.log_name", - index=0, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.LogEntry.resource", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="proto_payload", - full_name="google.logging.v2.LogEntry.proto_payload", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.logging.v2.LogEntry.text_payload", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="json_payload", - full_name="google.logging.v2.LogEntry.json_payload", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp", - full_name="google.logging.v2.LogEntry.timestamp", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="receive_timestamp", - full_name="google.logging.v2.LogEntry.receive_timestamp", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.logging.v2.LogEntry.severity", - index=7, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="insert_id", - full_name="google.logging.v2.LogEntry.insert_id", - index=8, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.logging.v2.LogEntry.http_request", - index=9, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.LogEntry.labels", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.logging.v2.LogEntry.metadata", - index=11, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation", - full_name="google.logging.v2.LogEntry.operation", - index=12, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace", - full_name="google.logging.v2.LogEntry.trace", - index=13, - 
number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="span_id", - full_name="google.logging.v2.LogEntry.span_id", - index=14, - number=27, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace_sampled", - full_name="google.logging.v2.LogEntry.trace_sampled", - index=15, - number=30, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_location", - full_name="google.logging.v2.LogEntry.source_location", - index=16, - number=23, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.logging.v2.LogEntry.payload", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=331, - serialized_end=1113, -) - - -_LOGENTRYOPERATION = _descriptor.Descriptor( - name="LogEntryOperation", - full_name="google.logging.v2.LogEntryOperation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="google.logging.v2.LogEntryOperation.id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="producer", - full_name="google.logging.v2.LogEntryOperation.producer", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="first", - full_name="google.logging.v2.LogEntryOperation.first", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last", - full_name="google.logging.v2.LogEntryOperation.last", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - 
extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1115, - serialized_end=1193, -) - - -_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( - name="LogEntrySourceLocation", - full_name="google.logging.v2.LogEntrySourceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file", - full_name="google.logging.v2.LogEntrySourceLocation.file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="line", - full_name="google.logging.v2.LogEntrySourceLocation.line", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="function", - full_name="google.logging.v2.LogEntrySourceLocation.function", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1195, - serialized_end=1265, -) - -_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY -_LOGENTRY.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_LOGENTRY.fields_by_name[ - "proto_payload" -].message_type = google_dot_protobuf_dot_any__pb2._ANY -_LOGENTRY.fields_by_name[ - "json_payload" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_LOGENTRY.fields_by_name[ - "timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "receive_timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "severity" -].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY -_LOGENTRY.fields_by_name[ - "http_request" -].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST -_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY -_LOGENTRY.fields_by_name[ - "metadata" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION -_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["proto_payload"] -) -_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["text_payload"] -) -_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["json_payload"] -) 
-_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY -DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION -DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogEntry = _reflection.GeneratedProtocolMessageType( - "LogEntry", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRY_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) - ), - ), - DESCRIPTOR=_LOGENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""An individual entry in a log. - - - Attributes: - log_name: - Required. The resource name of the log to which this log entry - belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may - optionally be used in place of PROJECT\_ID. The project number - is translated to its corresponding PROJECT\_ID internally and - the ``log_name`` field will contain PROJECT\_ID in queries and - exports. ``[LOG_ID]`` must be URL-encoded within - ``log_name``. Example: ``"organizations/1234567890/logs/cloudr - esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must - be less than 512 characters long and can only include the - following characters: upper and lower case alphanumeric - characters, forward-slash, underscore, hyphen, and period. - For backward compatibility, if ``log_name`` begins with a - forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. Listing - the log entry will not show the leading slash and filtering - for a log name with a leading slash will never return any - results. - resource: - Required. The monitored resource that produced this log entry. - Example: a log entry that reports a database error would be - associated with the monitored resource designating the - particular database that reported the error. - payload: - Optional. The log entry payload, which can be one of multiple - types. - proto_payload: - The log entry payload, represented as a protocol buffer. Some - Google Cloud Platform services use this field for their log - entry payloads. The following protocol buffer types are - supported; user-defined types are not supported: - "type.googleapis.com/google.cloud.audit.AuditLog" - "type.googleapis.com/google.appengine.logging.v1.RequestLog" - text_payload: - The log entry payload, represented as a Unicode string - (UTF-8). - json_payload: - The log entry payload, represented as a structure that is - expressed as a JSON object. - timestamp: - Optional. The time the event described by the log entry - occurred. This time is used to compute the log entry's age and - to enforce the logs retention period. If this field is omitted - in a new log entry, then Logging assigns it the current time. - Timestamps have nanosecond accuracy, but trailing zeros in the - fractional seconds might be omitted when the timestamp is - displayed. Incoming log entries should have timestamps that - are no more than the `logs retention period - `__ in the past, and no more than 24 hours in - the future. 
Log entries outside those time boundaries will not - be available when calling ``entries.list``, but those log - entries can still be `exported with LogSinks - `__. - receive_timestamp: - Output only. The time the log entry was received by Logging. - severity: - Optional. The severity of the log entry. The default value is - ``LogSeverity.DEFAULT``. - insert_id: - Optional. A unique identifier for the log entry. If you - provide a value, then Logging considers other log entries in - the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which can be removed. If - omitted in new log entries, then Logging assigns its own - unique identifier. The ``insert_id`` is also used to order log - entries that have the same ``timestamp`` value. - http_request: - Optional. Information about the HTTP request associated with - this log entry, if applicable. - labels: - Optional. A set of user-defined (key, value) data that - provides additional information about the log entry. - metadata: - Deprecated. Output only. Additional metadata about the - monitored resource. Only ``k8s_container``, ``k8s_pod``, and - ``k8s_node`` MonitoredResources have this field populated for - GKE versions older than 1.12.6. For GKE versions 1.12.6 and - above, the ``metadata`` field has been deprecated. The - Kubernetes pod labels that used to be in - ``metadata.userLabels`` will now be present in the ``labels`` - field with a key prefix of ``k8s-pod/``. The Stackdriver - system labels that were present in the - ``metadata.systemLabels`` field will no longer be available in - the LogEntry. - operation: - Optional. Information about an operation associated with the - log entry, if applicable. - trace: - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: ``projects/my- - projectid/traces/06796866738c859f2f19b7cfb3214824`` - span_id: - Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the - Trace API v2 uses: a 16-character hexadecimal encoding of an - 8-byte array, such as "000000000000004a". - trace_sampled: - Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the - ``trace`` field was sampled for storage in a trace backend. - False means that the trace was not sampled for storage when - this log entry was written, or the sampling decision was - unknown at the time. A non-sampled ``trace`` value is still - useful as a request correlation identifier. The default is - False. - source_location: - Optional. Source code location information associated with the - log entry, if any. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - ), -) -_sym_db.RegisterMessage(LogEntry) -_sym_db.RegisterMessage(LogEntry.LabelsEntry) - -LogEntryOperation = _reflection.GeneratedProtocolMessageType( - "LogEntryOperation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYOPERATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about a potentially long-running operation with - which a log entry is associated. - - - Attributes: - id: - Optional. An arbitrary operation identifier. Log entries with - the same identifier are assumed to be part of the same - operation. - producer: - Optional. An arbitrary producer identifier. 
The combination of - ``id`` and ``producer`` must be globally unique. Examples for - ``producer``: ``"MyDivision.MyBigCompany.com"``, - ``"github.com/MyProject/MyApplication"``. - first: - Optional. Set this to True if this is the first log entry in - the operation. - last: - Optional. Set this to True if this is the last log entry in - the operation. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - ), -) -_sym_db.RegisterMessage(LogEntryOperation) - -LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( - "LogEntrySourceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYSOURCELOCATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about the source code location that produced the - log entry. - - - Attributes: - file: - Optional. Source file name. Depending on the runtime - environment, this might be a simple name or a fully-qualified - name. - line: - Optional. Line within the source file. 1-based; 0 indicates no - line number available. - function: - Optional. Human-readable name of the function or method being - invoked, with optional context such as the class or package - name. This information may be used in contexts such as the - logs viewer, where a file and line number are less meaningful. - The format can vary by language. For example: - ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` - (Go), ``function`` (Python). - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) - ), -) -_sym_db.RegisterMessage(LogEntrySourceLocation) - - -DESCRIPTOR._options = None -_LOGENTRY_LABELSENTRY._options = None -_LOGENTRY.fields_by_name["metadata"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py deleted file mode 100644 index ae561d708480..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py +++ /dev/null @@ -1,1905 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging_config.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging_config.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x90\x04\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v
2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( - name="VersionFormat", - full_name="google.logging.v2.LogSink.VersionFormat", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VERSION_FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="V2", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=711, - serialized_end=774, -) -_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) - - -_LOGSINK = _descriptor.Descriptor( - name="LogSink", - full_name="google.logging.v2.LogSink", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogSink.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination", - 
full_name="google.logging.v2.LogSink.destination", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogSink.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_version_format", - full_name="google.logging.v2.LogSink.output_version_format", - index=3, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="writer_identity", - full_name="google.logging.v2.LogSink.writer_identity", - index=4, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_children", - full_name="google.logging.v2.LogSink.include_children", - index=5, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bigquery_options", - full_name="google.logging.v2.LogSink.bigquery_options", - index=6, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogSink.create_time", - index=7, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogSink.update_time", - index=8, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.logging.v2.LogSink.start_time", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.logging.v2.LogSink.end_time", - index=10, - number=11, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="options", - full_name="google.logging.v2.LogSink.options", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=257, - serialized_end=785, -) - - -_BIGQUERYOPTIONS = _descriptor.Descriptor( - name="BigQueryOptions", - full_name="google.logging.v2.BigQueryOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="use_partitioned_tables", - full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=787, - serialized_end=836, -) - - -_LISTSINKSREQUEST = _descriptor.Descriptor( - name="ListSinksRequest", - full_name="google.logging.v2.ListSinksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListSinksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListSinksRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListSinksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=838, - serialized_end=911, -) - - -_LISTSINKSRESPONSE = _descriptor.Descriptor( - name="ListSinksResponse", - full_name="google.logging.v2.ListSinksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sinks", - full_name="google.logging.v2.ListSinksResponse.sinks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListSinksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=913, - serialized_end=1000, -) - - -_GETSINKREQUEST = _descriptor.Descriptor( - name="GetSinkRequest", - full_name="google.logging.v2.GetSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.GetSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1002, - serialized_end=1037, -) - - -_CREATESINKREQUEST = _descriptor.Descriptor( - name="CreateSinkRequest", - full_name="google.logging.v2.CreateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateSinkRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.CreateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1039, - serialized_end=1148, -) - - -_UPDATESINKREQUEST = _descriptor.Descriptor( - name="UpdateSinkRequest", - full_name="google.logging.v2.UpdateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.UpdateSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.UpdateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateSinkRequest.update_mask", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1151, - serialized_end=1312, -) - - -_DELETESINKREQUEST = _descriptor.Descriptor( - name="DeleteSinkRequest", - full_name="google.logging.v2.DeleteSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.DeleteSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1314, - serialized_end=1352, -) - - -_LOGEXCLUSION = _descriptor.Descriptor( - name="LogExclusion", - full_name="google.logging.v2.LogExclusion", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogExclusion.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogExclusion.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogExclusion.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.logging.v2.LogExclusion.disabled", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogExclusion.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogExclusion.update_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1355, - serialized_end=1536, -) - - -_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( - name="ListExclusionsRequest", - full_name="google.logging.v2.ListExclusionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListExclusionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListExclusionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListExclusionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1538, - serialized_end=1616, -) - - -_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( - name="ListExclusionsResponse", - full_name="google.logging.v2.ListExclusionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="exclusions", - full_name="google.logging.v2.ListExclusionsResponse.exclusions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListExclusionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1618, - serialized_end=1720, -) - - -_GETEXCLUSIONREQUEST = _descriptor.Descriptor( - name="GetExclusionRequest", - full_name="google.logging.v2.GetExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.GetExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1722, - serialized_end=1757, -) - - -_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="CreateExclusionRequest", - full_name="google.logging.v2.CreateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateExclusionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.CreateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1759, - serialized_end=1851, -) - - -_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="UpdateExclusionRequest", - full_name="google.logging.v2.UpdateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.UpdateExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.UpdateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateExclusionRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1854, - serialized_end=1993, -) - - -_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="DeleteExclusionRequest", - full_name="google.logging.v2.DeleteExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.DeleteExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1995, - serialized_end=2033, -) - -_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT -_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS -_LOGSINK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK -_LOGSINK.oneofs_by_name["options"].fields.append( - _LOGSINK.fields_by_name["bigquery_options"] -) -_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ - "options" -] -_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK -_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGEXCLUSION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGEXCLUSION.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION -_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK -DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS -DESCRIPTOR.message_types_by_name["ListSinksRequest"] = 
_LISTSINKSREQUEST -DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE -DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST -DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST -DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST -DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST -DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION -DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST -DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE -DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogSink = _reflection.GeneratedProtocolMessageType( - "LogSink", - (_message.Message,), - dict( - DESCRIPTOR=_LOGSINK, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes a sink used to export log entries to one of the following - destinations in any project: a Cloud Storage bucket, a BigQuery dataset, - or a Cloud Pub/Sub topic. A logs filter controls which log entries are - exported. The sink must be created within a project, organization, - billing account, or folder. - - - Attributes: - name: - Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink - identifiers are limited to 100 characters and can include only - the following characters: upper and lower-case alphanumeric - characters, underscores, hyphens, and periods. - destination: - Required. The export destination: :: - "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis - .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo - gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The - sink's ``writer_identity``, set when the sink is created, must - have permission to write to the destination or else the log - entries are not exported. For more information, see `Exporting - Logs with Sinks `__. - filter: - Optional. An `advanced logs filter - `__. The only exported - log entries are those that are in the resource owning the sink - and that match the filter. For example: :: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND - severity>=ERROR - output_version_format: - Deprecated. The log entry format to use for this sink's - exported log entries. The v2 format is used by default and - cannot be changed. - writer_identity: - Output only. An IAM identity—a service account or group—under - which Logging writes the exported log entries to the sink's - destination. This field is set by - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - and - [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - based on the value of ``unique_writer_identity`` in those - methods. Until you grant this identity write-access to the - destination, log entry exports from this sink will fail. For - more information, see `Granting Access for a Resource - `__. Consult the - destination service's documentation to determine the - appropriate IAM roles to assign to the identity. - include_children: - Optional. This field applies only to sinks owned by - organizations and folders. 
If the field is false, the default, - only the logs owned by the sink's parent resource are - available for export. If the field is true, then logs from all - the projects, folders, and billing accounts contained in the - sink's parent resource are also available for export. Whether - a particular log entry from the children is exported depends - on the sink's filter expression. For example, if this field is - true, then the filter ``resource.type=gce_instance`` would - export all Compute Engine VM instance log entries from all - projects in the sink's parent. To only export entries from - certain child projects, filter on the project part of the log - name: :: logName:("projects/test-project1/" OR - "projects/test-project2/") AND resource.type=gce_instance - options: - Optional. Destination dependent options. - bigquery_options: - Optional. Options that affect sinks exporting data to - BigQuery. - create_time: - Output only. The creation timestamp of the sink. This field - may not be present for older sinks. - update_time: - Output only. The last update timestamp of the sink. This - field may not be present for older sinks. - start_time: - Do not use. This field is ignored. - end_time: - Do not use. This field is ignored. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - ), -) -_sym_db.RegisterMessage(LogSink) - -BigQueryOptions = _reflection.GeneratedProtocolMessageType( - "BigQueryOptions", - (_message.Message,), - dict( - DESCRIPTOR=_BIGQUERYOPTIONS, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data to BigQuery. - - - Attributes: - use_partitioned_tables: - Optional. Whether to use `BigQuery's partition tables - `__. By default, Logging - creates dated tables based on the log entries' timestamps, - e.g. syslog\_20170523. With partitioned tables the date suffix - is no longer present and `special query syntax - `__ has to be used - instead. In both cases, tables are sharded based on UTC - timezone. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) - ), -) -_sym_db.RegisterMessage(BigQueryOptions) - -ListSinksRequest = _reflection.GeneratedProtocolMessageType( - "ListSinksRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``ListSinks``. - - - Attributes: - parent: - Required. The parent resource whose sinks are to be listed: - :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - ), -) -_sym_db.RegisterMessage(ListSinksRequest) - -ListSinksResponse = _reflection.GeneratedProtocolMessageType( - "ListSinksResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Result returned from ``ListSinks``. - - - Attributes: - sinks: - A list of sinks. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - ), -) -_sym_db.RegisterMessage(ListSinksResponse) - -GetSinkRequest = _reflection.GeneratedProtocolMessageType( - "GetSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``GetSink``. - - - Attributes: - sink_name: - Required. The resource name of the sink: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - ), -) -_sym_db.RegisterMessage(GetSinkRequest) - -CreateSinkRequest = _reflection.GeneratedProtocolMessageType( - "CreateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``CreateSink``. - - - Attributes: - parent: - Required. The resource in which to create the sink: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- - project"``, ``"organizations/123456789"``. - sink: - Required. The new sink, whose ``name`` parameter is a sink - identifier that is not already in use. - unique_writer_identity: - Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is omitted - or set to false, and if the sink's parent is a project, then - the value returned as ``writer_identity`` is the same group or - service account used by Logging before the addition of writer - identities to this API. The sink's destination must be in the - same project as the sink itself. If this field is set to - true, or if the sink is owned by a non-project resource such - as an organization, then the value of ``writer_identity`` will - be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in - [LogSink][google.logging.v2.LogSink]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - ), -) -_sym_db.RegisterMessage(CreateSinkRequest) - -UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``UpdateSink``. - - - Attributes: - sink_name: - Required. 
The full resource name of the sink to update, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - sink: - Required. The updated sink, whose name is the same identifier - that appears as part of ``sink_name``. - unique_writer_identity: - Optional. See - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - for a description of this field. When updating a sink, the - effect of this field on the value of ``writer_identity`` in - the updated sink depends on both the old and new values of - this field: - If the old and new values of this field are - both false or both true, then there is no change to the - sink's ``writer_identity``. - If the old value is false and - the new value is true, then ``writer_identity`` is changed - to a unique service account. - It is an error if the old - value is true and the new value is set to false or - defaulted to false. - update_mask: - Optional. Field mask that specifies the fields in ``sink`` - that need an update. A sink field will be overwritten if, and - only if, it is in the update mask. ``name`` and output only - fields cannot be updated. An empty updateMask is temporarily - treated as using the following mask for backwards - compatibility purposes: destination,filter,includeChildren At - some point in the future, this behavior will be removed and - specifying an empty updateMask will be an error. For a - detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/ - google.protobuf#google.protobuf.FieldMask Example: - ``updateMask=filter``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - ), -) -_sym_db.RegisterMessage(UpdateSinkRequest) - -DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``DeleteSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to delete, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - ), -) -_sym_db.RegisterMessage(DeleteSinkRequest) - -LogExclusion = _reflection.GeneratedProtocolMessageType( - "LogExclusion", - (_message.Message,), - dict( - DESCRIPTOR=_LOGEXCLUSION, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Specifies a set of log entries that are not to be stored in Logging. If - your GCP resource receives a large volume of logs, you can use - exclusions to reduce your chargeable logs. Exclusions are processed - after log sinks, so you can export log entries before they are excluded. - Note that organization-level and folder-level exclusions don't apply to - child resources, and that you can't exclude audit log entries. - - - Attributes: - name: - Required. A client-assigned identifier, such as ``"load- - balancer-exclusion"``.
Identifiers are limited to 100 - characters and can include only letters, digits, underscores, - hyphens, and periods. - description: - Optional. A description of this exclusion. - filter: - Required. An `advanced logs filter - `__ that matches the log - entries to be excluded. By using the `sample function - `__, you can - exclude less than 100% of the matching log entries. For - example, the following query matches 99% of low-severity log - entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - 
google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( - name="ApiVersion", - full_name="google.logging.v2.LogMetric.ApiVersion", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="V2", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=785, - serialized_end=813, -) -_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) - - -_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( - name="LabelExtractorsEntry", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=729, - serialized_end=783, -) - -_LOGMETRIC = _descriptor.Descriptor( - name="LogMetric", - full_name="google.logging.v2.LogMetric", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogMetric.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogMetric.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogMetric.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_descriptor", - full_name="google.logging.v2.LogMetric.metric_descriptor", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_extractor", - full_name="google.logging.v2.LogMetric.value_extractor", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_extractors", - full_name="google.logging.v2.LogMetric.label_extractors", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_options", - full_name="google.logging.v2.LogMetric.bucket_options", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogMetric.create_time", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogMetric.update_time", - index=8, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.logging.v2.LogMetric.version", - index=9, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], - enum_types=[_LOGMETRIC_APIVERSION], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=282, - serialized_end=813, -) - - -_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( - name="ListLogMetricsRequest", - full_name="google.logging.v2.ListLogMetricsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogMetricsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogMetricsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogMetricsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=815, - serialized_end=893, -) - - -_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( - name="ListLogMetricsResponse", - full_name="google.logging.v2.ListLogMetricsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metrics", - full_name="google.logging.v2.ListLogMetricsResponse.metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=895, - serialized_end=991, -) - - -_GETLOGMETRICREQUEST = _descriptor.Descriptor( - name="GetLogMetricRequest", - full_name="google.logging.v2.GetLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.GetLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=993, - serialized_end=1035, -) - - -_CREATELOGMETRICREQUEST = _descriptor.Descriptor( - name="CreateLogMetricRequest", - full_name="google.logging.v2.CreateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateLogMetricRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.CreateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1037, - serialized_end=1123, -) - - -_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( - name="UpdateLogMetricRequest", - full_name="google.logging.v2.UpdateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.UpdateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1125, - serialized_end=1216, -) - - -_DELETELOGMETRICREQUEST = _descriptor.Descriptor( - name="DeleteLogMetricRequest", - full_name="google.logging.v2.DeleteLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1218, - serialized_end=1263, -) - -_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC -_LOGMETRIC.fields_by_name[ - "metric_descriptor" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LOGMETRIC.fields_by_name[ - "label_extractors" -].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY -_LOGMETRIC.fields_by_name[ - "bucket_options" -].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS -_LOGMETRIC.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION -_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC -_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC -_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC -DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST 
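For orientation, a minimal sketch (not part of this patch) of how the generated metric messages wired up here are typically constructed once the module is importable. The import path mirrors the ``__module__`` value set on these descriptors; the project ID, filter, and extractor expressions are placeholder values, and the distribution example follows the ``value_extractor`` rules described in the LogMetric docstring below.

    # Sketch only: building a logs-based distribution metric from the
    # generated classes; every literal value below is hypothetical.
    from google.cloud.logging_v2.proto import logging_metrics_pb2

    metric = logging_metrics_pb2.LogMetric(
        name="nginx/requests",  # client-assigned metric identifier
        description="Distribution of request quantities.",
        filter="resource.type=gae_app AND severity>=ERROR",
        # REGEXP_EXTRACT takes a log entry field and an RE2 regex with
        # exactly one capture group; the match is recorded as a double.
        value_extractor='REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\\d+).*")',
        # Map of label key to extractor expression (LabelExtractorsEntry).
        label_extractors={"method": "EXTRACT(protoPayload.method)"},
    )

    request = logging_metrics_pb2.CreateLogMetricRequest(
        parent="projects/my-project-id", metric=metric
    )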
-DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE -DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = _DELETELOGMETRICREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogMetric = _reflection.GeneratedProtocolMessageType( - "LogMetric", - (_message.Message,), - dict( - LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( - "LabelExtractorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) - ), - ), - DESCRIPTOR=_LOGMETRIC, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Describes a logs-based metric. The value of the metric is the number of - log entries that match a logs filter in a given time interval. - - Logs-based metric can also be used to extract values from logs and - create a a distribution of the values. The distribution records the - statistics of the extracted values along with an optional histogram of - the values as specified by the bucket options. - - - Attributes: - name: - Required. The client-assigned metric identifier. Examples: - ``"error_count"``, ``"nginx/requests"``. Metric identifiers - are limited to 100 characters and can include only the - following characters: ``A-Z``, ``a-z``, ``0-9``, and the - special characters ``_-.,+!*',()%/``. The forward-slash - character (``/``) denotes a hierarchy of name pieces, and it - cannot be the first character of the name. The metric - identifier in this field must not be `URL-encoded - `__. However, - when the metric identifier appears as the ``[METRIC_ID]`` part - of a ``metric_name`` API parameter, then the metric identifier - must be URL-encoded. Example: ``"projects/my- - project/metrics/nginx%2Frequests"``. - description: - Optional. A description of this metric, which is used in - documentation. The maximum length of the description is 8000 - characters. - filter: - Required. An `advanced logs filter - `__ which is used to - match log entries. Example: :: "resource.type=gae_app - AND severity>=ERROR" The maximum length of the filter is - 20000 characters. - metric_descriptor: - Optional. The metric descriptor associated with the logs-based - metric. If unspecified, it uses a default metric descriptor - with a DELTA metric kind, INT64 value type, with no labels and - a unit of "1". Such a metric counts the number of log entries - matching the ``filter`` expression. The ``name``, ``type``, - and ``description`` fields in the ``metric_descriptor`` are - output only, and is constructed using the ``name`` and - ``description`` field in the LogMetric. To create a logs- - based metric that records a distribution of log values, a - DELTA metric kind with a DISTRIBUTION value type must be used - along with a ``value_extractor`` expression in the LogMetric. - Each label in the metric descriptor must have a matching label - name as the key and an extractor expression as the value in - the ``label_extractors`` map. The ``metric_kind`` and - ``value_type`` fields in the ``metric_descriptor`` cannot be - updated once initially configured. 
New labels can be added in - the ``metric_descriptor``, but existing labels cannot be - modified except for their description. - value_extractor: - Optional. A ``value_extractor`` is required when using a - distribution logs-based metric to extract the values to record - from a log entry. Two functions are supported for value - extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, - regex)``. The arguments are: 1. field: The name of the log - entry field from which the value is to be extracted. 2. regex: - A regular expression using the Google RE2 syntax - (https://github.com/google/re2/wiki/Syntax) with a single - capture group to extract data from the specified log entry - field. The value of the field is converted to a string before - applying the regex. It is an error to specify a regex that - does not include exactly one capture group. The result of the - extraction must be convertible to a double type, as the - distribution always records double values. If either the - extraction or the conversion to double fails, then those - values are not recorded in the distribution. Example: - ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors: - Optional. A map from a label key string to an extractor - expression which is used to extract data from a log entry - field and assign as the label value. Each label key specified - in the LabelDescriptor must have an associated extractor - expression in this map. The syntax of the extractor expression - is the same as for the ``value_extractor`` field. The - extracted value is converted to the type defined in the label - descriptor. If either the extraction or the type - conversion fails, the label will have a default value. The - default value for a string label is an empty string, for an - integer label it is 0, and for a boolean label it is ``false``. - Note that there are upper bounds on the maximum number of - labels and the number of active time series that are allowed - in a project. - bucket_options: - Optional. The ``bucket_options`` are required when the logs- - based metric is using a DISTRIBUTION value type and they - describe the bucket boundaries used to create a histogram of - the extracted values. - create_time: - Output only. The creation timestamp of the metric. This field - may not be present for older metrics. - update_time: - Output only. The last update timestamp of the metric. This - field may not be present for older metrics. - version: - Deprecated. The API version that created or updated this - metric. The v2 format is used by default and cannot be - changed. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - ), -) -_sym_db.RegisterMessage(LogMetric) -_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) - -ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to ListLogMetrics. - - - Attributes: - parent: - Required. The name of the project containing the metrics: :: - "projects/[PROJECT_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request.
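To ground the LogMetric field documentation above, here is a hedged sketch of constructing the two flavors of metric it describes, assuming the generated module is importable. The identifiers, filter, and extractor expressions are placeholders; a real distribution metric would also need a matching metric_descriptor and bucket_options to be accepted server-side:

from google.cloud.logging_v2.proto import logging_metrics_pb2

# Counter metric: the default DELTA/INT64 descriptor counts matching entries.
counter = logging_metrics_pb2.LogMetric(
    name="error_count",
    description="Count of ERROR-or-worse entries.",
    filter="resource.type=gae_app AND severity>=ERROR",
)

# Distribution metric: extractor expressions use the syntax documented above.
distribution = logging_metrics_pb2.LogMetric(
    name="request_quantity",
    filter="resource.type=gae_app",
    value_extractor='REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\\d+).*")',
    label_extractors={"method": "EXTRACT(protoPayload.method)"},
)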
Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - ), -) -_sym_db.RegisterMessage(ListLogMetricsRequest) - -ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Result returned from ListLogMetrics. - - - Attributes: - metrics: - A list of logs-based metrics. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - ), -) -_sym_db.RegisterMessage(ListLogMetricsResponse) - -GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "GetLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to GetLogMetric. - - - Attributes: - metric_name: - The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - ), -) -_sym_db.RegisterMessage(GetLogMetricRequest) - -CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "CreateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to CreateLogMetric. - - - Attributes: - parent: - The resource name of the project in which to create the - metric: :: "projects/[PROJECT_ID]" The new metric must - be provided in the request. - metric: - The new logs-based metric, which must not have an identifier - that already exists. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(CreateLogMetricRequest) - -UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "UpdateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to UpdateLogMetric. - - - Attributes: - metric_name: - The resource name of the metric to update: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated - metric must be provided in the request and its ``name`` field - must be the same as ``[METRIC_ID]``. If the metric does not - exist in ``[PROJECT_ID]``, then a new metric is created. - metric: - The updated metric. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(UpdateLogMetricRequest) - -DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to DeleteLogMetric.
- - - Attributes: - metric_name: - The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogMetricRequest) - - -DESCRIPTOR._options = None -_LOGMETRIC_LABELEXTRACTORSENTRY._options = None -_LOGMETRIC.fields_by_name["version"]._options = None - -_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( - name="MetricsServiceV2", - full_name="google.logging.v2.MetricsServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1266, - serialized_end=2262, - methods=[ - _descriptor.MethodDescriptor( - name="ListLogMetrics", - full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", - index=0, - containing_service=None, - input_type=_LISTLOGMETRICSREQUEST, - output_type=_LISTLOGMETRICSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" - ), - ), - _descriptor.MethodDescriptor( - name="GetLogMetric", - full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", - index=1, - containing_service=None, - input_type=_GETLOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", - index=2, - containing_service=None, - input_type=_CREATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", - index=3, - containing_service=None, - input_type=_UPDATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteLogMetric", - full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", - index=4, - containing_service=None, - input_type=_DELETELOGMETRICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) - -DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py deleted file mode 100644 index 09f84e038a1b..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ /dev/null @@ -1,118 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
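The service descriptor above and the stub defined in the module being removed below together expose the five metrics RPCs. As a rough sketch of client-side usage — the channel target and TLS-only credentials here are assumptions, and production calls additionally need OAuth2 call credentials for the scopes listed in serialized_options:

import grpc

from google.cloud.logging_v2.proto import (
    logging_metrics_pb2,
    logging_metrics_pb2_grpc,
)

channel = grpc.secure_channel(
    "logging.googleapis.com:443", grpc.ssl_channel_credentials()
)
stub = logging_metrics_pb2_grpc.MetricsServiceV2Stub(channel)
page = stub.ListLogMetrics(
    logging_metrics_pb2.ListLogMetricsRequest(parent="projects/my-project")
)
for metric in page.metrics:
    print(metric.name)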
-import grpc - -from google.cloud.logging_v2.proto import ( - logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListLogMetrics = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/ListLogMetrics", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, - ) - self.GetLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/GetLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.CreateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/CreateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.UpdateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.DeleteLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. - """ - - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "ListLogMetrics": grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, - ), - "GetLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "CreateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.MetricsServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py deleted file mode 100644 index 04bd84375901..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py +++ /dev/null @@ -1,1312 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.logging_v2.proto import ( - log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, -) -from google.cloud.logging_v2.proto import ( - logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 
\x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_DELETELOGREQUEST = _descriptor.Descriptor( - name="DeleteLogRequest", - full_name="google.logging.v2.DeleteLogRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.DeleteLogRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=376, - serialized_end=412, -) - - -_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=667, - serialized_end=712, -) - -_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( - name="WriteLogEntriesRequest", - full_name="google.logging.v2.WriteLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.WriteLogEntriesRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.WriteLogEntriesRequest.resource", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.WriteLogEntriesRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.WriteLogEntriesRequest.entries", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="partial_success", - full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dry_run", - 
full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=415, - serialized_end=712, -) - - -_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( - name="WriteLogEntriesResponse", - full_name="google.logging.v2.WriteLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=714, - serialized_end=739, -) - - -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( - name="LogEntryErrorsEntry", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=942, -) - -_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( - name="WriteLogEntriesPartialErrors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_entry_errors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=742, - serialized_end=942, -) - - -_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( - name="ListLogEntriesRequest", - full_name="google.logging.v2.ListLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_ids", - full_name="google.logging.v2.ListLogEntriesRequest.project_ids", - index=0, - number=1, - type=9, - 
cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_names", - full_name="google.logging.v2.ListLogEntriesRequest.resource_names", - index=1, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.ListLogEntriesRequest.filter", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.logging.v2.ListLogEntriesRequest.order_by", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogEntriesRequest.page_size", - index=4, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogEntriesRequest.page_token", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=945, - serialized_end=1090, -) - - -_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( - name="ListLogEntriesResponse", - full_name="google.logging.v2.ListLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.ListLogEntriesResponse.entries", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1092, - serialized_end=1187, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsRequest", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1189, - serialized_end=1269, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsResponse", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_descriptors", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1272, - serialized_end=1410, -) - - -_LISTLOGSREQUEST = _descriptor.Descriptor( - name="ListLogsRequest", - full_name="google.logging.v2.ListLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1412, - serialized_end=1484, -) - - -_LISTLOGSRESPONSE = _descriptor.Descriptor( - name="ListLogsResponse", - full_name="google.logging.v2.ListLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_names", - full_name="google.logging.v2.ListLogsResponse.log_names", - index=0, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1486, - serialized_end=1548, -) - -_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST -_WRITELOGENTRIESREQUEST.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_WRITELOGENTRIESREQUEST.fields_by_name[ - "labels" -].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY -_WRITELOGENTRIESREQUEST.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ - "value" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( - _WRITELOGENTRIESPARTIALERRORS -) -_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ - "log_entry_errors" -].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY -_LISTLOGENTRIESRESPONSE.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ - "resource_descriptors" -].message_type = ( - google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -) -DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "WriteLogEntriesPartialErrors" -] = _WRITELOGENTRIESPARTIALERRORS 
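The write-path messages registered here are documented further below. As a hedged sketch of assembling a write request — the project, log name, and payload are placeholders, and the stub wiring is assumed:

from google.cloud.logging_v2.proto import log_entry_pb2, logging_pb2

entry = log_entry_pb2.LogEntry(
    log_name="projects/my-project/logs/syslog",
    text_payload="hello world",
)
request = logging_pb2.WriteLogEntriesRequest(
    log_name="projects/my-project/logs/syslog",
    entries=[entry],
    # Ask for per-entry error details instead of failing the whole batch.
    partial_success=True,
)
# response = stub.WriteLogEntries(request)  # stub per the gRPC module below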
-DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsRequest" -] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsResponse" -] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DeleteLogRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to DeleteLog. - - - Attributes: - log_name: - Required. The resource name of the log to delete: :: - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example, ``"projects/my-project- - id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. For more information - about log names, see [LogEntry][google.logging.v2.LogEntry]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogRequest) - -WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesRequest", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to WriteLogEntries. - - - Attributes: - log_name: - Optional. A default log resource name that is assigned to all - log entries in ``entries`` that do not specify a value for - ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example: :: "projects/my-project- - id/logs/syslog" "organizations/1234567890/logs/cloudresour - cemanager.googleapis.com%2Factivity" The permission - logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new - log entries, whether the resource is specified in logName or - in an individual log entry. - resource: - Optional. A default monitored resource object that is assigned - to all log entries in ``entries`` that do not specify a value - for ``resource``. Example: :: { "type": "gce_instance", - "labels": { "zone": "us-central1-a", "instance_id": - "00000000000000000000" }} See - [LogEntry][google.logging.v2.LogEntry]. - labels: - Optional. Default labels that are added to the ``labels`` - field of all log entries in ``entries``. If a log entry - already has a label with the same key as a label in this - parameter, then the log entry's label is not changed. See - [LogEntry][google.logging.v2.LogEntry]. - entries: - Required. The log entries to send to Logging. 
The order of log - entries in this list does not matter. Values supplied in this - method's ``log_name``, ``resource``, and ``labels`` fields are - copied into those log entries in this list that do not include - values for their corresponding fields. For more information, - see the [LogEntry][google.logging.v2.LogEntry] type. If the - ``timestamp`` or ``insert_id`` fields are missing in log - entries, then this method supplies the current time or a - unique identifier, respectively. The supplied values are - chosen so that, among the log entries that did not supply - their own values, the entries earlier in the list will sort - before the entries later in the list. See the ``entries.list`` - method. Log entries with timestamps that are more than the - `logs retention period `__ in the past - or more than 24 hours in the future will not be available when - calling ``entries.list``. However, those log entries can still - be `exported with LogSinks `__. To improve throughput and to avoid exceeding the - `quota limit `__ for calls to - ``entries.write``, you should try to include several log - entries in this list, rather than calling this method for each - individual log entry. - partial_success: - Optional. Whether valid entries should be written even if some - other entries fail due to INVALID\_ARGUMENT or - PERMISSION\_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. - dry_run: - Optional. If true, the request should expect a normal response, - but the entries won't be persisted or exported. Useful for - checking whether the logging API endpoints are working - properly before sending valuable data. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesRequest) -_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) - -WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from WriteLogEntries. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesResponse) - -WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesPartialErrors", - (_message.Message,), - dict( - LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType( - "LogEntryErrorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Error details for WriteLogEntries with partial success. - - - Attributes: - log_entry_errors: - When ``WriteLogEntriesRequest.partial_success`` is true, - records the error status for entries that were not written due - to a permanent error, keyed by the entry's zero-based index in - ``WriteLogEntriesRequest.entries``. Failed requests for which - no entries are written will not include per-entry errors.
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - -ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ``ListLogEntries``. - - - Attributes: - project_ids: - Deprecated. Use ``resource_names`` instead. One or more - project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. - resource_names: - Required. Names of one or more parent resources from which to - retrieve log entries: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` - field are added to this list. - filter: - Optional. A filter that chooses which log entries to return. - See `Advanced Logs Filters - `__. Only log entries - that match the filter are returned. An empty filter matches - all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in - ``resource_names`` will cause the filter to return no results. - The maximum length of the filter is 20000 characters. - order_by: - Optional. How the results should be sorted. Presently, the - only permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in - order of increasing values of ``LogEntry.timestamp`` (oldest - first), and the second option returns entries in order of - decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` - values. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``next_page_token`` in the response indicates that more - results might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``page_token`` must be - the value of ``next_page_token`` from the previous response. - The values of other method parameters should be identical to - those in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(ListLogEntriesRequest) - -ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ``ListLogEntries``. - - - Attributes: - entries: - A list of log entries. If ``entries`` is empty, - ``nextPageToken`` may still be returned, indicating that more - entries may exist. See ``nextPageToken`` for more information. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. If a value for - ``next_page_token`` appears and the ``entries`` field is - empty, it means that the search found no log entries so far - but it did not have time to search all the possible log - entries. 
Retry the method with this value for ``page_token`` - to continue the search. Alternatively, consider speeding up - the search by changing your filter to specify a single log - name or resource type, or to narrow the time range of the - search. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(ListLogEntriesResponse) - -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListMonitoredResourceDescriptors - - - Attributes: - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) - -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListMonitoredResourceDescriptors. - - - Attributes: - resource_descriptors: - A list of resource descriptors. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) - -ListLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListLogs. - - - Attributes: - parent: - Required. The resource name that owns the logs: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) - ), -) -_sym_db.RegisterMessage(ListLogsRequest) - -ListLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListLogs. - - - Attributes: - log_names: - A list of log names. For example, ``"projects/my- - project/logs/syslog"`` or ``"organizations/123/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) - ), -) -_sym_db.RegisterMessage(ListLogsResponse) - - -DESCRIPTOR._options = None -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None -_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None - -_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( - name="LoggingServiceV2", - full_name="google.logging.v2.LoggingServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1551, - serialized_end=2836, - methods=[ - _descriptor.MethodDescriptor( - name="DeleteLog", - full_name="google.logging.v2.LoggingServiceV2.DeleteLog", - index=0, - containing_service=None, - input_type=_DELETELOGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" - ), - ), - _descriptor.MethodDescriptor( - name="WriteLogEntries", - full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", - index=1, - containing_service=None, - input_type=_WRITELOGENTRIESREQUEST, - output_type=_WRITELOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\026"\021/v2/entries:write:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogEntries", - full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", - index=2, - containing_service=None, - input_type=_LISTLOGENTRIESREQUEST, - output_type=_LISTLOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\025"\020/v2/entries:list:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListMonitoredResourceDescriptors", - full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", - index=3, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogs", - full_name="google.logging.v2.LoggingServiceV2.ListLogs", - index=4, - containing_service=None, - input_type=_LISTLOGSREQUEST, - output_type=_LISTLOGSRESPONSE, - serialized_options=_b( - 
"\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) - -DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py deleted file mode 100644 index 2a2b3656925c..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py +++ /dev/null @@ -1,130 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.DeleteLog = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/DeleteLog", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.WriteLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/WriteLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, - ) - self.ListLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, - ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.ListLogs = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogs", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, - ) - - -class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ - - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def WriteLogEntries(self, request, context): - """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries that originated - from a project/folder/organization/billing account. For ways to export log - entries, see [Exporting Logs](/logging/docs/export). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "DeleteLog": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "WriteLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, - ), - "ListLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, - ), - "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - "ListLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, - ), - } - generic_handler = 
grpc.method_handlers_generic_handler( - "google.logging.v2.LoggingServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto deleted file mode 100644 index 21b1367ab8ca..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/http_request.proto +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; -import "google/protobuf/duration.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "HttpRequestProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// A common proto for logging HTTP requests. Only contains semantics -// defined by the HTTP specification. Product-specific logging -// information MUST be defined in a separate message. -message HttpRequest { - // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. - string request_method = 1; - - // The scheme (http, https), the host name, the path and the query - // portion of the URL that was requested. - // Example: `"http://example.com/some/info?color=red"`. - string request_url = 2; - - // The size of the HTTP request message in bytes, including the request - // headers and the request body. - int64 request_size = 3; - - // The response code indicating the status of response. - // Examples: 200, 404. - int32 status = 4; - - // The size of the HTTP response message sent back to the client, in bytes, - // including the response headers and the response body. - int64 response_size = 5; - - // The user agent sent by the client. Example: - // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)"`. - string user_agent = 6; - - // The IP address (IPv4 or IPv6) of the client that issued the HTTP - // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. - string remote_ip = 7; - - // The IP address (IPv4 or IPv6) of the origin server that the request was - // sent to. - string server_ip = 13; - - // The referer URL of the request, as defined in - // [HTTP/1.1 Header Field Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). - string referer = 8; - - // The request processing latency on the server, from the time the request was - // received until the response was sent. - google.protobuf.Duration latency = 14; - - // Whether or not a cache lookup was attempted. - bool cache_lookup = 11; - - // Whether or not an entity was served from cache - // (with or without validation). 
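Although this duplicate copy of ``http_request.proto`` is being removed, the message itself remains usable from the canonical ``google.logging.type`` package. A hedged sketch of populating it; the import path is an assumption based on the ``__module__`` recorded in the generated code, and the field names come from the proto above:

    from google.logging.type import http_request_pb2  # assumed import path

    http_request = http_request_pb2.HttpRequest(
        request_method="GET",
        request_url="http://example.com/some/info?color=red",
        status=200,
        cache_lookup=True,   # a cache lookup was attempted...
        cache_hit=False,     # ...but the entity was not served from cache
    )
    # ``latency`` is a protobuf Duration: the time from request receipt
    # to response send.
    http_request.latency.FromMilliseconds(42)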
- bool cache_hit = 9; - - // Whether or not the response was validated with the origin server before - // being served from cache. This field is only meaningful if `cache_hit` is - // True. - bool cache_validated_with_origin_server = 10; - - // The number of HTTP response bytes inserted into cache. Set only when a - // cache fill was attempted. - int64 cache_fill_bytes = 12; - - // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" - string protocol = 15; -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 1d3af3c42416..1f2b1ca3b64d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/log_entry.proto @@ -17,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) @@ -30,6 +27,8 @@ from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -40,16 +39,17 @@ "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -106,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1028, - serialized_end=1073, + serialized_start=1057, + serialized_end=1102, ) _LOGENTRY = _descriptor.Descriptor( @@ -330,7 +330,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -440,8 +440,8 @@ fields=[], ) ], - serialized_start=306, - serialized_end=1084, + serialized_start=331, + serialized_end=1113, ) @@ -533,8 +533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1086, - serialized_end=1164, + serialized_start=1115, + serialized_end=1193, ) @@ -608,8 +608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1166, - serialized_end=1236, + serialized_start=1195, + serialized_end=1265, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY @@ -704,17 +704,20 @@ for a log name with a leading slash will never return any results. resource: - Required. The primary monitored resource associated with this - log entry. Example: a log entry that reports a database error - would be associated with the monitored resource designating - the particular database that reported the error. + Required. The monitored resource that produced this log entry. + Example: a log entry that reports a database error would be + associated with the monitored resource designating the + particular database that reported the error. payload: Optional. The log entry payload, which can be one of multiple types. proto_payload: The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log - entry payloads. + entry payloads. The following protocol buffer types are + supported; user-defined types are not supported: + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog" text_payload: The log entry payload, represented as a Unicode string (UTF-8). @@ -733,7 +736,7 @@ `__ in the past, and no more than 24 hours in the future. Log entries outside those time boundaries will not be available when calling ``entries.list``, but those log - entries can still be exported with `LogSinks + entries can still be `exported with LogSinks `__. receive_timestamp: Output only. The time the log entry was received by Logging. @@ -755,9 +758,17 @@ Optional. A set of user-defined (key, value) data that provides additional information about the log entry. metadata: - Output only. Additional metadata about the monitored resource. - Only ``k8s_container``, ``k8s_pod``, and ``k8s_node`` - MonitoredResources have this field populated. + Deprecated. Output only. Additional metadata about the + monitored resource. Only ``k8s_container``, ``k8s_pod``, and + ``k8s_node`` MonitoredResources have this field populated for + GKE versions older than 1.12.6. For GKE versions 1.12.6 and + above, the ``metadata`` field has been deprecated. The + Kubernetes pod labels that used to be in + ``metadata.userLabels`` will now be present in the ``labels`` + field with a key prefix of ``k8s-pod/``. The Stackdriver + system labels that were present in the + ``metadata.systemLabels`` field will no longer be available in + the LogEntry. operation: Optional. Information about an operation associated with the log entry, if applicable. @@ -769,12 +780,12 @@ projectid/traces/06796866738c859f2f19b7cfb3214824`` span_id: Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the Trace - API v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as "000000000000004a". + entry. For Trace spans, this is the same format that the + Trace API v2 uses: a 16-character hexadecimal encoding of an + 8-byte array, such as "000000000000004a". 
trace_sampled: Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the + the log entry. True means that the trace resource name in the ``trace`` field was sampled for storage in a trace backend. False means that the trace was not sampled for storage when this log entry was written, or the sampling decision was @@ -858,4 +869,5 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None +_LOGENTRY.fields_by_name["metadata"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto deleted file mode 100644 index ccb08cacb445..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_severity.proto +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "LogSeverityProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// The severity of the event described in a log entry, expressed as one of the -// standard severity levels listed below. For your reference, the levels are -// assigned the listed numeric values. The effect of using numeric values other -// than those listed is undefined. -// -// You can filter for log entries by severity. For example, the following -// filter expression will match log entries with severities `INFO`, `NOTICE`, -// and `WARNING`: -// -// severity > DEBUG AND severity <= WARNING -// -// If you are writing log entries, you should map other severity encodings to -// one of these standard levels. For example, you might map all of Java's FINE, -// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the -// original severity level in the log entry payload if you wish. -enum LogSeverity { - // (0) The log entry has no assigned severity level. - DEFAULT = 0; - - // (100) Debug or trace information. - DEBUG = 100; - - // (200) Routine information, such as ongoing status or performance. - INFO = 200; - - // (300) Normal but significant events, such as start up, shut down, or - // a configuration change. - NOTICE = 300; - - // (400) Warning events might cause problems. - WARNING = 400; - - // (500) Error events are likely to cause problems. - ERROR = 500; - - // (600) Critical events cause more severe problems or outages. - CRITICAL = 600; - - // (700) A person must take an action immediately. - ALERT = 700; - - // (800) One or more systems are unusable. 
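The enum comment above recommends folding foreign severity encodings onto these standard levels. A hedged Python sketch of one such mapping for the standard ``logging`` module; the helper and its round-up policy are illustrative, not part of this package:

    import logging

    # Standard LogSeverity values, keyed by the Python levels they absorb.
    _SEVERITY_BY_PYTHON_LEVEL = {
        logging.DEBUG: 100,     # DEBUG
        logging.INFO: 200,      # INFO
        logging.WARNING: 400,   # WARNING
        logging.ERROR: 500,     # ERROR
        logging.CRITICAL: 600,  # CRITICAL
    }

    def to_log_severity(python_level):
        """Round an arbitrary Python level up to the nearest standard value."""
        for level in sorted(_SEVERITY_BY_PYTHON_LEVEL):
            if python_level <= level:
                return _SEVERITY_BY_PYTHON_LEVEL[level]
        return 800  # EMERGENCY for anything above CRITICAL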
- EMERGENCY = 800; -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py deleted file mode 100644 index cd065d8a9311..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py +++ /dev/null @@ -1,405 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/logging/type/http_request.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/logging/type/http_request.proto", - package="google.logging.type", - syntax="proto3", - serialized_options=_b( - "\n\027com.google.logging.typeB\020HttpRequestProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" - ), - serialized_pb=_b( - '\n&google/logging/type/http_request.proto\x12\x13google.logging.type\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/api/annotations.proto"\xef\x02\n\x0bHttpRequest\x12\x16\n\x0erequest_method\x18\x01 \x01(\t\x12\x13\n\x0brequest_url\x18\x02 \x01(\t\x12\x14\n\x0crequest_size\x18\x03 \x01(\x03\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x15\n\rresponse_size\x18\x05 \x01(\x03\x12\x12\n\nuser_agent\x18\x06 \x01(\t\x12\x11\n\tremote_ip\x18\x07 \x01(\t\x12\x11\n\tserver_ip\x18\r \x01(\t\x12\x0f\n\x07referer\x18\x08 \x01(\t\x12*\n\x07latency\x18\x0e \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x63\x61\x63he_lookup\x18\x0b \x01(\x08\x12\x11\n\tcache_hit\x18\t \x01(\x08\x12*\n"cache_validated_with_origin_server\x18\n \x01(\x08\x12\x18\n\x10\x63\x61\x63he_fill_bytes\x18\x0c \x01(\x03\x12\x10\n\x08protocol\x18\x0f \x01(\tB\x9f\x01\n\x17\x63om.google.logging.typeB\x10HttpRequestProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_HTTPREQUEST = _descriptor.Descriptor( - name="HttpRequest", - full_name="google.logging.type.HttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="request_method", - full_name="google.logging.type.HttpRequest.request_method", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="request_url", - full_name="google.logging.type.HttpRequest.request_url", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_size", - full_name="google.logging.type.HttpRequest.request_size", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.logging.type.HttpRequest.status", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="response_size", - full_name="google.logging.type.HttpRequest.response_size", - index=4, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_agent", - full_name="google.logging.type.HttpRequest.user_agent", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="remote_ip", - full_name="google.logging.type.HttpRequest.remote_ip", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="server_ip", - full_name="google.logging.type.HttpRequest.server_ip", - index=7, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="referer", - full_name="google.logging.type.HttpRequest.referer", - index=8, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="latency", - full_name="google.logging.type.HttpRequest.latency", - index=9, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_lookup", - full_name="google.logging.type.HttpRequest.cache_lookup", - 
index=10, - number=11, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_hit", - full_name="google.logging.type.HttpRequest.cache_hit", - index=11, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_validated_with_origin_server", - full_name="google.logging.type.HttpRequest.cache_validated_with_origin_server", - index=12, - number=10, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_fill_bytes", - full_name="google.logging.type.HttpRequest.cache_fill_bytes", - index=13, - number=12, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="protocol", - full_name="google.logging.type.HttpRequest.protocol", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=126, - serialized_end=493, -) - -_HTTPREQUEST.fields_by_name[ - "latency" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HttpRequest = _reflection.GeneratedProtocolMessageType( - "HttpRequest", - (_message.Message,), - dict( - DESCRIPTOR=_HTTPREQUEST, - __module__="google.logging.type.http_request_pb2", - __doc__="""A common proto for logging HTTP requests. Only contains semantics - defined by the HTTP specification. Product-specific logging information - MUST be defined in a separate message. - - - Attributes: - request_method: - The request method. Examples: ``"GET"``, ``"HEAD"``, - ``"PUT"``, ``"POST"``. - request_url: - The scheme (http, https), the host name, the path and the - query portion of the URL that was requested. Example: - ``"http://example.com/some/info?color=red"``. - request_size: - The size of the HTTP request message in bytes, including the - request headers and the request body. - status: - The response code indicating the status of response. Examples: - 200, 404. - response_size: - The size of the HTTP response message sent back to the client, - in bytes, including the response headers and the response - body. - user_agent: - The user agent sent by the client. Example: ``"Mozilla/4.0 - (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR - 1.0.3705)"``. - remote_ip: - The IP address (IPv4 or IPv6) of the client that issued the - HTTP request. 
Examples: ``"192.168.1.1"``, - ``"FE80::0202:B3FF:FE1E:8329"``. - server_ip: - The IP address (IPv4 or IPv6) of the origin server that the - request was sent to. - referer: - The referer URL of the request, as defined in `HTTP/1.1 Header - Field Definitions - `__. - latency: - The request processing latency on the server, from the time - the request was received until the response was sent. - cache_lookup: - Whether or not a cache lookup was attempted. - cache_hit: - Whether or not an entity was served from cache (with or - without validation). - cache_validated_with_origin_server: - Whether or not the response was validated with the origin - server before being served from cache. This field is only - meaningful if ``cache_hit`` is True. - cache_fill_bytes: - The number of HTTP response bytes inserted into cache. Set - only when a cache fill was attempted. - protocol: - Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", - "websocket" - """, - # @@protoc_insertion_point(class_scope:google.logging.type.HttpRequest) - ), -) -_sym_db.RegisterMessage(HttpRequest) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py deleted file mode 100644 index bc429a3fca0b..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/logging/type/log_severity.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/logging/type/log_severity.proto", - package="google.logging.type", - syntax="proto3", - serialized_options=_b( - "\n\027com.google.logging.typeB\020LogSeverityProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" - ), - serialized_pb=_b( - "\n&google/logging/type/log_severity.proto\x12\x13google.logging.type\x1a\x1cgoogle/api/annotations.proto*\x82\x01\n\x0bLogSeverity\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x64\x12\t\n\x04INFO\x10\xc8\x01\x12\x0b\n\x06NOTICE\x10\xac\x02\x12\x0c\n\x07WARNING\x10\x90\x03\x12\n\n\x05\x45RROR\x10\xf4\x03\x12\r\n\x08\x43RITICAL\x10\xd8\x04\x12\n\n\x05\x41LERT\x10\xbc\x05\x12\x0e\n\tEMERGENCY\x10\xa0\x06\x42\x9f\x01\n\x17\x63om.google.logging.typeB\x10LogSeverityProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3" - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - -_LOGSEVERITY = _descriptor.EnumDescriptor( - name="LogSeverity", - full_name="google.logging.type.LogSeverity", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DEFAULT", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DEBUG", index=1, number=100, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INFO", index=2, number=200, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NOTICE", index=3, number=300, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WARNING", index=4, number=400, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=5, number=500, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CRITICAL", index=6, number=600, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALERT", index=7, number=700, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="EMERGENCY", index=8, number=800, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=94, - serialized_end=224, -) -_sym_db.RegisterEnumDescriptor(_LOGSEVERITY) - -LogSeverity = enum_type_wrapper.EnumTypeWrapper(_LOGSEVERITY) -DEFAULT = 0 -DEBUG = 100 -INFO = 200 -NOTICE = 300 -WARNING = 400 -ERROR = 500 -CRITICAL = 600 -ALERT = 700 -EMERGENCY = 800 - - -DESCRIPTOR.enum_types_by_name["LogSeverity"] = _LOGSEVERITY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index abeb244880d8..ae561d708480 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/logging_config.proto @@ -17,10 +15,12 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -31,13 +31,15 @@ "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.lo
gging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x90\x04\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 
\x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{par
ent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -64,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=492, - serialized_end=555, + serialized_start=711, + serialized_end=774, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -185,10 +187,64 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="bigquery_options", + full_name="google.logging.v2.LogSink.bigquery_options", + index=6, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogSink.create_time", + index=7, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogSink.update_time", + index=8, + number=14, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="start_time", full_name="google.logging.v2.LogSink.start_time", - index=6, + index=9, number=10, type=11, cpp_type=10, @@ -206,7 +262,7 @@ _descriptor.FieldDescriptor( name="end_time", full_name="google.logging.v2.LogSink.end_time", - index=7, + index=10, number=11, type=11, cpp_type=10, @@ -229,9 +285,56 @@ is_extendable=False, syntax="proto3", extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="options", + full_name="google.logging.v2.LogSink.options", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=257, + serialized_end=785, +) + + +_BIGQUERYOPTIONS = _descriptor.Descriptor( + name="BigQueryOptions", + full_name="google.logging.v2.BigQueryOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="use_partitioned_tables", + full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], oneofs=[], - serialized_start=200, - serialized_end=555, + serialized_start=787, + serialized_end=836, ) @@ -305,8 +408,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=557, - serialized_end=630, + serialized_start=838, + serialized_end=911, ) @@ -362,8 +465,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=632, - serialized_end=719, + serialized_start=913, + serialized_end=1000, ) @@ -401,8 +504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=721, - serialized_end=756, + serialized_start=1002, + serialized_end=1037, ) @@ -476,8 +579,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=758, - serialized_end=867, + serialized_start=1039, + serialized_end=1148, ) @@ -569,8 +672,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=1031, + serialized_start=1151, + serialized_end=1312, ) @@ -608,8 +711,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1033, - serialized_end=1071, + serialized_start=1314, + serialized_end=1352, ) @@ -692,6 +795,42 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogExclusion.create_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogExclusion.update_time", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + 
default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -701,8 +840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1073, - serialized_end=1156, + serialized_start=1355, + serialized_end=1536, ) @@ -776,8 +915,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1158, - serialized_end=1236, + serialized_start=1538, + serialized_end=1616, ) @@ -833,8 +972,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1238, - serialized_end=1340, + serialized_start=1618, + serialized_end=1720, ) @@ -872,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1342, - serialized_end=1377, + serialized_start=1722, + serialized_end=1757, ) @@ -929,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1379, - serialized_end=1471, + serialized_start=1759, + serialized_end=1851, ) @@ -1004,8 +1143,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1474, - serialized_end=1613, + serialized_start=1854, + serialized_end=1993, ) @@ -1043,11 +1182,18 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1615, - serialized_end=1653, + serialized_start=1995, + serialized_end=2033, ) _LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS +_LOGSINK.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK.fields_by_name[ "start_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -1055,12 +1201,24 @@ "end_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK +_LOGSINK.oneofs_by_name["options"].fields.append( + _LOGSINK.fields_by_name["bigquery_options"] +) +_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ + "options" +] _LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK _CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name[ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LOGEXCLUSION.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGEXCLUSION.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION _CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION _UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION @@ -1068,6 +1226,7 @@ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK +DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST @@ -1111,10 +1270,10 @@ sink's ``writer_identity``, set when the sink is 
created, must have permission to write to the destination or else the log entries are not exported. For more information, see `Exporting - Logs With Sinks `__. + Logs with Sinks `__. filter: Optional. An `advanced logs filter - `__. The only exported + `__. The only exported log entries are those that are in the resource owning the sink and that match the filter. For example: :: logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND @@ -1126,17 +1285,18 @@ writer_identity: Output only. An IAM identity—a service account or group—under which Logging writes the exported log entries to the sink's - destination. This field is set by `sinks.create - `__ - and `sinks.update `__, based on the setting of - ``unique_writer_identity`` in those methods. Until you grant - this identity write-access to the destination, log entry - exports from this sink will fail. For more information, see - `Granting access for a resource `__. Consult the destination service's documentation to - determine the appropriate IAM roles to assign to the identity. + destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a Resource + `__. Consult the + destination service's documentation to determine the + appropriate IAM roles to assign to the identity. include_children: Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, @@ -1152,18 +1312,52 @@ certain child projects, filter on the project part of the log name: :: logName:("projects/test-project1/" OR "projects/test-project2/") AND resource.type=gce_instance + options: + Optional. Destination dependent options. + bigquery_options: + Optional. Options that affect sinks exporting data to + BigQuery. + create_time: + Output only. The creation timestamp of the sink. This field + may not be present for older sinks. + update_time: + Output only. The last update timestamp of the sink. This + field may not be present for older sinks. start_time: - Deprecated. This field is ignored when creating or updating - sinks. + Do not use. This field is ignored. end_time: - Deprecated. This field is ignored when creating or updating - sinks. + Do not use. This field is ignored. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) ), ) _sym_db.RegisterMessage(LogSink) +BigQueryOptions = _reflection.GeneratedProtocolMessageType( + "BigQueryOptions", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYOPTIONS, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Options that change functionality of a sink exporting data to BigQuery. + + + Attributes: + use_partitioned_tables: + Optional. Whether to use `BigQuery's partition tables + `__. By default, Logging + creates dated tables based on the log entries' timestamps, + e.g. syslog\_20170523. With partitioned tables the date suffix + is no longer present and `special query syntax + `__ has to be used + instead. In both cases, tables are sharded based on UTC + timezone. 
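# A minimal usage sketch for the `bigquery_options` field added in this
# hunk.  `ConfigServiceV2Client` and `create_sink(parent, sink)` come from
# this package's generated surface shown elsewhere in this patch series;
# the project and dataset names below are placeholders.
from google.cloud.logging_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.proto import logging_config_pb2

config_client = ConfigServiceV2Client()
sink = logging_config_pb2.LogSink(
    name="my-bq-sink",
    destination=(
        "bigquery.googleapis.com/projects/my-project/datasets/my_dataset"
    ),
    # `bigquery_options` is currently the only member of the new
    # `options` oneof on LogSink.
    bigquery_options=logging_config_pb2.BigQueryOptions(
        use_partitioned_tables=True
    ),
)
config_client.create_sink("projects/my-project", sink)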
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) + ), +) +_sym_db.RegisterMessage(BigQueryOptions) + ListSinksRequest = _reflection.GeneratedProtocolMessageType( "ListSinksRequest", (_message.Message,), @@ -1303,8 +1497,8 @@ Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. unique_writer_identity: - Optional. See `sinks.create - `__ + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] for a description of this field. When updating a sink, the effect of this field on the value of ``writer_identity`` in the updated sink depends on both the old and new values of @@ -1365,11 +1559,11 @@ DESCRIPTOR=_LOGEXCLUSION, __module__="google.cloud.logging_v2.proto.logging_config_pb2", __doc__="""Specifies a set of log entries that are not to be stored in Logging. If - your project receives a large volume of logs, you might be able to use + your GCP resource receives a large volume of logs, you can use exclusions to reduce your chargeable logs. Exclusions are processed after log sinks, so you can export log entries before they are excluded. - Audit log entries and log entries from Amazon Web Services are never - excluded. + Note that organization-level and folder-level exclusions don't apply to + child resources, and that you can't exclude audit log entries. Attributes: @@ -1382,19 +1576,25 @@ Optional. A description of this exclusion. filter: Required. An `advanced logs filter - `__ that matches the log + `__ that matches the log entries to be excluded. By using the `sample function - `__, you can + `__, you can exclude less than 100% of the matching log entries. For - example, the following filter matches 99% of low-severity log - entries from load balancers: - ``"resource.type=http_load_balancer severity`__ to change the value of this field. + it does not exclude any log entries. You can [update an + exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion] + to change the value of this field. + create_time: + Output only. The creation timestamp of the exclusion. This + field may not be present for older exclusions. + update_time: + Output only. The last update timestamp of the exclusion. This + field may not be present for older exclusions. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) ), @@ -1530,7 +1730,7 @@ Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. update_mask: - Required. A nonempty list of fields to change in the existing + Required. A non-empty list of fields to change in the existing exclusion. 
New values for the fields are taken from the corresponding fields in the [LogExclusion][google.logging.v2.LogExclusion] included in @@ -1580,9 +1780,11 @@ full_name="google.logging.v2.ConfigServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1656, - serialized_end=4876, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" + ), + serialized_start=2036, + serialized_end=5482, methods=[ _descriptor.MethodDescriptor( name="ListSinks", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index 6e93d39b46b4..b250dc7dec22 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -8,8 +8,7 @@ class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def __init__(self, channel): @@ -71,8 +70,7 @@ def __init__(self, channel): class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def ListSinks(self, request, context): @@ -90,9 +88,9 @@ def GetSink(self, request, context): raise NotImplementedError("Method not implemented!") def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The + """Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can + `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -100,8 +98,9 @@ def CreateSink(self, request, context): raise NotImplementedError("Method not implemented!") def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing + """Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field. """ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 200f2b381014..3fd3fcead6d9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
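# The config hunks above also tighten the `UpdateExclusionRequest` docs:
# `update_mask` must be a non-empty list of fields to replace.  A rough
# sketch of that call; the exclusion name and filter are placeholders,
# and only the masked fields are read from the message.
from google.cloud.logging_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.proto import logging_config_pb2
from google.protobuf import field_mask_pb2

config_client = ConfigServiceV2Client()
config_client.update_exclusion(
    "projects/my-project/exclusions/my-exclusion",
    logging_config_pb2.LogExclusion(filter="severity<WARNING"),
    field_mask_pb2.FieldMask(paths=["filter"]),
)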
# source: google/cloud/logging_v2/proto/logging_metrics.proto @@ -17,12 +15,13 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +32,16 @@ "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x93\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_distribution__pb2.DESCRIPTOR, google_dot_api_dot_metric__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -61,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=662, - serialized_end=690, + serialized_start=785, + serialized_end=813, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) @@ -119,8 +119,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=606, - serialized_end=660, + serialized_start=729, + serialized_end=783, ) _LOGMETRIC = _descriptor.Descriptor( @@ -256,10 +256,46 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogMetric.create_time", + index=7, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogMetric.update_time", + index=8, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="version", full_name="google.logging.v2.LogMetric.version", - index=7, + index=9, number=4, type=14, cpp_type=8, @@ -283,8 +319,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=257, - serialized_end=690, + serialized_start=282, + serialized_end=813, ) @@ -358,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=692, - 
serialized_end=770, + serialized_start=815, + serialized_end=893, ) @@ -415,8 +451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=772, - serialized_end=868, + serialized_start=895, + serialized_end=991, ) @@ -454,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=912, + serialized_start=993, + serialized_end=1035, ) @@ -511,8 +547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=914, - serialized_end=1000, + serialized_start=1037, + serialized_end=1123, ) @@ -568,8 +604,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1002, - serialized_end=1093, + serialized_start=1125, + serialized_end=1216, ) @@ -607,8 +643,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1095, - serialized_end=1140, + serialized_start=1218, + serialized_end=1263, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC @@ -621,6 +657,12 @@ _LOGMETRIC.fields_by_name[ "bucket_options" ].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGMETRIC.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION _LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC _LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC @@ -676,7 +718,8 @@ project/metrics/nginx%2Frequests"``. description: Optional. A description of this metric, which is used in - documentation. + documentation. The maximum length of the description is 8000 + characters. filter: Required. An `advanced logs filter `__ which is used to @@ -740,6 +783,12 @@ based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to create a histogram of the extracted values. + create_time: + Output only. The creation timestamp of the metric. This field + may not be present for older metrics. + update_time: + Output only. The last update timestamp of the metric. This + field may not be present for older metrics. version: Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be @@ -899,9 +948,11 @@ full_name="google.logging.v2.MetricsServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1143, - serialized_end=1867, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1266, + serialized_end=2262, methods=[ _descriptor.MethodDescriptor( name="ListLogMetrics", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 07c7a191fa2a..04bd84375901 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
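# The metrics hunks above add output-only `create_time` / `update_time`
# fields to `LogMetric`.  A sketch of reading them back via the generated
# client; per the docstrings, older metrics may not populate these fields.
from google.cloud.logging_v2 import MetricsServiceV2Client

metrics_client = MetricsServiceV2Client()
for metric in metrics_client.list_log_metrics("projects/my-project"):
    # Both fields are google.protobuf.Timestamp messages.
    print(metric.name, metric.create_time.ToDatetime())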
# source: google/cloud/logging_v2/proto/logging.proto @@ -17,17 +15,21 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) from google.cloud.logging_v2.proto import ( log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, ) +from google.cloud.logging_v2.proto import ( + logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, +) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -38,16 +40,18 @@ "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* /v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 
\x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -86,8 +90,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=299, - serialized_end=335, + serialized_start=376, + serialized_end=412, ) @@ -143,8 +147,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=590, - serialized_end=635, + serialized_start=667, + serialized_end=712, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( @@ -271,8 +275,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=338, - serialized_end=635, + serialized_start=415, + serialized_end=712, ) @@ -291,8 +295,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=637, - serialized_end=662, + serialized_start=714, + serialized_end=739, ) @@ -348,8 +352,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=792, - serialized_end=865, + serialized_start=869, 
+ serialized_end=942, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( @@ -386,8 +390,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=665, - serialized_end=865, + serialized_start=742, + serialized_end=942, ) @@ -515,8 +519,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=868, - serialized_end=1013, + serialized_start=945, + serialized_end=1090, ) @@ -572,8 +576,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1015, - serialized_end=1110, + serialized_start=1092, + serialized_end=1187, ) @@ -629,8 +633,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1112, - serialized_end=1192, + serialized_start=1189, + serialized_end=1269, ) @@ -686,8 +690,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1195, - serialized_end=1333, + serialized_start=1272, + serialized_end=1410, ) @@ -761,8 +765,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1335, - serialized_end=1407, + serialized_start=1412, + serialized_end=1484, ) @@ -818,8 +822,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1409, - serialized_end=1471, + serialized_start=1486, + serialized_end=1548, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST @@ -960,7 +964,7 @@ `logs retention period `__ in the past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still - be exported with `LogSinks `__. To improve throughput and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to include several log @@ -1044,9 +1048,7 @@ project_ids: Deprecated. Use ``resource_names`` instead. One or more project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. If present, these - project identifiers are converted to resource name format and - added to the list of resources in ``resource_names``. + log entries. Example: ``"my-project-1A"``. resource_names: Required. Names of one or more parent resources from which to retrieve log entries: :: "projects/[PROJECT_ID]" @@ -1216,8 +1218,8 @@ Attributes: log_names: A list of log names. For example, ``"projects/my- - project/syslog"`` or ``"organizations/123/cloudresourcemanager - .googleapis.com%2Factivity"``. + project/logs/syslog"`` or ``"organizations/123/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. next_page_token: If there might be more results than those appearing in this response, then ``nextPageToken`` is included. 
To get the next @@ -1240,9 +1242,11 @@ full_name="google.logging.v2.LoggingServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1474, - serialized_end=2458, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1551, + serialized_end=2836, methods=[ _descriptor.MethodDescriptor( name="DeleteLog", @@ -1252,7 +1256,7 @@ input_type=_DELETELOGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" ), ), _descriptor.MethodDescriptor( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index d67dd2c95fd0..2a2b3656925c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -72,9 +72,9 @@ def WriteLogEntries(self, request, context): raise NotImplementedError("Method not implemented!") def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries from - Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). + """Lists log entries. Use this method to retrieve log entries that originated + from a project/folder/organization/billing account. For ways to export log + entries, see [Exporting Logs](/logging/docs/export). 
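# Per the revised docstring above, callers should scope the listing with
# `resource_names` rather than the deprecated `project_ids`.  A minimal
# sketch; the filter and ordering values are illustrative only.
from google.cloud.logging_v2 import LoggingServiceV2Client

logging_client = LoggingServiceV2Client()
entries = logging_client.list_log_entries(
    ["projects/my-project"],    # resource_names
    filter_="severity>=ERROR",  # note the trailing underscore
    order_by="timestamp desc",
)
for entry in entries:
    print(entry.log_name)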
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 89c4354a1db7..fe4f1d1563c5 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-26T12:29:00.912920Z", + "updateTime": "2019-10-08T18:03:34.610285Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.1", - "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "4c2ca81a0c976d4d37a8999984b7894d9af22124", - "internalRef": "271130964" + "sha": "dcebbafcd6f915ae59f01e92ac7260e61a545dd5", + "internalRef": "273552966" } }, { diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 43bd1b7bbe50..4364f387b4c9 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -30,9 +30,13 @@ include_protos=True, ) -s.move(library / "google/cloud/logging_v2/proto") +# the structure of the logging directory is a bit different, so manually copy the protos +s.move(library / "google/cloud/logging_v2/proto/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") +s.move(library / "google/cloud/logging_v2/proto/*.proto") + s.move(library / "google/cloud/logging_v2/gapic") s.move(library / "tests/unit/gapic/v2") +s.move(library / "docs/gapic/v2") # ---------------------------------------------------------------------------- # Add templated files From 958a78717f4697a438684b75c7d4ea9297c12949 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Oct 2019 09:36:40 -0400 Subject: [PATCH 262/855] chore: pin 'google-cloud-core >= 1.0.3, < 2.0.0dev' (#9445) --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8e88a95058de..a4b5ea7daac3 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,8 +29,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } From 8b420bc67b109c2a98bc8728973b6b1eb87f0cad Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Oct 2019 13:39:42 -0400 Subject: [PATCH 263/855] chore(logging): release 1.14.0 (#9471) --- packages/google-cloud-logging/CHANGELOG.md | 11 +++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 2fab1ee705dd..05caf8d580b8 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.14.0 + +10-15-2019 06:50 PDT + + +### Implementation Changes +- Fix proto copy. 
([#9420](https://github.com/googleapis/google-cloud-python/pull/9420)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + ## 1.13.0 09-23-2019 10:00 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index a4b5ea7daac3..00c9cb388a12 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.13.0' +version = '1.14.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b3aaccf955912280aea782a1df8d56f710eebbfd Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 29 Oct 2019 09:51:58 -0700 Subject: [PATCH 264/855] chore(logging): add trailing commas (via synth) (#9560) --- .../gapic/config_service_v2_client.py | 38 +++++++++---------- .../gapic/logging_service_v2_client.py | 26 ++++++------- .../gapic/metrics_service_v2_client.py | 28 +++++++------- .../config_service_v2_grpc_transport.py | 4 +- .../logging_service_v2_grpc_transport.py | 4 +- .../metrics_service_v2_grpc_transport.py | 4 +- .../cloud/logging_v2/proto/log_entry_pb2.py | 4 +- .../logging_v2/proto/logging_config_pb2.py | 14 +++---- .../logging_v2/proto/logging_metrics_pb2.py | 8 ++-- .../cloud/logging_v2/proto/logging_pb2.py | 8 ++-- packages/google-cloud-logging/synth.metadata | 12 +++--- .../transports/test_background_thread.py | 2 +- .../tests/unit/test_client.py | 4 +- 13 files changed, 78 insertions(+), 78 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 2942e2207000..18ed3c277435 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -40,7 +40,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class ConfigServiceV2Client(object): @@ -77,7 +77,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -110,7 +110,7 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): @@ -125,14 +125,14 @@ def folder_exclusion_path(cls, folder, exclusion): def folder_sink_path(cls, folder, sink): """Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink + "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return 
google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -157,14 +157,14 @@ def organization_sink_path(cls, organization, sink): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", project=project, sink=sink + "projects/{project}/sinks/{sink}", project=project, sink=sink, ) def __init__( @@ -254,12 +254,12 @@ def __init__( self.transport = transport else: self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -270,7 +270,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -360,7 +360,7 @@ def list_sinks( ) request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -450,7 +450,7 @@ def get_sink( client_info=self._client_info, ) - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -554,7 +554,7 @@ def create_sink( ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity + parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, ) if metadata is None: metadata = [] @@ -759,7 +759,7 @@ def delete_sink( client_info=self._client_info, ) - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -857,7 +857,7 @@ def list_exclusions( ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -947,7 +947,7 @@ def get_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.GetExclusionRequest(name=name) + request = logging_config_pb2.GetExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1038,7 +1038,7 @@ def create_exclusion( ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion + parent=parent, exclusion=exclusion, ) if metadata is None: metadata = [] @@ -1142,7 +1142,7 @@ def update_exclusion( ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask + name=name, exclusion=exclusion, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1218,7 
+1218,7 @@ def delete_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.DeleteExclusionRequest(name=name) + request = logging_config_pb2.DeleteExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 09509318a0a6..778ba747d83c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -44,7 +44,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class LoggingServiceV2Client(object): @@ -81,7 +81,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -96,27 +96,27 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log + "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log + "projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -132,7 +132,7 @@ def organization_log_path(cls, organization, log): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -222,12 +222,12 @@ def __init__( self.transport = transport else: self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -238,7 +238,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -311,7 +311,7 @@ def delete_log( client_info=self._client_info, ) - request = logging_pb2.DeleteLogRequest(log_name=log_name) + request = logging_pb2.DeleteLogRequest(log_name=log_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -679,7 +679,7 @@ def list_monitored_resource_descriptors( ) request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size + page_size=page_size, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, @@ -776,7 +776,7 @@ def list_logs( client_info=self._client_info, ) - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size) + request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 59dae9c7a78f..278f1365153d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -46,7 +46,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class MetricsServiceV2Client(object): @@ -83,33 +83,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric + "projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -199,12 +199,12 @@ def __init__( self.transport = transport else: self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + 
gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -215,7 +215,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -302,7 +302,7 @@ def list_log_metrics( ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -387,7 +387,7 @@ def get_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -472,7 +472,7 @@ def create_log_metric( ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric + parent=parent, metric=metric, ) if metadata is None: metadata = [] @@ -559,7 +559,7 @@ def update_log_metric( ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric + metric_name=metric_name, metric=metric, ) if metadata is None: metadata = [] @@ -630,7 +630,7 @@ def delete_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 533895087231..b85abcd58a78 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -80,7 +80,7 @@ def __init__( self._stubs = { "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( channel - ) + ), } @classmethod diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 4477ad701b5e..f6ab3ab8876c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
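The three generated gRPC transports patched in this commit share one constructor guard: a gRPC channel already has credentials baked in, so passing both `channel` and `credentials` is ambiguous and is rejected up front. A minimal sketch of that guard (class name invented; the rest of the generated boilerplate omitted):

```python
class ExampleGrpcTransport(object):
    """Condensed sketch of the guard the generated transports share."""

    def __init__(self, channel=None, credentials=None):
        # Providing both a channel and credentials is an error, because
        # channels come with credentials baked in already. The two adjacent
        # string literals below rely on implicit concatenation.
        if channel is not None and credentials is not None:
            raise ValueError(
                "The `channel` and `credentials` arguments are mutually "
                "exclusive."
            )
        self._channel = channel
        self._credentials = credentials
```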
@@ -78,7 +78,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel) + "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel), } @classmethod diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 426edce6edd7..bc66722729bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -80,7 +80,7 @@ def __init__( self._stubs = { "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( channel - ) + ), } @classmethod diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 1f2b1ca3b64d..c2517d84adae 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -425,7 +425,7 @@ ), ], extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY], + nested_types=[_LOGENTRY_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -438,7 +438,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=331, serialized_end=1113, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index ae561d708480..cc2a143fc6fc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -280,7 +280,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT], + enum_types=[_LOGSINK_VERSIONFORMAT,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -292,7 +292,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=257, serialized_end=785, @@ -323,7 +323,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -494,7 +494,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -701,7 +701,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1001,7 +1001,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1172,7 +1172,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 3fd3fcead6d9..1addc0a0b592 
100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -312,8 +312,8 @@ ), ], extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], - enum_types=[_LOGMETRIC_APIVERSION], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], + enum_types=[_LOGMETRIC_APIVERSION,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -480,7 +480,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -633,7 +633,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 04bd84375901..79a73bd0f5fc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -80,7 +80,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -268,7 +268,7 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -380,10 +380,10 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index fe4f1d1563c5..44367f98f48d 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-10-08T18:03:34.610285Z", + "updateTime": "2019-10-29T12:31:03.926658Z", "sources": [ { "generator": { "name": "artman", - "version": "0.38.0", - "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dcebbafcd6f915ae59f01e92ac7260e61a545dd5", - "internalRef": "273552966" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 7de912560aa7..7edae8a7bfa0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -43,7 +43,7 @@ def test_constructor(self): transport, worker = self._make_one(client, name) - logger, = worker.call_args[0] # call_args[0] is *args. + (logger,) = worker.call_args[0] # call_args[0] is *args. 
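The test tweak just above belongs to the same formatter pass: besides adding "magic" trailing commas (which tell the formatter to keep a call exploded one argument per line), it rewrites bare single-element tuple unpacking such as `logger, = args` into the parenthesized form `(logger,) = args`, where the unpacking is explicit rather than hinging on one easy-to-miss comma. A small self-contained illustration (values invented for the example):

```python
# Bare form: the lone trailing comma is all that distinguishes
# unpacking from ordinary assignment, and it is easy to overlook.
logger, = ["my-logger"]

# Parenthesized form preferred by the formatter: identical semantics,
# but the single-element unpacking is visually explicit.
(logger,) = ["my-logger"]
assert logger == "my-logger"
```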
self.assertEqual(logger.name, name) def test_send(self): diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 5acd736185fc..4e0b5ca22f0d 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -676,7 +676,7 @@ def test_setup_logging(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) handler.transport.worker.stop() @@ -710,7 +710,7 @@ def test_setup_logging_w_extra_kwargs(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) self.assertEqual(handler.name, name) self.assertEqual(handler.resource, resource) From eaa401ab7ac5c9e8488f6991de69aba81fa9357d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 11 Nov 2019 15:15:32 -0800 Subject: [PATCH 265/855] docs: add python 2 sunset banner to documentation (#9036) --- .../docs/_static/custom.css | 4 ++ .../docs/_templates/layout.html | 49 +++++++++++++++++++ packages/google-cloud-logging/docs/conf.py | 2 +- 3 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/docs/_static/custom.css create mode 100644 packages/google-cloud-logging/docs/_templates/layout.html diff --git a/packages/google-cloud-logging/docs/_static/custom.css b/packages/google-cloud-logging/docs/_static/custom.css new file mode 100644 index 000000000000..9a6f9f8ddc3a --- /dev/null +++ b/packages/google-cloud-logging/docs/_static/custom.css @@ -0,0 +1,4 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/_templates/layout.html b/packages/google-cloud-logging/docs/_templates/layout.html new file mode 100644 index 000000000000..de457b2c2767 --- /dev/null +++ b/packages/google-cloud-logging/docs/_templates/layout.html @@ -0,0 +1,49 @@ +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             On January 1, 2020 this library will no longer support Python 2 on the latest released version.
+             Previously released library versions will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 524c564a1698..c0253fdd03a8 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -164,7 +164,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied From c6f70ffc4ae44fe408ffbbd3cc87b99d65a3bbfa Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 12 Nov 2019 12:50:06 -0800 Subject: [PATCH 266/855] chore(logging): change spacing in docs templates (via synth) (#9756) --- packages/google-cloud-logging/docs/_static/custom.css | 2 +- .../google-cloud-logging/docs/_templates/layout.html | 1 + packages/google-cloud-logging/synth.metadata | 10 +++++----- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/docs/_static/custom.css b/packages/google-cloud-logging/docs/_static/custom.css index 9a6f9f8ddc3a..0abaf229fce3 100644 --- a/packages/google-cloud-logging/docs/_static/custom.css +++ b/packages/google-cloud-logging/docs/_static/custom.css @@ -1,4 +1,4 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/_templates/layout.html b/packages/google-cloud-logging/docs/_templates/layout.html index de457b2c2767..228529efe2d2 100644 --- a/packages/google-cloud-logging/docs/_templates/layout.html +++ b/packages/google-cloud-logging/docs/_templates/layout.html @@ -1,3 +1,4 @@ + {% extends "!layout.html" %} {%- block content %} {%- if theme_fixed_sidebar|lower == 'true' %} diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 44367f98f48d..f39e49fda131 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-10-29T12:31:03.926658Z", + "updateTime": "2019-11-12T13:33:01.620730Z", "sources": [ { "generator": { "name": "artman", - "version": "0.40.3", - "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" + "version": "0.41.1", + "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", - "internalRef": "277177415" + "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", + "internalRef": "279774957" } }, { From cf0bc37bcfe1de7fb14058aa77d9152df2741160 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 19 Nov 2019 10:10:04 -0800 Subject: [PATCH 267/855] fix(logging): deprecate resource name helper methods (via synth) (#9837) --- .../gapic/config_service_v2_client.py | 84 ++++++++++++++++--- .../gapic/logging_service_v2_client.py | 56 +++++++++++-- .../gapic/metrics_service_v2_client.py | 35 ++++++-- packages/google-cloud-logging/synth.metadata | 10 +-- 4 files changed, 155 insertions(+), 30 deletions(-) diff --git 
a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 18ed3c277435..521b2e304f84 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -75,14 +75,24 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" + """DEPRECATED. Return a fully-qualified billing string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def billing_exclusion_path(cls, billing_account, exclusion): - """Return a fully-qualified billing_exclusion string.""" + """DEPRECATED. Return a fully-qualified billing_exclusion string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}/exclusions/{exclusion}", billing_account=billing_account, @@ -91,7 +101,12 @@ def billing_exclusion_path(cls, billing_account, exclusion): @classmethod def billing_sink_path(cls, billing_account, sink): - """Return a fully-qualified billing_sink string.""" + """DEPRECATED. Return a fully-qualified billing_sink string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}/sinks/{sink}", billing_account=billing_account, @@ -100,7 +115,12 @@ def billing_sink_path(cls, billing_account, sink): @classmethod def exclusion_path(cls, project, exclusion): - """Return a fully-qualified exclusion string.""" + """DEPRECATED. Return a fully-qualified exclusion string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}/exclusions/{exclusion}", project=project, @@ -109,12 +129,22 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): - """Return a fully-qualified folder string.""" + """DEPRECATED. Return a fully-qualified folder string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): - """Return a fully-qualified folder_exclusion string.""" + """DEPRECATED. Return a fully-qualified folder_exclusion string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "folders/{folder}/exclusions/{exclusion}", folder=folder, @@ -123,21 +153,36 @@ def folder_exclusion_path(cls, folder, exclusion): @classmethod def folder_sink_path(cls, folder, sink): - """Return a fully-qualified folder_sink string.""" + """DEPRECATED. 
Return a fully-qualified folder_sink string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): - """Return a fully-qualified organization string.""" + """DEPRECATED. Return a fully-qualified organization string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def organization_exclusion_path(cls, organization, exclusion): - """Return a fully-qualified organization_exclusion string.""" + """DEPRECATED. Return a fully-qualified organization_exclusion string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}/exclusions/{exclusion}", organization=organization, @@ -146,7 +191,12 @@ def organization_exclusion_path(cls, organization, exclusion): @classmethod def organization_sink_path(cls, organization, sink): - """Return a fully-qualified organization_sink string.""" + """DEPRECATED. Return a fully-qualified organization_sink string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}/sinks/{sink}", organization=organization, @@ -155,14 +205,24 @@ def organization_sink_path(cls, organization, sink): @classmethod def project_path(cls, project): - """Return a fully-qualified project string.""" + """DEPRECATED. Return a fully-qualified project string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): - """Return a fully-qualified sink string.""" + """DEPRECATED. Return a fully-qualified sink string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}/sinks/{sink}", project=project, sink=sink, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 778ba747d83c..072c4ebbbb7c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -79,14 +79,24 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" + """DEPRECATED. Return a fully-qualified billing string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def billing_log_path(cls, billing_account, log): - """Return a fully-qualified billing_log string.""" + """DEPRECATED. 
Return a fully-qualified billing_log string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}/logs/{log}", billing_account=billing_account, @@ -95,33 +105,58 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): - """Return a fully-qualified folder string.""" + """DEPRECATED. Return a fully-qualified folder string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): - """Return a fully-qualified folder_log string.""" + """DEPRECATED. Return a fully-qualified folder_log string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): - """Return a fully-qualified log string.""" + """DEPRECATED. Return a fully-qualified log string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): - """Return a fully-qualified organization string.""" + """DEPRECATED. Return a fully-qualified organization string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def organization_log_path(cls, organization, log): - """Return a fully-qualified organization_log string.""" + """DEPRECATED. Return a fully-qualified organization_log string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}/logs/{log}", organization=organization, @@ -130,7 +165,12 @@ def organization_log_path(cls, organization, log): @classmethod def project_path(cls, project): - """Return a fully-qualified project string.""" + """DEPRECATED. Return a fully-qualified project string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}", project=project, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 278f1365153d..b127502ee3de 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -81,33 +81,58 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" + """DEPRECATED. 
Return a fully-qualified billing string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): - """Return a fully-qualified folder string.""" + """DEPRECATED. Return a fully-qualified folder string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def metric_path(cls, project, metric): - """Return a fully-qualified metric string.""" + """DEPRECATED. Return a fully-qualified metric string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): - """Return a fully-qualified organization string.""" + """DEPRECATED. Return a fully-qualified organization string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): - """Return a fully-qualified project string.""" + """DEPRECATED. Return a fully-qualified project string.""" + warnings.warn( + "Resource name helper functions are deprecated.", + PendingDeprecationWarning, + stacklevel=1, + ) return google.api_core.path_template.expand( "projects/{project}", project=project, ) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index f39e49fda131..425ea50eea8e 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-11-12T13:33:01.620730Z", + "updateTime": "2019-11-19T13:27:19.668508Z", "sources": [ { "generator": { "name": "artman", - "version": "0.41.1", - "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" + "version": "0.42.1", + "dockerImage": "googleapis/artman@sha256:c773192618c608a7a0415dd95282f841f8e6bcdef7dd760a988c93b77a64bd57" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", - "internalRef": "279774957" + "sha": "d8dd7fe8d5304f7bd1c52207703d7f27d5328c5a", + "internalRef": "281088257" } }, { From 801e4629e45825108283f69486662688e1c79a1e Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 10 Jan 2020 21:47:16 +0000 Subject: [PATCH 268/855] fix(logging): update test assertion and core version pins (#10087) * Adjust request params assertion in test (timeout) * Update version pins for core dependencies Version bumps are needed, as timeouts were introduced to API core. * Adjust assertion in system test An empty zone label that was missing is added. 
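Context for the assertion change below: google-api-core 1.15.0 started threading an explicit per-call `timeout` through to the underlying HTTP request, so a mocked `http.request` now also receives `timeout=None` when the caller sets no deadline, and the dependency floors are bumped to match. A mock-based sketch of the behavior the updated test pins down (the URL and `api_request` helper are invented for illustration, not the library's actual code path):

```python
import mock

def api_request(http, url, timeout=None):
    # Sketch only: newer google-api-core forwards `timeout` on every
    # request, defaulting to None when the caller sets no deadline.
    return http.request(
        method="GET", url=url, data=None, headers={}, timeout=timeout
    )

http = mock.Mock(spec=["request"])
api_request(http, "https://logging.googleapis.com/v2/rainbow")
http.request.assert_called_once_with(
    method="GET",
    url="https://logging.googleapis.com/v2/rainbow",
    data=None,
    headers={},
    timeout=None,
)
```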
--- packages/google-cloud-logging/setup.py | 4 ++-- packages/google-cloud-logging/tests/system/test_system.py | 3 ++- packages/google-cloud-logging/tests/unit/test__http.py | 6 +++++- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 00c9cb388a12..fd71ab274b7d 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,8 +29,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.3, < 2.0dev", + "google-api-core[grpc] >= 1.15.0, < 2.0.0dev", + "google-cloud-core >= 1.1.0, < 2.0dev", ] extras = { } diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 8e2d694081a1..ea51aa8fd729 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -179,7 +179,8 @@ def test_log_text_with_resource(self): logger = Config.CLIENT.logger(self._logger_name("log_text_res")) now = datetime.datetime.utcnow() resource = Resource( - type="gae_app", labels={"module_id": "default", "version_id": "test"} + type="gae_app", + labels={"module_id": "default", "version_id": "test", "zone": ""}, ) self.to_delete.append(logger) diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 30fef253af0c..4ffc1cfebd1c 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -72,7 +72,11 @@ def test_extra_headers(self): } expected_uri = conn.build_api_url("/rainbow") http.request.assert_called_once_with( - data=req_data, headers=expected_headers, method="GET", url=expected_uri + data=req_data, + headers=expected_headers, + method="GET", + url=expected_uri, + timeout=None, ) From 5fdb0a013f54cfe2d3bd2f236bfd247dc91901f5 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Wed, 15 Jan 2020 17:09:20 +0200 Subject: [PATCH 269/855] fix: replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 (#10081) * fix: fix for Python 4: replace unsafe six.PY3 with PY2 * Fix wording --- .../google-cloud-logging/tests/unit/handlers/test__helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index b5ba26fd17a7..702015961771 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -77,7 +77,7 @@ def get(self): self.response.out.write(json.dumps(trace_id)) -@unittest.skipIf(six.PY3, "webapp2 is Python 2 only") +@unittest.skipIf(not six.PY2, "webapp2 is Python 2 only") class Test_get_trace_id_from_webapp2(unittest.TestCase): @staticmethod def create_app(): From 75815a699b0db3e986cba50639408fb0bf9511fe Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 5 Feb 2020 22:49:05 +0000 Subject: [PATCH 270/855] chore: add split repo templates (#2) --- .../.github/CONTRIBUTING.md | 28 ++ .../.github/ISSUE_TEMPLATE/bug_report.md | 44 +++ .../.github/ISSUE_TEMPLATE/feature_request.md | 18 ++ .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + 
.../.github/release-please.yml | 1 + packages/google-cloud-logging/.gitignore | 58 ++++ .../google-cloud-logging/.kokoro/build.sh | 39 +++ .../.kokoro/continuous/common.cfg | 27 ++ .../.kokoro/continuous/continuous.cfg | 1 + .../.kokoro/docs/common.cfg | 48 +++ .../.kokoro/docs/docs.cfg | 1 + .../.kokoro/presubmit/common.cfg | 27 ++ .../.kokoro/presubmit/presubmit.cfg | 1 + .../.kokoro/publish-docs.sh | 57 ++++ .../google-cloud-logging/.kokoro/release.sh | 34 +++ .../.kokoro/release/common.cfg | 64 ++++ .../.kokoro/release/release.cfg | 1 + .../.kokoro/trampoline.sh | 23 ++ .../google-cloud-logging/.repo-metadata.json | 2 +- .../google-cloud-logging/CODE_OF_CONDUCT.md | 44 +++ .../google-cloud-logging/CONTRIBUTING.rst | 279 ++++++++++++++++++ packages/google-cloud-logging/LICENSE | 7 +- packages/google-cloud-logging/MANIFEST.in | 1 + packages/google-cloud-logging/docs/conf.py | 13 +- packages/google-cloud-logging/noxfile.py | 21 +- packages/google-cloud-logging/renovate.json | 5 + packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/synth.metadata | 13 +- .../test_utils/credentials.json.enc | 49 +++ .../scripts/circleci/get_tagged_package.py | 64 ++++ .../scripts/circleci/twine_upload.sh | 36 +++ .../test_utils/scripts/get_target_packages.py | 268 +++++++++++++++++ .../scripts/get_target_packages_kokoro.py | 98 ++++++ .../test_utils/scripts/run_emulator.py | 199 +++++++++++++ .../test_utils/scripts/update_docs.sh | 93 ++++++ .../google-cloud-logging/test_utils/setup.py | 64 ++++ .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 +++ .../test_utils/test_utils/retry.py | 207 +++++++++++++ .../test_utils/test_utils/system.py | 81 +++++ .../test_utils/test_utils/vpcsc_config.py | 118 ++++++++ .../tests/unit/test__http.py | 8 +- 43 files changed, 2162 insertions(+), 34 deletions(-) create mode 100644 packages/google-cloud-logging/.github/CONTRIBUTING.md create mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/google-cloud-logging/.github/release-please.yml create mode 100644 packages/google-cloud-logging/.gitignore create mode 100755 packages/google-cloud-logging/.kokoro/build.sh create mode 100644 packages/google-cloud-logging/.kokoro/continuous/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/continuous/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/docs/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/docs/docs.cfg create mode 100644 packages/google-cloud-logging/.kokoro/presubmit/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg create mode 100755 packages/google-cloud-logging/.kokoro/publish-docs.sh create mode 100755 packages/google-cloud-logging/.kokoro/release.sh create mode 100644 packages/google-cloud-logging/.kokoro/release/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/release/release.cfg create mode 100755 packages/google-cloud-logging/.kokoro/trampoline.sh create mode 100644 packages/google-cloud-logging/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-logging/CONTRIBUTING.rst create mode 100644 packages/google-cloud-logging/renovate.json create mode 100644 
packages/google-cloud-logging/test_utils/credentials.json.enc create mode 100644 packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py create mode 100755 packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh create mode 100644 packages/google-cloud-logging/test_utils/scripts/get_target_packages.py create mode 100644 packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py create mode 100644 packages/google-cloud-logging/test_utils/scripts/run_emulator.py create mode 100755 packages/google-cloud-logging/test_utils/scripts/update_docs.sh create mode 100644 packages/google-cloud-logging/test_utils/setup.py create mode 100644 packages/google-cloud-logging/test_utils/test_utils/__init__.py create mode 100644 packages/google-cloud-logging/test_utils/test_utils/imports.py create mode 100644 packages/google-cloud-logging/test_utils/test_utils/retry.py create mode 100644 packages/google-cloud-logging/test_utils/test_utils/system.py create mode 100644 packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-logging/.github/CONTRIBUTING.md b/packages/google-cloud-logging/.github/CONTRIBUTING.md new file mode 100644 index 000000000000..939e5341e74d --- /dev/null +++ b/packages/google-cloud-logging/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..d575444a7b5c --- /dev/null +++ b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,44 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
+ +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-logging/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python + - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `google-cloud-logging` version: `pip show google-cloud-logging` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..6365857f33c6 --- /dev/null +++ b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 000000000000..995869032125 --- /dev/null +++ b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..ff76f9a9c6d0 --- /dev/null +++ b/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-logging/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/packages/google-cloud-logging/.github/release-please.yml b/packages/google-cloud-logging/.github/release-please.yml new file mode 100644 index 000000000000..4507ad0598a5 --- /dev/null +++ b/packages/google-cloud-logging/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore new file mode 100644 index 000000000000..3fb06e09ce74 --- /dev/null +++ b/packages/google-cloud-logging/.gitignore @@ -0,0 +1,58 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated + +# Virtual environment +env/ +coverage.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh new file mode 100755 index 000000000000..8df566562b91 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd github/python-logging + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +python3.6 -m nox diff --git a/packages/google-cloud-logging/.kokoro/continuous/common.cfg b/packages/google-cloud-logging/.kokoro/continuous/common.cfg new file mode 100644 index 000000000000..2d5b6bac27ca --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/build.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg b/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg new file mode 100644 index 000000000000..01a16ec856c1 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -0,0 +1,48 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/docs/docs.cfg b/packages/google-cloud-logging/.kokoro/docs/docs.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/presubmit/common.cfg b/packages/google-cloud-logging/.kokoro/presubmit/common.cfg new file mode 100644 index 000000000000..2d5b6bac27ca --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/build.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh new file mode 100755 index 000000000000..d01483d2dfb6 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +cd github/python-logging + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install gcp-docuploader + +# install a json parser +sudo apt-get update +sudo apt-get -y install software-properties-common +sudo add-apt-repository universe +sudo apt-get update +sudo apt-get -y install jq + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh new file mode 100755 index 000000000000..c9b0928bad84 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +cd github/python-logging +python3 setup.py sdist bdist_wheel +twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg new file mode 100644 index 000000000000..52c4b699b7cf --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -0,0 +1,64 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/release.sh" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } +} + +# Fetch magictoken to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "releasetool-magictoken" + } + } +} + +# Fetch api key to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "magic-github-proxy-api-key" + } + } +} diff --git a/packages/google-cloud-logging/.kokoro/release/release.cfg b/packages/google-cloud-logging/.kokoro/release/release.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/trampoline.sh b/packages/google-cloud-logging/.kokoro/trampoline.sh new file mode 100755 index 000000000000..e8c4251f3ed4 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/trampoline.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? + +chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh +${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true + +exit ${ret_code} diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 33d1b1f819ee..4a83dbb62d84 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -6,7 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", "release_level": "ga", "language": "python", - "repo": "googleapis/google-cloud-python", + "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com" } \ No newline at end of file diff --git a/packages/google-cloud-logging/CODE_OF_CONDUCT.md b/packages/google-cloud-logging/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..b3d1f6029849 --- /dev/null +++ b/packages/google-cloud-logging/CODE_OF_CONDUCT.md @@ -0,0 +1,44 @@ + +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. + +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing others' private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers.
+ +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst new file mode 100644 index 000000000000..4fad6c852f61 --- /dev/null +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -0,0 +1,279 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: 2.7, + 3.5, 3.6, and 3.7 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-logging`` `repo`_ on GitHub. + +- Fork and clone the ``python-logging`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``python-logging`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-logging``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-logging.git hack-on-python-logging + $ cd hack-on-python-logging + # Configure remotes such that you can pull changes from the googleapis/python-logging + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/python-logging.git + # fetch and merge changes from upstream into master + $ git fetch upstream + $ git merge upstream/master + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-logging + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 + $ ... + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. 
+ + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ + +- PEP8 compliance, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="master" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-logging``. The suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout, and the + branch should be the main branch on that remote (``master``). + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + $ nox -s system-3.7 + $ nox -s system-2.7 + + .. note:: + + System tests are only configured to run under Python 2.7 and + Python 3.7. For expediency, we do not run them in older versions + of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + such a file can be downloaded directly from the developer's console by clicking + "Generate new JSON key". See private key + `docs `__ + for more details. + +- Once you have downloaded your JSON keys, set the environment variable + ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the JSON file:: + + $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" + + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``.
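+ +- Before sending a pull request, a typical full local run of these checks (a sketch; the authoritative session names are the ones defined in ``noxfile.py``) is:: + + $ nox -s lint + $ nox -s unit-3.7 + $ nox -s cover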
+ +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-logging/blob/master/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-logging + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.5`_ +- `Python 3.6`_ +- `Python 3.7`_ + +.. _Python 3.5: https://docs.python.org/3.5/ +.. _Python 3.6: https://docs.python.org/3.6/ +.. _Python 3.7: https://docs.python.org/3.7/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py + +We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ +and lack of continuous integration `support`_. + +.. _Python 2.5: https://docs.python.org/2.5/ +.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ +.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ + +We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no +longer supported by the core development team. + +Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. + +We also explicitly decided to support Python 3 beginning with version +3.5. Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ +.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. 
+ +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-logging/LICENSE b/packages/google-cloud-logging/LICENSE index d64569567334..a8ee855de2aa 100644 --- a/packages/google-cloud-logging/LICENSE +++ b/packages/google-cloud-logging/LICENSE @@ -1,7 +1,6 @@ - - Apache License + Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -193,7 +192,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index 9cbf175afe6b..cd011be27a0e 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index c0253fdd03a8..45db4f8b2923 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -20,7 +20,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +__version__ = "" # -- General configuration ------------------------------------------------ @@ -66,7 +66,7 @@ # General information about the project. project = u"google-cloud-logging" -copyright = u"2017, Google" +copyright = u"2019, Google" author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for @@ -133,9 +133,9 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-logging", "github_user": "googleapis", - "github_repo": "google-cloud-python", + "github_repo": "python-logging", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -318,7 +318,7 @@ u"google-cloud-logging Documentation", author, "google-cloud-logging", - "GAPIC library for the logging API", + "google-cloud-logging Library", "APIs", ) ] @@ -340,9 +340,8 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index d2a53d2de4fb..d3af7cb0c13c 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -21,10 +21,6 @@ import nox -LOCAL_DEPS = ( - os.path.join('..', 'api_core'), - os.path.join('..', 'core'), -) UNIT_TEST_DEPS = ( 'mock', 'pytest', @@ -42,7 +38,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "black", *LOCAL_DEPS) + session.install("flake8", "black") session.run( "black", "--check", @@ -84,8 +80,6 @@ def default(session, django_dep=('django',)): deps += django_dep session.install(*deps) - for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) session.install('-e', '.') # Run py.test against the unit tests. @@ -134,16 +128,15 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest') - for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) systest_deps = [ - '../bigquery/', - '../pubsub/', - '../storage/', - '../test_utils/', + 'google-cloud-bigquery', + 'google-cloud-pubsub', + 'google-cloud-storage', ] for systest_dep in systest_deps: - session.install('-e', systest_dep) + session.install(systest_dep) + + session.install('-e', 'test_utils/') session.install('-e', '.') # Run py.test against the system tests. 
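The ``system`` session above now installs its test dependencies (``google-cloud-bigquery``, ``google-cloud-pubsub``, ``google-cloud-storage``) as released PyPI distributions rather than as path installs from sibling directories of the old monorepo checkout; only ``test_utils`` and the package itself remain editable installs. A rough manual equivalent of what the session sets up (a sketch, assuming an active virtualenv in the package directory) is:

    $ pip install mock pytest google-cloud-bigquery google-cloud-pubsub google-cloud-storage
    $ pip install -e test_utils/
    $ pip install -e .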
diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json new file mode 100644 index 000000000000..4fa949311b20 --- /dev/null +++ b/packages/google-cloud-logging/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base", ":preserveSemverRanges" + ] +} diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index fd71ab274b7d..95d45ad641c1 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -64,7 +64,7 @@ author='Google LLC', author_email='googleapis-packages@google.com', license='Apache 2.0', - url='https://github.com/googleapis/google-cloud-python', + url='https://github.com/googleapis/python-logging', classifiers=[ release_status, 'Intended Audience :: Developers', diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 425ea50eea8e..f8b9e8a14c93 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,24 +1,25 @@ { - "updateTime": "2019-11-19T13:27:19.668508Z", + "updateTime": "2020-02-05T13:11:21.216339Z", "sources": [ { "generator": { "name": "artman", - "version": "0.42.1", - "dockerImage": "googleapis/artman@sha256:c773192618c608a7a0415dd95282f841f8e6bcdef7dd760a988c93b77a64bd57" + "version": "0.44.4", + "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d8dd7fe8d5304f7bd1c52207703d7f27d5328c5a", - "internalRef": "281088257" + "sha": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c", + "internalRef": "293257997", + "log": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry 
config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\nc1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n08b488e0660c59842a7dee0e3e2b65d9e3a514a9\nExposing cloud_catalog.proto (This API is already available through REST)\n\nPiperOrigin-RevId: 288625007\n\na613482977e11ac09fa47687a5d1b5a01efcf794\nUpdate the OS Login v1beta API description to render better in the UI.\n\nPiperOrigin-RevId: 288547940\n\n5e182b8d9943f1b17008d69d4c7e865dc83641a7\nUpdate the OS Login API description to render better in the UI.\n\nPiperOrigin-RevId: 288546443\n\ncb79155f596e0396dd900da93872be7066f6340d\nFix: Add a resource annotation for Agent\nFix: Correct the service name in annotations for Intent and SessionEntityType\n\nPiperOrigin-RevId: 288441307\n\nf7f6e9daec3315fd47cb638789bd8415bf4a27cc\nAdded cloud asset api v1p1beta1\n\nPiperOrigin-RevId: 288427239\n\nf2880f5b342c6345f3dcaad24fcb3c6ca9483654\nBilling account API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 288351810\n\ndc250ffe071729f8f8bef9d6fd0fbbeb0254c666\nFix: Remove incorrect resource annotations in requests\n\nPiperOrigin-RevId: 288321208\n\n91ef2d9dd69807b0b79555f22566fb2d81e49ff9\nAdd GAPIC annotations to Cloud KMS (but do not migrate the GAPIC config yet).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 287999179\n\n4d45a6399e9444fbddaeb1c86aabfde210723714\nRefreshing Cloud Billing API protos.\n\nThis exposes the following API methods:\n- UpdateBillingAccount\n- CreateBillingAccount\n- GetIamPolicy\n- SetIamPolicy\n- TestIamPermissions\n\nThere are also some new fields to support the management of sub-accounts.\n\nPiperOrigin-RevId: 287908369\n\nec285d3d230810147ebbf8d5b691ee90320c6d2d\nHide not yet implemented update_transforms message\n\nPiperOrigin-RevId: 287608953\n\na202fb3b91cd0e4231be878b0348afd17067cbe2\nBigQuery Storage Write API v1alpha2 clients. 
The service is enabled by whitelist only.\n\nPiperOrigin-RevId: 287379998\n\n650d7f1f8adb0cfaf37b3ce2241c3168f24efd4d\nUpdate Readme.md to match latest Bazel updates\n090d98aea20270e3be4b64240775588f7ce50ff8\ndocs(bigtable): Fix library release level listed in generated documentation\n\nPiperOrigin-RevId: 287308849\n\n2c28f646ca77b1d57550368be22aa388adde2e66\nfirestore: retry reads that fail with contention\n\nPiperOrigin-RevId: 287250665\n\nfd3091fbe9b2083cabc53dc50c78035658bfc4eb\nSync timeout in grpc config back to 10s for tasks API with github googelapis gapic config.\n\nPiperOrigin-RevId: 287207067\n\n49dd7d856a6f77c0cf7e5cb3334423e5089a9e8a\nbazel: Integrate bazel-2.0.0 compatibility fixes\n\nPiperOrigin-RevId: 287205644\n\n46e52fd64973e815cae61e78b14608fe7aa7b1df\nbazel: Integrate bazel build file generator\n\nTo generate/update BUILD.bazel files for any particular client or a batch of clients:\n```\nbazel run //:build_gen -- --src=google/example/library\n```\n\nPiperOrigin-RevId: 286958627\n\n1a380ea21dea9b6ac6ad28c60ad96d9d73574e19\nBigQuery Storage Read API v1beta2 clients.\n\nPiperOrigin-RevId: 286616241\n\n5f3f1d0f1c06b6475a17d995e4f7a436ca67ec9e\nAdd Artman config for secretmanager.\n\nPiperOrigin-RevId: 286598440\n\n50af0530730348f1e3697bf3c70261f7daaf2981\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 286491002\n\n91818800384f4ed26961aea268910b1a2ec58cc8\nFor Data Catalog API,\n1. Add support for marking a tag template field as required when creating a new tag template.\n2. Add support for updating a tag template field from required to optional.\n\nPiperOrigin-RevId: 286490262\n\nff4a2047b3d66f38c9b22197c370ed0d02fc0238\nWeekly library update.\n\nPiperOrigin-RevId: 286484215\n\n192c14029861752a911ed434fd6ee5b850517cd9\nWeekly library update.\n\nPiperOrigin-RevId: 286484165\n\nd9e328eaf790d4e4346fbbf32858160f497a03e0\nFix bazel build (versions 1.x)\n\nBump gapic-generator and resource names plugins to the latest version.\n\nPiperOrigin-RevId: 286469287\n\n0ca305403dcc50e31ad9477c9b6241ddfd2056af\nsecretmanager client package name option updates for java and go\n\nPiperOrigin-RevId: 286439553\n\nade4803e8a1a9e3efd249c8c86895d2f12eb2aaa\niam credentials: publish v1 protos containing annotations\n\nPiperOrigin-RevId: 286418383\n\n03e5708e5f8d1909dcb74b25520309e59ebf24be\nsecuritycenter: add missing proto deps for Bazel build\n\nPiperOrigin-RevId: 286417075\n\n8b991eb3eb82483b0ca1f1361a9c8e5b375c4747\nAdd secretmanager client package name options.\n\nPiperOrigin-RevId: 286415883\n\nd400cb8d45df5b2ae796b909f098a215b2275c1d\ndialogflow: add operation_info annotations to BatchUpdateEntities and BatchDeleteEntities.\n\nPiperOrigin-RevId: 286312673\n\nf2b25232db397ebd4f67eb901a2a4bc99f7cc4c6\nIncreased the default timeout time for all the Cloud Security Command Center client libraries.\n\nPiperOrigin-RevId: 286263771\n\ncb2f1eefd684c7efd56fd375cde8d4084a20439e\nExposing new Resource fields in the SecurityCenterProperties proto, added more comments to the filter logic for these Resource fields, and updated the response proto for the ListFindings API with the new Resource fields.\n\nPiperOrigin-RevId: 286263092\n\n73cebb20432b387c3d8879bb161b517d60cf2552\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 286261392\n\n1b4e453d51c0bd77e7b73896cdd8357d62768d83\nsecuritycenter: publish v1beta1 protos with annotations\n\nPiperOrigin-RevId: 286228860\n\na985eeda90ae98e8519d2320bee4dec148eb8ccb\nAdd default retry configurations for 
speech_v1p1beta1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 286191318\n\n3352100a15ede383f5ab3c34599f7a10a3d066fe\nMake importing rule with the same name (but different aliases) from different repositories possible.\n\nThis is needed to allow monolitic gapic-generator and microgenerators coexist during transition period.\n\nTo plug a microgenerator:\n\n1) Add corresponding rules bidnings under `switched_rules_by_language` in repository_rules.bzl:\n rules[\"go_gapic_library2\"] = _switch(\n go and grpc and gapic,\n \"@gapic_generator_go//rules_go_gapic/go_gapic.bzl\",\n \"go_gapic_library\",\n )\n\n2) Import microgenerator in WORKSPACE (the above example assumes that the generator was imported under name \"gapic_generator_go\").\n\n3) To migrate an API from monolith to micro generator (this is done per API and per language) modify the corresponding load statement in the API's BUILD.bazel file. For example, for the example above, to migrate to go microgenerator modify the go-specific load statement in BUILD.bazel file of a specific API (which you want to migrate) to the following:\n\nload(\n \"@com_google_googleapis_imports//:imports.bzl\",\n \"go_gapic_assembly_pkg\",\n go_gapic_library = \"go_gapic_library2\",\n \"go_proto_library\",\n \"go_test\",\n)\n\nPiperOrigin-RevId: 286065440\n\n6ad2bb13bc4b0f3f785517f0563118f6ca52ddfd\nUpdated v1beta1 protos for the client:\n- added support for GenericSignedAttestation which has a generic Signature\n- added support for CVSSv3 and WindowsDetail in Vulnerability\n- documentation updates\n\nPiperOrigin-RevId: 286008145\n\nfe1962e49999a832eed8162c45f23096336a9ced\nAdMob API v1 20191210\n\nBasic account info, mediation and network report available. See https://developers.google.com/admob/api/release-notes for more details.\n\nPiperOrigin-RevId: 285894502\n\n41fc1403738b61427f3a798ca9750ef47eb9c0f2\nAnnotate the required fields for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285824386\n\n27d0e0f202cbe91bf155fcf36824a87a5764ef1e\nRemove inappropriate resource_reference annotations for UpdateWorkflowTemplateRequest.template.\n\nPiperOrigin-RevId: 285802643\n\ne5c4d3a2b5b5bef0a30df39ebb27711dc98dee64\nAdd Artman BUILD.bazel file for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285445602\n\n2085a0d3c76180ee843cf2ecef2b94ca5266be31\nFix path in the artman config for Monitoring Dashboard API.\n\nPiperOrigin-RevId: 285233245\n\n2da72dfe71e4cca80902f9e3e125c40f02c2925b\nAdd Artman and GAPIC configs for the Monitoring Dashboards API.\n\nPiperOrigin-RevId: 285211544\n\n9f6eeebf1f30f51ffa02acea5a71680fe592348e\nAdd annotations to Dataproc v1. 
(Also forwarding comment changes from internal source control.)\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 285197557\n\n19c4589a3cb44b3679f7b3fba88365b3d055d5f8\noslogin: fix v1beta retry configuration\n\nPiperOrigin-RevId: 285013366\n\nee3f02926d0f8a0bc13f8d716581aad20f575751\nAdd Monitoring Dashboards API protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 284982647\n\ne47fdd266542386e5e7346697f90476e96dc7ee8\nbigquery datatransfer: Remove non-publicly available DataSourceService.\n\nPiperOrigin-RevId: 284822593\n\n6156f433fd1d9d5e4a448d6c6da7f637921d92ea\nAdds OSConfig v1beta protos and initial client library config\n\nPiperOrigin-RevId: 284799663\n\n6cc9499e225a4f6a5e34fe07e390f67055d7991c\nAdd datetime.proto to google/type/BUILD.bazel\n\nPiperOrigin-RevId: 284643689\n\nfe7dd5277e39ffe0075729c61e8d118d7527946d\nCosmetic changes to proto comment as part of testing internal release instructions.\n\nPiperOrigin-RevId: 284608712\n\n68d109adad726b89f74276d2f4b2ba6aac6ec04a\nAdd annotations to securitycenter v1, but leave GAPIC v1 in place.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 284580511\n\ndf8a1707a910fc17c71407a75547992fd1864c51\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 284568564\n\na69a974976221ce3bb944901b739418b85d6408c\nclient library update\n\nPiperOrigin-RevId: 284463979\n\na4adac3a12aca6e3a792c9c35ee850435fe7cf7e\nAdded DateTime, TimeZone, and Month proto files to google/type\n\nPiperOrigin-RevId: 284277770\n\ned5dec392906078db4f7745fe4f11d34dd401ae9\nchange common resources from message-level annotations to file-level annotations.\n\nPiperOrigin-RevId: 284236794\n\na00e2c575ef1b637667b4ebe96b8c228b2ddb273\nbigquerydatatransfer: change resource type TransferRun to Run to be consistent with gapic configs\nbigquerydatatransfer: add missing patterns for DataSource, TransferConfig and Run (to allow the location segment)\nbigquerydatatransfer: add file-level Parent resource type (to allow the location segement)\nbigquerydatatransfer: update grpc service config with correct retry delays\n\nPiperOrigin-RevId: 284234378\n\nb10e4547017ca529ac8d183e839f3c272e1c13de\ncloud asset: replace required fields for batchgetassethistory. Correct the time out duration.\n\nPiperOrigin-RevId: 284059574\n\n6690161e3dcc3367639a2ec10db67bf1cf392550\nAdd default retry configurations for speech_v1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. 
See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 284035915\n\n9b2635ef91e114f0357bdb87652c26a8f59316d5\ncloudtasks: fix gapic v2 config\n\nPiperOrigin-RevId: 284020555\n\ne5676ba8b863951a8ed0bfd6046e1db38062743c\nReinstate resource name handling in GAPIC config for Asset v1.\n\nPiperOrigin-RevId: 283993903\n\nf337f7fb702c85833b7b6ca56afaf9a1bf32c096\nOSConfig AgentEndpoint: add LookupEffectiveGuestPolicy rpc\n\nPiperOrigin-RevId: 283989762\n\nc0ac9b55f2e2efd0ee525b3a6591a1b09330e55a\nInclude real time feed api into v1 version\n\nPiperOrigin-RevId: 283845474\n\n2427a3a0f6f4222315362d973d91a082a3a884a7\nfirestore admin: update v1 protos with annotations & retry config\n\nPiperOrigin-RevId: 283826605\n\n555e844dbe04af50a8f55fe1217fa9d39a0a80b2\nchore: publish retry configs for iam admin, cloud asset, and remoteworkers\n\nPiperOrigin-RevId: 283801979\n\n6311dc536668849142d1fe5cd9fc46da66d1f77f\nfirestore: update v1beta1 protos with annotations and retry config\n\nPiperOrigin-RevId: 283794315\n\nda0edeeef953b05eb1524d514d2e9842ac2df0fd\nfeat: publish several retry config files for client generation\n\nPiperOrigin-RevId: 283614497\n\n59a78053537e06190f02d0a7ffb792c34e185c5a\nRemoving TODO comment\n\nPiperOrigin-RevId: 283592535\n\n8463992271d162e2aff1d5da5b78db11f2fb5632\nFix bazel build\n\nPiperOrigin-RevId: 283589351\n\n3bfcb3d8df10dfdba58f864d3bdb8ccd69364669\nPublic client library for bebop_jobs_api_20191118_1_RC3 release.\n\nPiperOrigin-RevId: 283568877\n\n27ab0db61021d267c452b34d149161a7bf0d9f57\nfirestore: publish annotated protos and new retry config\n\nPiperOrigin-RevId: 283565148\n\n38dc36a2a43cbab4a2a9183a43dd0441670098a9\nfeat: add http annotations for operations calls\n\nPiperOrigin-RevId: 283384331\n\n366caab94906975af0e17822e372f1d34e319d51\ndatastore: add a legacy artman config for PHP generation\n\nPiperOrigin-RevId: 283378578\n\n82944da21578a53b74e547774cf62ed31a05b841\nMigrate container v1beta1 to GAPIC v2.\n\nPiperOrigin-RevId: 283342796\n\n584dcde5826dd11ebe222016b7b208a4e1196f4b\nRemove resource name annotation for UpdateKeyRequest.key, because it's the resource, not a name.\n\nPiperOrigin-RevId: 283167368\n\n6ab0171e3688bfdcf3dbc4056e2df6345e843565\nAdded resource annotation for Key message.\n\nPiperOrigin-RevId: 283066965\n\n86c1a2db1707a25cec7d92f8850cc915163ec3c3\nExpose Admin API methods for Key manipulation.\n\nPiperOrigin-RevId: 282988776\n\n3ddad085965896ffb205d44cb0c0616fe3def10b\nC++ targets: correct deps so they build, rename them from trace* to cloudtrace*\nto match the proto names.\n\nPiperOrigin-RevId: 282857635\n\ne9389365a971ad6457ceb9646c595e79dfdbdea5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 282810797\n\ne42eaaa9abed3c4d63d64f790bd3191448dbbca6\nPut back C++ targets for cloud trace v2 api.\n\nPiperOrigin-RevId: 282803841\n\nd8896a3d8a191702a9e39f29cf4c2e16fa05f76d\nAdd initial BUILD.bazel for secretmanager.googleapis.com\n\nPiperOrigin-RevId: 282674885\n\n2cc56cb83ea3e59a6364e0392c29c9e23ad12c3a\nCreate sample for list recommendations\n\nPiperOrigin-RevId: 282665402\n\nf88e2ca65790e3b44bb3455e4779b41de1bf7136\nbump Go to ga\n\nPiperOrigin-RevId: 282651105\n\naac86d932b3cefd7d746f19def6935d16d6235e0\nDocumentation update. 
Add location_id in preparation for regionalization.\n\nPiperOrigin-RevId: 282586371\n\n5b501cd384f6b842486bd41acce77854876158e7\nMigrate Datastore Admin to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282570874\n\n6a16d474d5be201b20a27646e2009c4dfde30452\nMigrate Datastore to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282564329\n\n74bd9b95ac8c70b883814e4765a725cffe43d77c\nmark Go lib ga\n\nPiperOrigin-RevId: 282562558\n\nf7b3d434f44f6a77cf6c37cae5474048a0639298\nAdd secretmanager.googleapis.com protos\n\nPiperOrigin-RevId: 282546399\n\nc34a911aaa0660a45f5a556578f764f135e6e060\niot: bump Go GAPIC to GA release level\n\nPiperOrigin-RevId: 282494787\n\n79b7f1c5ba86859dbf70aa6cd546057c1002cdc0\nPut back C++ targets.\nPrevious change overrode custom C++ targets made by external teams. This PR puts those targets back.\n\nPiperOrigin-RevId: 282458292\n\n06a840781d2dc1b0a28e03e30fb4b1bfb0b29d1e\nPopulate BAZEL.build files for around 100 APIs (all APIs we publish) in all 7 langauges.\n\nPiperOrigin-RevId: 282449910\n\n777b580a046c4fa84a35e1d00658b71964120bb0\nCreate BUILD file for recommender v1beta1\n\nPiperOrigin-RevId: 282068850\n\n48b385b6ef71dfe2596490ea34c9a9a434e74243\nGenerate recommender v1beta1 gRPC ServiceConfig file\n\nPiperOrigin-RevId: 282067795\n\n8395b0f1435a4d7ce8737b3b55392627758bd20c\nfix: Set timeout to 25s, because Tasks fails for any deadline above 30s.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282017295\n\n3ba7ddc4b2acf532bdfb0004ca26311053c11c30\nfix: Shift Ruby and PHP to legacy GAPIC YAMLs for back-compat.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281852671\n\nad6f0c002194c3ec6c13d592d911d122d2293931\nRemove unneeded yaml files\n\nPiperOrigin-RevId: 281835839\n\n1f42588e4373750588152cdf6f747de1cadbcbef\nrefactor: Migrate Tasks beta 2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769558\n\n902b51f2073e9958a2aba441f7f7ac54ea00966d\nrefactor: Migrate Tasks to GAPIC v2 (for real this time).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769522\n\n17561f59970eede87f61ef6e9c322fa1198a2f4d\nMigrate Tasks Beta 3 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769519\n\nf95883b15a1ddd58eb7e3583fdefe7b00505faa3\nRegenerate recommender v1beta1 protos and sanitized yaml\n\nPiperOrigin-RevId: 281765245\n\n9a52df54c626b36699a058013d1735a166933167\nadd gRPC ServiceConfig for grafeas v1\n\nPiperOrigin-RevId: 281762754\n\n7a79d682ef40c5ca39c3fca1c0901a8e90021f8a\nfix: Roll back Tasks GAPIC v2 while we investigate C# issue.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281758548\n\n3fc31491640a90f029f284289e7e97f78f442233\nMigrate Tasks to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281751187\n\n5bc0fecee454f857cec042fb99fe2d22e1bff5bc\nfix: adds operation HTTP rules back to v1p1beta1 config\n\nPiperOrigin-RevId: 281635572\n\n5364a19284a1333b3ffe84e4e78a1919363d9f9c\nbazel: Fix build\n\n1) Update to latest gapic-generator (has iam resource names fix for java).\n2) Fix non-trivial issues with oslogin (resources defined in sibling package to the one they are used from) and monitoring.\n3) Fix trivial missing dependencies in proto_library targets for other apis.\n\nThis is to prepare the repository to being populated with BUILD.bazel files for all supported apis (101 API) in all 7 languages.\n\nPiperOrigin-RevId: 281618750\n\n0aa77cbe45538d5e5739eb637db3f2940b912789\nUpdating common proto files in google/type/ with their latest versions.\n\nPiperOrigin-RevId: 
281603926\n\nd47e1b4485b3effbb2298eb10dd13a544c0f66dc\nfix: replace Speech Recognize RPC retry_codes_name for non-standard assignment\n\nPiperOrigin-RevId: 281594037\n\n16543773103e2619d2b5f52456264de5bb9be104\nRegenerating public protos for datacatalog, also adding gRPC service config.\n\nPiperOrigin-RevId: 281423227\n\n328ebe76adb06128d12547ed70107fb841aebf4e\nChange custom data type from String to google.protobuf.Struct to be consistent with other docs such as\nhttps://developers.google.com/actions/smarthome/develop/process-intents#response_format\n\nPiperOrigin-RevId: 281402467\n\n5af83f47b9656261cafcf88b0b3334521ab266b3\n(internal change without visible public changes)\n\nPiperOrigin-RevId: 281334391\n\nc53ed56649583a149382bd88d3c427be475b91b6\nFix typo in protobuf docs.\n\nPiperOrigin-RevId: 281293109\n\n" } }, { "template": { - "name": "python_library", + "name": "python_split_library", "origin": "synthtool.gcp", "version": "2019.10.17" } diff --git a/packages/google-cloud-logging/test_utils/credentials.json.enc b/packages/google-cloud-logging/test_utils/credentials.json.enc new file mode 100644 index 000000000000..f073c7e4f774 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/credentials.json.enc @@ -0,0 +1,49 @@ +U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA +UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU +aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj +HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV +V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus +J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 +Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He +/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv +ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT +6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq +NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 +j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF +41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM +IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g +x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ +vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy +ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At +CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD +j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK +jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z +cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO +LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso +Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d +XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ +MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP ++dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 +kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU +5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr +E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 +D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT +tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX +XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 +J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB +jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM 
+td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg
+twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC
+mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU
+aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6
+uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK
+n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ
+bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX
+ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H
+NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w
+1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE
+8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL
+qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv
+tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4
+iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l
+bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD
diff --git a/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py
new file mode 100644
index 000000000000..c148b9dc2370
--- /dev/null
+++ b/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py
@@ -0,0 +1,64 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper to determine package from tag.
+
+Get the current package directory corresponding to the Circle Tag.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+
+
+TAG_RE = re.compile(r"""
+    ^
+    (?P<pkg>
+        (([a-z]+)[_-])*)          # pkg-name-with-hyphens-or-underscores (empty allowed)
+    ([0-9]+)\.([0-9]+)\.([0-9]+)  # Version x.y.z (x, y, z all ints)
+    $
+""", re.VERBOSE)
+TAG_ENV = 'CIRCLE_TAG'
+ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
+BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
+CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
+ROOT_DIR = os.path.realpath(
+    os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
+
+
+def main():
+    """Get the current package directory.
+
+    Prints the package directory out so callers can consume it.
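+
+    Example (editor's note: an illustrative sketch, not part of the
+    original script; the tag names are hypothetical)::
+
+        >>> TAG_RE.match('logging-1.2.3').group('pkg')
+        'logging-'
+        >>> TAG_RE.match('1.2.3').group('pkg')  # umbrella release tag
+        ''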
+ """ + if TAG_ENV not in os.environ: + print(ERROR_MSG, file=sys.stderr) + sys.exit(1) + + tag_name = os.environ[TAG_ENV] + match = TAG_RE.match(tag_name) + if match is None: + print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) + sys.exit(1) + + pkg_name = match.group('pkg') + if pkg_name is None: + print(ROOT_DIR) + else: + pkg_dir = pkg_name.rstrip('-').replace('-', '_') + print(os.path.join(ROOT_DIR, pkg_dir)) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh new file mode 100755 index 000000000000..23a4738e90b9 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ev + +# If this is not a CircleCI tag, no-op. +if [[ -z "$CIRCLE_TAG" ]]; then + echo "This is not a release tag. Doing nothing." + exit 0 +fi + +# H/T: http://stackoverflow.com/a/246128/1068170 +SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" +# Determine the package directory being deploying on this tag. +PKG_DIR="$(python ${SCRIPT})" + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Move into the package, build the distribution and upload. +cd ${PKG_DIR} +python3 setup.py sdist bdist_wheel +twine upload dist/* diff --git a/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py b/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py new file mode 100644 index 000000000000..1d51830cc23a --- /dev/null +++ b/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py @@ -0,0 +1,268 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Print a list of packages which require testing.""" + +import os +import re +import subprocess +import warnings + + +CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) +BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) +GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') +CI = os.environ.get('CI', '') +CI_BRANCH = os.environ.get('CIRCLE_BRANCH') +CI_PR = os.environ.get('CIRCLE_PR_NUMBER') +CIRCLE_TAG = os.environ.get('CIRCLE_TAG') +head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] +).strip().decode('ascii').split() +rev_parse = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] +).strip().decode('ascii') +MAJOR_DIV = '#' * 78 +MINOR_DIV = '#' + '-' * 77 + +# NOTE: This reg-ex is copied from ``get_tagged_packages``. +TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) + +# This is the current set of dependencies by package. +# As of this writing, the only "real" dependency is that of error_reporting +# (on logging), the rest are just system test dependencies. +PKG_DEPENDENCIES = { + 'logging': {'pubsub'}, +} + + +def get_baseline(): + """Return the baseline commit. + + On a pull request, or on a branch, return the common parent revision + with the master branch. + + Locally, return a value pulled from environment variables, or None if + the environment variables are not set. + + On a push to master, return None. This will effectively cause everything + to be considered to be affected. + """ + + # If this is a pull request or branch, return the tip for master. + # We will test only packages which have changed since that point. + ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) + + if ci_non_master: + + repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) + subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], + stderr=subprocess.DEVNULL) + subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) + + if CI_PR is None and CI_BRANCH is not None: + output = subprocess.check_output([ + 'git', 'merge-base', '--fork-point', + 'baseline/master', CI_BRANCH]) + return output.strip().decode('ascii') + + return 'baseline/master' + + # If environment variables are set identifying what the master tip is, + # use that. + if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): + remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] + branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') + return '%s/%s' % (remote, branch) + + # If we are not in CI and we got this far, issue a warning. + if not CI: + warnings.warn('No baseline could be determined; this means tests ' + 'will run for every package. If this is local ' + 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' + 'environment variable.') + + # That is all we can do; return None. + return None + + +def get_changed_files(): + """Return a list of files that have been changed since the baseline. + + If there is no base, return None. + """ + # Get the baseline, and fail quickly if there is no baseline. + baseline = get_baseline() + print('# Baseline commit: {}'.format(baseline)) + if not baseline: + return None + + # Return a list of altered files. 
+    try:
+        return subprocess.check_output([
+            'git', 'diff', '--name-only', '{}..HEAD'.format(baseline),
+        ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+    except subprocess.CalledProcessError:
+        warnings.warn('Unable to perform git diff; falling back to assuming '
+                      'all packages have changed.')
+        return None
+
+
+def reverse_map(dict_of_sets):
+    """Reverse a map of one-to-many.
+
+    So the map::
+
+        {
+            'A': {'B', 'C'},
+            'B': {'C'},
+        }
+
+    becomes
+
+        {
+            'B': {'A'},
+            'C': {'A', 'B'},
+        }
+
+    Args:
+        dict_of_sets (dict[set]): A dictionary of sets, mapping
+            one value to many.
+
+    Returns:
+        dict[set]: The reversed map.
+    """
+    result = {}
+    for key, values in dict_of_sets.items():
+        for value in values:
+            result.setdefault(value, set()).add(key)
+
+    return result
+
+
+def get_changed_packages(file_list):
+    """Return a list of changed packages based on the provided file list.
+
+    If the file list is None, then all packages should be considered to be
+    altered.
+    """
+    # Determine a complete list of packages.
+    all_packages = set()
+    for file_ in os.listdir(BASE_DIR):
+        abs_file = os.path.realpath(os.path.join(BASE_DIR, file_))
+        nox_file = os.path.join(abs_file, 'nox.py')
+        if os.path.isdir(abs_file) and os.path.isfile(nox_file):
+            all_packages.add(file_)
+
+    # If there is no file list, send down the full package set.
+    if file_list is None:
+        return all_packages
+
+    # Create a set based on the list of changed files.
+    answer = set()
+    reverse_deps = reverse_map(PKG_DEPENDENCIES)
+    for file_ in file_list:
+        # Ignore root directory changes (setup.py, .gitignore, etc.).
+        if os.path.sep not in file_:
+            continue
+
+        # Ignore changes that are not in a package (usually this will be docs).
+        package = file_.split(os.path.sep, 1)[0]
+        if package not in all_packages:
+            continue
+
+        # If there is a change in core, short-circuit now and return
+        # everything.
+        if package in ('core',):
+            return all_packages
+
+        # Add the package, as well as any dependencies this package has.
+        # NOTE: For now, dependencies only go down one level.
+        answer.add(package)
+        answer = answer.union(reverse_deps.get(package, set()))
+
+    # We got this far without being short-circuited; return the final answer.
+    return answer
+
+
+def get_tagged_package():
+    """Return the package corresponding to the current tag.
+
+    If there is no tag, this will return :data:`None`.
+    """
+    if CIRCLE_TAG is None:
+        return
+
+    match = TAG_RE.match(CIRCLE_TAG)
+    if match is None:
+        return
+
+    pkg_name = match.group('pkg')
+    if pkg_name == '':
+        # NOTE: This corresponds to the "umbrella" tag.
+        return
+
+    return pkg_name.rstrip('-').replace('-', '_')
+
+
+def get_target_packages():
+    """Return a list of target packages to be run in the current build.
+
+    If in a tag build, this will run only the package(s) that are tagged;
+    otherwise it will run the packages that have file changes in them (or
+    packages that depend on those).
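+
+    For example (editor's note: an illustrative sketch, assuming both a
+    ``pubsub`` and a ``logging`` package directory exist with a ``nox.py``)::
+
+        >>> sorted(get_changed_packages(['pubsub/google/cloud/pubsub/iam.py']))
+        ['logging', 'pubsub']
+
+    ``logging`` is included because ``PKG_DEPENDENCIES`` records that it
+    depends on ``pubsub``.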
+ """ + tagged_package = get_tagged_package() + if tagged_package is None: + file_list = get_changed_files() + print(MAJOR_DIV) + print('# Changed files:') + print(MINOR_DIV) + for file_ in file_list or (): + print('# {}'.format(file_)) + for package in sorted(get_changed_packages(file_list)): + yield package + else: + yield tagged_package + + +def main(): + print(MAJOR_DIV) + print('# Environment') + print(MINOR_DIV) + print('# CircleCI: {}'.format(CI)) + print('# CircleCI branch: {}'.format(CI_BRANCH)) + print('# CircleCI pr: {}'.format(CI_PR)) + print('# CircleCI tag: {}'.format(CIRCLE_TAG)) + print('# HEAD ref: {}'.format(head_hash)) + print('# {}'.format(head_name)) + print('# Git branch: {}'.format(rev_parse)) + print(MAJOR_DIV) + + packages = list(get_target_packages()) + + print(MAJOR_DIV) + print('# Target packages:') + print(MINOR_DIV) + for package in packages: + print(package) + print(MAJOR_DIV) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py new file mode 100644 index 000000000000..27d3a0c940ea --- /dev/null +++ b/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py @@ -0,0 +1,98 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Print a list of packages which require testing.""" + +import pathlib +import subprocess + +import ci_diff_helper +import requests + + +def print_environment(environment): + print("-> CI environment:") + print('Branch', environment.branch) + print('PR', environment.pr) + print('In PR', environment.in_pr) + print('Repo URL', environment.repo_url) + if environment.in_pr: + print('PR Base', environment.base) + + +def get_base(environment): + if environment.in_pr: + return environment.base + else: + # If we're not in a PR, just calculate the changes between this commit + # and its parent. 
+ return 'HEAD~1' + + +def get_changed_files_from_base(base): + return subprocess.check_output([ + 'git', 'diff', '--name-only', f'{base}..HEAD', + ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') + + +_URL_TEMPLATE = ( + 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' + '{}/files' +) + + +def get_changed_files_from_pr(pr): + url = _URL_TEMPLATE.format(pr) + while url is not None: + response = requests.get(url) + for info in response.json(): + yield info['filename'] + url = response.links.get('next', {}).get('url') + + +def determine_changed_packages(changed_files): + packages = [ + path.parent for path in pathlib.Path('.').glob('*/noxfile.py') + ] + + changed_packages = set() + for file in changed_files: + file = pathlib.Path(file) + for package in packages: + if package in file.parents: + changed_packages.add(package) + + return changed_packages + + +def main(): + environment = ci_diff_helper.get_config() + print_environment(environment) + base = get_base(environment) + + if environment.in_pr: + changed_files = list(get_changed_files_from_pr(environment.pr)) + else: + changed_files = get_changed_files_from_base(base) + + packages = determine_changed_packages(changed_files) + + print(f"Comparing against {base}.") + print("-> Changed packages:") + + for package in packages: + print(package) + + +main() diff --git a/packages/google-cloud-logging/test_utils/scripts/run_emulator.py b/packages/google-cloud-logging/test_utils/scripts/run_emulator.py new file mode 100644 index 000000000000..287b08640691 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/scripts/run_emulator.py @@ -0,0 +1,199 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run system tests locally with the emulator. + +First makes system calls to spawn the emulator and get the local environment +variable needed for it. Then calls the system tests. +""" + + +import argparse +import os +import subprocess + +import psutil + +from google.cloud.environment_vars import BIGTABLE_EMULATOR +from google.cloud.environment_vars import GCD_DATASET +from google.cloud.environment_vars import GCD_HOST +from google.cloud.environment_vars import PUBSUB_EMULATOR +from run_system_test import run_module_tests + + +BIGTABLE = 'bigtable' +DATASTORE = 'datastore' +PUBSUB = 'pubsub' +PACKAGE_INFO = { + BIGTABLE: (BIGTABLE_EMULATOR,), + DATASTORE: (GCD_DATASET, GCD_HOST), + PUBSUB: (PUBSUB_EMULATOR,), +} +EXTRA = { + DATASTORE: ('--no-legacy',), +} +_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' +_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' +_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' + + +def get_parser(): + """Get simple ``argparse`` parser to determine package. + + :rtype: :class:`argparse.ArgumentParser` + :returns: The parser for this script. 
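+
+    Example (editor's note: a hypothetical session, not in the original
+    file)::
+
+        >>> get_parser().parse_args(['--package', 'pubsub']).package
+        'pubsub'
+        >>> get_parser().parse_args([]).package  # default
+        'datastore'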
+ """ + parser = argparse.ArgumentParser( + description='Run google-cloud system tests against local emulator.') + parser.add_argument('--package', dest='package', + choices=sorted(PACKAGE_INFO.keys()), + default=DATASTORE, help='Package to be tested.') + return parser + + +def get_start_command(package): + """Get command line arguments for starting emulator. + + :type package: str + :param package: The package to start an emulator for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'start') + extra = EXTRA.get(package, ()) + return result + extra + + +def get_env_init_command(package): + """Get command line arguments for getting emulator env. info. + + :type package: str + :param package: The package to get environment info for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'env-init') + extra = EXTRA.get(package, ()) + return result + extra + + +def datastore_wait_ready(popen): + """Wait until the datastore emulator is ready to use. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline() == _DS_READY_LINE + + +def wait_ready_prefix(popen, prefix): + """Wait until the a process encounters a line with matching prefix. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :type prefix: str + :param prefix: The prefix to match + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline().startswith(prefix) + + +def wait_ready(package, popen): + """Wait until the emulator is ready to use. + + :type package: str + :param package: The package to check if ready. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :raises: :class:`KeyError` if the ``package`` is not among + ``datastore``, ``pubsub`` or ``bigtable``. + """ + if package == DATASTORE: + datastore_wait_ready(popen) + elif package == PUBSUB: + wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) + elif package == BIGTABLE: + wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) + else: + raise KeyError('Package not supported', package) + + +def cleanup(pid): + """Cleanup a process (including all of its children). + + :type pid: int + :param pid: Process ID. + """ + proc = psutil.Process(pid) + for child_proc in proc.children(recursive=True): + try: + child_proc.kill() + child_proc.terminate() + except psutil.NoSuchProcess: + pass + proc.terminate() + proc.kill() + + +def run_tests_in_emulator(package): + """Spawn an emulator instance and run the system tests. + + :type package: str + :param package: The package to run system tests against. + """ + # Make sure this package has environment vars to replace. + env_vars = PACKAGE_INFO[package] + + start_command = get_start_command(package) + # Ignore stdin and stdout, don't pollute the user's output with them. 
+    proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
+    try:
+        wait_ready(package, proc_start)
+        env_init_command = get_env_init_command(package)
+        proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE,
+                                    stderr=subprocess.PIPE)
+        env_status = proc_env.wait()
+        if env_status != 0:
+            raise RuntimeError(env_status, proc_env.stderr.read())
+        env_lines = proc_env.stdout.read().strip().split('\n')
+        # Set environment variables before running the system tests.
+        for env_var in env_vars:
+            line_prefix = 'export ' + env_var + '='
+            value, = [line.split(line_prefix, 1)[1] for line in env_lines
+                      if line.startswith(line_prefix)]
+            os.environ[env_var] = value
+        run_module_tests(package,
+                         ignore_requirements=True)
+    finally:
+        cleanup(proc_start.pid)
+
+
+def main():
+    """Main method to run this script."""
+    parser = get_parser()
+    args = parser.parse_args()
+    run_tests_in_emulator(args.package)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/packages/google-cloud-logging/test_utils/scripts/update_docs.sh b/packages/google-cloud-logging/test_utils/scripts/update_docs.sh
new file mode 100755
index 000000000000..8cbab9f0dad0
--- /dev/null
+++ b/packages/google-cloud-logging/test_utils/scripts/update_docs.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+GH_OWNER='GoogleCloudPlatform'
+GH_PROJECT_NAME='google-cloud-python'
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# Function to build the docs.
+function build_docs {
+    rm -rf docs/_build/
+    rm -f docs/bigquery/generated/*.rst
+    # -W -> warnings as errors
+    # -T -> show full traceback on exception
+    # -N -> no color
+    sphinx-build \
+        -W -T -N \
+        -b html \
+        -d docs/_build/doctrees \
+        docs/ \
+        docs/_build/html/
+    return $?
+}
+
+# Only update docs if we are on CircleCI.
+if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then
+    echo "Building new docs on a merged commit."
+elif [[ "$1" == "kokoro" ]]; then
+    echo "Building and publishing docs on Kokoro."
+elif [[ -n "${CIRCLE_TAG}" ]]; then
+    echo "Building new docs on a tag (but will not deploy)."
+    build_docs
+    exit $?
+else
+    echo "Not on master nor a release tag."
+    echo "Building new docs for testing purposes, but not deploying."
+    build_docs
+    exit $?
+fi
+
+# Add the GitHub pages branch as a submodule. `git submodule add` checks it
+# out at HEAD.
+GH_PAGES_DIR='ghpages'
+git submodule add -q -b gh-pages \
+    "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR}
+
+# Determine if we are building a new tag or are building docs
+# for master. Then build new docs in docs/_build from master.
+if [[ -n "${CIRCLE_TAG}" ]]; then
+    # Sphinx will use the package version by default.
+    build_docs
+else
+    SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs
+fi
+
+# Update gh-pages with the created docs.
+cd ${GH_PAGES_DIR}
+git rm -fr latest/
+cp -R ../docs/_build/html/ latest/
+
+# Stage the updated files to push to gh-pages.
+git add .
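+# (Editor's note, not in the original script.) `git status --porcelain`
+# below prints nothing when the tree is clean, which is what makes the
+# empty-string test work.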
+git status + +# If there are no changes, just exit cleanly. +if [[ -z "$(git status --porcelain)" ]]; then + echo "Nothing to commit. Exiting without pushing changes." + exit +fi + +# Commit to gh-pages branch to apply changes. +git config --global user.email "dpebot@google.com" +git config --global user.name "dpebot" +git commit -m "Update docs after merge to master." + +# NOTE: This may fail if two docs updates (on merges to master) +# happen in close proximity. +git push -q origin HEAD:gh-pages diff --git a/packages/google-cloud-logging/test_utils/setup.py b/packages/google-cloud-logging/test_utils/setup.py new file mode 100644 index 000000000000..8e9222a7f862 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/setup.py @@ -0,0 +1,64 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + + +# NOTE: This is duplicated throughout and we should try to +# consolidate. +SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'googleapis-publisher@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'google-auth >= 0.4.0', + 'six', +] + +setup( + name='google-cloud-testutils', + version='0.24.0', + description='System test utilities for google-cloud-python', + packages=find_packages(), + install_requires=REQUIREMENTS, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + **SETUP_BASE +) diff --git a/packages/google-cloud-logging/test_utils/test_utils/__init__.py b/packages/google-cloud-logging/test_utils/test_utils/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/test_utils/test_utils/imports.py b/packages/google-cloud-logging/test_utils/test_utils/imports.py new file mode 100644 index 000000000000..5991af7fc465 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/test_utils/imports.py @@ -0,0 +1,38 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock +import six + + +def maybe_fail_import(predicate): + """Create and return a patcher that conditionally makes an import fail. + + Args: + predicate (Callable[[...], bool]): A callable that, if it returns `True`, + triggers an `ImportError`. It must accept the same arguments as the + built-in `__import__` function. + https://docs.python.org/3/library/functions.html#__import__ + + Returns: + A mock patcher object that can be used to enable patched import behavior. + """ + orig_import = six.moves.builtins.__import__ + + def custom_import(name, globals=None, locals=None, fromlist=(), level=0): + if predicate(name, globals, locals, fromlist, level): + raise ImportError + return orig_import(name, globals, locals, fromlist, level) + + return mock.patch.object(six.moves.builtins, "__import__", new=custom_import) diff --git a/packages/google-cloud-logging/test_utils/test_utils/retry.py b/packages/google-cloud-logging/test_utils/test_utils/retry.py new file mode 100644 index 000000000000..e61c001a03e1 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/test_utils/retry.py @@ -0,0 +1,207 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +from functools import wraps + +import six + +MAX_TRIES = 4 +DELAY = 1 +BACKOFF = 2 + + +def _retry_all(_): + """Retry all caught exceptions.""" + return True + + +class BackoffFailed(Exception): + """Retry w/ backoffs did not complete successfully.""" + + +class RetryBase(object): + """Base for retrying calling a decorated function w/ exponential backoff. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + self.max_tries = max_tries + self.delay = delay + self.backoff = backoff + self.logger = logger.warning if logger else six.print_ + + +class RetryErrors(RetryBase): + """Decorator for retrying given exceptions in testing. + + :type exception: Exception or tuple of Exceptions + :param exception: The exception to check or may be a tuple of + exceptions to check. + + :type error_predicate: function, takes caught exception, returns bool + :param error_predicate: Predicate evaluating whether to retry after a + caught exception. 
+ + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, exception, error_predicate=_retry_all, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryErrors, self).__init__(max_tries, delay, backoff, logger) + self.exception = exception + self.error_predicate = error_predicate + + def __call__(self, to_wrap): + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + try: + return to_wrap(*args, **kwargs) + except self.exception as caught_exception: + + if not self.error_predicate(caught_exception): + raise + + delay = self.delay * self.backoff**tries + msg = ("%s, Trying again in %d seconds..." % + (caught_exception, delay)) + self.logger(msg) + + time.sleep(delay) + tries += 1 + return to_wrap(*args, **kwargs) + + return wrapped_function + + +class RetryResult(RetryBase): + """Decorator for retrying based on non-error result. + + :type result_predicate: function, takes result, returns bool + :param result_predicate: Predicate evaluating whether to retry after a + result is returned. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, result_predicate, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryResult, self).__init__(max_tries, delay, backoff, logger) + self.result_predicate = result_predicate + + def __call__(self, to_wrap): + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + result = to_wrap(*args, **kwargs) + if self.result_predicate(result): + return result + + delay = self.delay * self.backoff**tries + msg = "%s. Trying again in %d seconds..." % ( + self.result_predicate.__name__, delay,) + self.logger(msg) + + time.sleep(delay) + tries += 1 + raise BackoffFailed() + + return wrapped_function + + +class RetryInstanceState(RetryBase): + """Decorator for retrying based on instance state. + + :type instance_predicate: function, takes instance, returns bool + :param instance_predicate: Predicate evaluating whether to retry after an + API-invoking method is called. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. 
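+
+    Example (editor's note: a hypothetical sketch; ``operation`` stands in
+    for any object with a state-refreshing bound method)::
+
+        retry = RetryInstanceState(lambda op: op.complete, max_tries=5)
+        retry(operation.reload)()  # call reload() until operation.complete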
+ """ + def __init__(self, instance_predicate, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryInstanceState, self).__init__( + max_tries, delay, backoff, logger) + self.instance_predicate = instance_predicate + + def __call__(self, to_wrap): + instance = to_wrap.__self__ # only instance methods allowed + + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + result = to_wrap(*args, **kwargs) + if self.instance_predicate(instance): + return result + + delay = self.delay * self.backoff**tries + msg = "%s. Trying again in %d seconds..." % ( + self.instance_predicate.__name__, delay,) + self.logger(msg) + + time.sleep(delay) + tries += 1 + raise BackoffFailed() + + return wrapped_function diff --git a/packages/google-cloud-logging/test_utils/test_utils/system.py b/packages/google-cloud-logging/test_utils/test_utils/system.py new file mode 100644 index 000000000000..590dc62a06e6 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/test_utils/system.py @@ -0,0 +1,81 @@ +# Copyright 2014 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function +import os +import sys +import time + +import google.auth.credentials +from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS + + +# From shell environ. May be None. +CREDENTIALS = os.getenv(TEST_CREDENTIALS) + +ENVIRON_ERROR_MSG = """\ +To run the system tests, you need to set some environment variables. +Please check the CONTRIBUTING guide for instructions. +""" + + +class EmulatorCreds(google.auth.credentials.Credentials): + """A mock credential object. + + Used to avoid unnecessary token refreshing or reliance on the network + while an emulator is running. + """ + + def __init__(self): # pylint: disable=super-init-not-called + self.token = b'seekrit' + self.expiry = None + + @property + def valid(self): + """Would-be validity check of the credentials. + + Always is :data:`True`. + """ + return True + + def refresh(self, unused_request): # pylint: disable=unused-argument + """Off-limits implementation for abstract method.""" + raise RuntimeError('Should never be refreshed.') + + +def check_environ(): + err_msg = None + if CREDENTIALS is None: + err_msg = '\nMissing variables: ' + TEST_CREDENTIALS + elif not os.path.isfile(CREDENTIALS): + err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, + CREDENTIALS) + + if err_msg is not None: + msg = ENVIRON_ERROR_MSG + err_msg + print(msg, file=sys.stderr) + sys.exit(1) + + +def unique_resource_id(delimiter='_'): + """A unique identifier for a resource. + + Intended to help locate resources created in particular + testing environments and at particular times. 
+ """ + build_id = os.getenv('CIRCLE_BUILD_NUM', '') + if build_id == '': + return '%s%d' % (delimiter, 1000 * time.time()) + else: + return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py new file mode 100644 index 000000000000..36b15d6be991 --- /dev/null +++ b/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + + +INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" +PROJECT_INSIDE_ENVVAR = "PROJECT_ID" +PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" +BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" + + +class VPCSCTestConfig(object): + """System test utility for VPCSC detection. + + See: https://cloud.google.com/vpc-service-controls/docs/ + """ + + @property + def inside_vpcsc(self): + """Test whether the test environment is configured to run inside VPCSC. + + Returns: + bool: + true if the environment is configured to run inside VPCSC, + else false. + """ + return INSIDE_VPCSC_ENVVAR in os.environ + + @property + def project_inside(self): + """Project ID for testing outside access. + + Returns: + str: project ID used for testing outside access; None if undefined. + """ + return os.environ.get(PROJECT_INSIDE_ENVVAR, None) + + @property + def project_outside(self): + """Project ID for testing inside access. + + Returns: + str: project ID used for testing inside access; None if undefined. + """ + return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) + + @property + def bucket_outside(self): + """GCS bucket for testing inside access. + + Returns: + str: bucket ID used for testing inside access; None if undefined. + """ + return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) + + def skip_if_inside_vpcsc(self, testcase): + """Test decorator: skip if running inside VPCSC.""" + reason = ( + "Running inside VPCSC. " + "Unset the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_vpcsc(self, testcase): + """Test decorator: skip if running outside VPCSC.""" + reason = ( + "Running outside VPCSC. " + "Set the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_project(self, testcase): + """Test decorator: skip if inside project env var not set.""" + reason = ( + "Project ID for running inside VPCSC not set. " + "Set the {} environment variable to enable this test." 
+ ).format(PROJECT_INSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_inside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_project(self, testcase): + """Test decorator: skip if outside project env var not set.""" + reason = ( + "Project ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(PROJECT_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_outside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_bucket(self, testcase): + """Test decorator: skip if outside bucket env var not set.""" + reason = ( + "Bucket ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(BUCKET_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) + return skip(testcase) + + +vpcsc_config = VPCSCTestConfig() diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 4ffc1cfebd1c..83de87aae299 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -28,6 +28,12 @@ class TestConnection(unittest.TestCase): PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" + @staticmethod + def _get_default_timeout(): + from google.cloud.logging._http import _http + + return _http._DEFAULT_TIMEOUT + @staticmethod def _get_target_class(): from google.cloud.logging._http import Connection @@ -76,7 +82,7 @@ def test_extra_headers(self): headers=expected_headers, method="GET", url=expected_uri, - timeout=None, + timeout=self._get_default_timeout(), ) From aaeaafdbe3f309ef8a09852f877c4ade3940e9e9 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 19 Feb 2020 14:58:40 -0800 Subject: [PATCH 271/855] feat: add support for cmek settings; undeprecate resource name helper methods; bump copyright year to 2020 (#22) --- .../gapic/config_service_v2_client.py | 291 ++++++-- .../gapic/config_service_v2_client_config.py | 11 + .../google/cloud/logging_v2/gapic/enums.py | 2 +- .../gapic/logging_service_v2_client.py | 68 +- .../gapic/metrics_service_v2_client.py | 50 +- .../config_service_v2_grpc_transport.py | 51 +- .../logging_service_v2_grpc_transport.py | 10 +- .../metrics_service_v2_grpc_transport.py | 2 +- .../cloud/logging_v2/proto/log_entry.proto | 24 +- .../cloud/logging_v2/proto/log_entry_pb2.py | 44 +- .../cloud/logging_v2/proto/logging.proto | 44 +- .../logging_v2/proto/logging_config.proto | 288 +++++++- .../logging_v2/proto/logging_config_pb2.py | 639 +++++++++++++++--- .../proto/logging_config_pb2_grpc.py | 54 ++ .../logging_v2/proto/logging_metrics.proto | 70 +- .../logging_v2/proto/logging_metrics_pb2.py | 121 ++-- .../cloud/logging_v2/proto/logging_pb2.py | 108 +-- .../logging_v2/proto/logging_pb2_grpc.py | 8 +- packages/google-cloud-logging/synth.metadata | 28 +- .../v2/test_config_service_v2_client_v2.py | 90 ++- .../v2/test_logging_service_v2_client_v2.py | 2 +- .../v2/test_metrics_service_v2_client_v2.py | 2 +- 22 files changed, 1570 insertions(+), 437 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 521b2e304f84..37dafa34ac0e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -75,24 +75,14 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """DEPRECATED. Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def billing_exclusion_path(cls, billing_account, exclusion): - """DEPRECATED. Return a fully-qualified billing_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing_exclusion string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}/exclusions/{exclusion}", billing_account=billing_account, @@ -101,12 +91,7 @@ def billing_exclusion_path(cls, billing_account, exclusion): @classmethod def billing_sink_path(cls, billing_account, sink): - """DEPRECATED. Return a fully-qualified billing_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing_sink string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}/sinks/{sink}", billing_account=billing_account, @@ -115,12 +100,7 @@ def billing_sink_path(cls, billing_account, sink): @classmethod def exclusion_path(cls, project, exclusion): - """DEPRECATED. Return a fully-qualified exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified exclusion string.""" return google.api_core.path_template.expand( "projects/{project}/exclusions/{exclusion}", project=project, @@ -129,22 +109,12 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): - """DEPRECATED. Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder string.""" return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): - """DEPRECATED. Return a fully-qualified folder_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder_exclusion string.""" return google.api_core.path_template.expand( "folders/{folder}/exclusions/{exclusion}", folder=folder, @@ -153,36 +123,21 @@ def folder_exclusion_path(cls, folder, exclusion): @classmethod def folder_sink_path(cls, folder, sink): - """DEPRECATED. 
Return a fully-qualified folder_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): - """DEPRECATED. Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def organization_exclusion_path(cls, organization, exclusion): - """DEPRECATED. Return a fully-qualified organization_exclusion string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization_exclusion string.""" return google.api_core.path_template.expand( "organizations/{organization}/exclusions/{exclusion}", organization=organization, @@ -191,12 +146,7 @@ def organization_exclusion_path(cls, organization, exclusion): @classmethod def organization_sink_path(cls, organization, sink): - """DEPRECATED. Return a fully-qualified organization_sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization_sink string.""" return google.api_core.path_template.expand( "organizations/{organization}/sinks/{sink}", organization=organization, @@ -205,24 +155,14 @@ def organization_sink_path(cls, organization, sink): @classmethod def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified project string.""" return google.api_core.path_template.expand( "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): - """DEPRECATED. Return a fully-qualified sink string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( "projects/{project}/sinks/{sink}", project=project, sink=sink, ) @@ -1295,3 +1235,208 @@ def delete_exclusion( self._inner_api_calls["delete_exclusion"]( request, retry=retry, timeout=timeout, metadata=metadata ) + + def get_cmek_settings( + self, + name=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders + in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> response = client.get_cmek_settings() + + Args: + name (str): Required. The resource for which to retrieve CMEK settings. 
+ + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders + in the GCP organization. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.CmekSettings` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_cmek_settings" not in self._inner_api_calls: + self._inner_api_calls[ + "get_cmek_settings" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_cmek_settings, + default_retry=self._method_configs["GetCmekSettings"].retry, + default_timeout=self._method_configs["GetCmekSettings"].timeout, + client_info=self._client_info, + ) + + request = logging_config_pb2.GetCmekSettingsRequest(name=name,) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_cmek_settings"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_cmek_settings( + self, + name=None, + cmek_settings=None, + update_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders + in the GCP organization. + + ``UpdateCmekSettings`` will fail if 1) ``kms_key_name`` is invalid, or + 2) the associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the + key, or + + 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> response = client.update_cmek_settings() + + Args: + name (str): Required. The resource name for the CMEK settings to update. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. 
Once configured, it applies to all projects and folders + in the GCP organization. + cmek_settings (Union[dict, ~google.cloud.logging_v2.types.CmekSettings]): Required. The CMEK settings to update. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.CmekSettings` + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask identifying which fields from ``cmek_settings`` + should be updated. A field will be overwritten if and only if it is in + the update mask. Output only fields cannot be updated. + + See ``FieldMask`` for more information. + + Example: ``"updateMask=kmsKeyName"`` + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.CmekSettings` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_cmek_settings" not in self._inner_api_calls: + self._inner_api_calls[ + "update_cmek_settings" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_cmek_settings, + default_retry=self._method_configs["UpdateCmekSettings"].retry, + default_timeout=self._method_configs["UpdateCmekSettings"].timeout, + client_info=self._client_info, + ) + + request = logging_config_pb2.UpdateCmekSettingsRequest( + name=name, cmek_settings=cmek_settings, update_mask=update_mask, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_cmek_settings"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py index b7c00db4cd93..00c7146e2627 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py @@ -4,6 +4,7 @@ "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], "non_idempotent": [], + "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], }, "retry_params": { "default": { @@ -76,6 +77,16 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, + "GetCmekSettings": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent2", + "retry_params_name": "default", + }, +
"UpdateCmekSettings": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, }, } } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py index e677017ccbd2..ee1a098a5779 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 072c4ebbbb7c..c43506d1bb74 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,24 +79,14 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """DEPRECATED. Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def billing_log_path(cls, billing_account, log): - """DEPRECATED. Return a fully-qualified billing_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing_log string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}/logs/{log}", billing_account=billing_account, @@ -105,58 +95,33 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): - """DEPRECATED. Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder string.""" return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): - """DEPRECATED. Return a fully-qualified folder_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): - """DEPRECATED. Return a fully-qualified log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified log string.""" return google.api_core.path_template.expand( "projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): - """DEPRECATED. 
Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def organization_log_path(cls, organization, log): - """DEPRECATED. Return a fully-qualified organization_log string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization_log string.""" return google.api_core.path_template.expand( "organizations/{organization}/logs/{log}", organization=organization, @@ -165,12 +130,7 @@ def organization_log_path(cls, organization, log): @classmethod def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified project string.""" return google.api_core.path_template.expand( "projects/{project}", project=project, ) @@ -296,10 +256,10 @@ def delete_log( metadata=None, ): """ - Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. + Deletes all the log entries in a log. The log reappears if it receives new + entries. Log entries written shortly before the delete operation might not + be deleted. Entries received after the delete operation with a timestamp + before the operation will be deleted. Example: >>> from google.cloud import logging_v2 @@ -573,7 +533,7 @@ def list_log_entries( Example: ``"my-project-1A"``. filter_ (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs - Filters <https://cloud.google.com/logging/docs/view/advanced_filters>`__. + Queries <https://cloud.google.com/logging/docs/view/advanced-queries>`__. Only log entries that match the filter are returned. An empty filter matches all log entries in the resources listed in ``resource_names``. Referencing a parent resource that is not listed in ``resource_names`` diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index b127502ee3de..0c80a5d43fe2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -81,58 +81,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): @classmethod def billing_path(cls, billing_account): - """DEPRECATED. Return a fully-qualified billing string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): - """DEPRECATED.
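The link rename above tracks current product terminology; the ``filter_`` argument of ``list_log_entries`` takes that Advanced Logs Queries syntax. A short sketch before the metrics client changes continue below, with the project ID and filter purely illustrative:

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client()

    # Newest ERROR-and-above entries for one project; iterating the result
    # transparently pages through the response.
    for entry in client.list_log_entries(
        resource_names=["projects/my-project-id"],
        filter_="severity>=ERROR",
        order_by="timestamp desc",
    ):
        print(entry.timestamp, entry.log_name)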
Return a fully-qualified folder string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified folder string.""" return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def metric_path(cls, project, metric): - """DEPRECATED. Return a fully-qualified metric string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( "projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): - """DEPRECATED. Return a fully-qualified organization string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): - """DEPRECATED. Return a fully-qualified project string.""" - warnings.warn( - "Resource name helper functions are deprecated.", - PendingDeprecationWarning, - stacklevel=1, - ) + """Return a fully-qualified project string.""" return google.api_core.path_template.expand( "projects/{project}", project=project, ) @@ -377,7 +352,7 @@ def get_log_metric( >>> response = client.get_log_metric(metric_name) Args: - metric_name (str): The resource name of the desired metric: + metric_name (str): Required. The resource name of the desired metric: :: @@ -454,14 +429,15 @@ def create_log_metric( >>> response = client.create_log_metric(parent, metric) Args: - parent (str): The resource name of the project in which to create the metric: + parent (str): Required. The resource name of the project in which to create the + metric: :: "projects/[PROJECT_ID]" The new metric must be provided in the request. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The new logs-based metric, which must not have an identifier that + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): Required. The new logs-based metric, which must not have an identifier that already exists. If a dict is provided, it must be of the same form as the protobuf @@ -540,7 +516,7 @@ def update_log_metric( >>> response = client.update_log_metric(metric_name, metric) Args: - metric_name (str): The resource name of the metric to update: + metric_name (str): Required. The resource name of the metric to update: :: @@ -549,7 +525,7 @@ def update_log_metric( The updated metric must be provided in the request and it's ``name`` field must be the same as ``[METRIC_ID]`` If the metric does not exist in ``[PROJECT_ID]``, then a new metric is created. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The updated metric. + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): Required. The updated metric. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogMetric` @@ -623,7 +599,7 @@ def delete_log_metric( >>> client.delete_log_metric(metric_name) Args: - metric_name (str): The resource name of the metric to delete: + metric_name (str): Required. 
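The metrics docstrings above now mark their primary arguments as Required; the documented dict forms remain accepted. A create-then-update sketch, all values illustrative:

    from google.cloud import logging_v2

    client = logging_v2.MetricsServiceV2Client()
    parent = client.project_path("my-project-id")  # "projects/my-project-id"

    # A LogMetric in dict form: identifier, description, and the entries
    # filter that the metric counts.
    metric = {
        "name": "error_count",
        "description": "Count of ERROR-level log entries.",
        "filter": "severity>=ERROR",
    }
    client.create_log_metric(parent, metric)

    # Update in place: the metric_name embeds the metric identifier.
    metric["description"] = "Count of ERROR-and-above log entries."
    client.update_log_metric("projects/my-project-id/metrics/error_count", metric)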
The resource name of the metric to delete: + + :: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index b85abcd58a78..f3132ede0451 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -255,3 +255,52 @@ def delete_exclusion(self): deserialized response object. """ return self._stubs["config_service_v2_stub"].DeleteExclusion + + @property + def get_cmek_settings(self): + """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_cmek_settings`. + + Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders + in the GCP organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["config_service_v2_stub"].GetCmekSettings + + @property + def update_cmek_settings(self): + """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_cmek_settings`. + + Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders + in the GCP organization. + + ``UpdateCmekSettings`` will fail if 1) ``kms_key_name`` is invalid, or + 2) the associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the + key, or + + 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["config_service_v2_stub"].UpdateCmekSettings diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index f6ab3ab8876c..4cf843caff47 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -117,10 +117,10 @@ def channel(self): def delete_log(self): """Return the gRPC stub for :meth:`LoggingServiceV2Client.delete_log`. - Deletes all the log entries in a log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. + Deletes all the log entries in a log.
The log reappears if it receives new + entries. Log entries written shortly before the delete operation might not + be deleted. Entries received after the delete operation with a timestamp + before the operation will be deleted. Returns: Callable: A callable which accepts the appropriate diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index bc66722729bb..605bc118e28d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto index f0b037545199..3f9c3d51d76d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.logging.v2; import "google/api/monitored_resource.proto"; +import "google/api/resource.proto"; import "google/logging/type/http_request.proto"; import "google/logging/type/log_severity.proto"; import "google/protobuf/any.proto"; @@ -36,7 +37,17 @@ option php_namespace = "Google\\Cloud\\Logging\\V2"; // An individual entry in a log. // +// message LogEntry { + option (google.api.resource) = { + type: "logging.googleapis.com/Log" + pattern: "projects/{project}/logs/{log}" + pattern: "organizations/{organization}/logs/{log}" + pattern: "folders/{folder}/logs/{log}" + pattern: "billingAccounts/{billing_account}/logs/{log}" + name_field: "log_name" + }; + // Required. The resource name of the log to which this log entry belongs: // // "projects/[PROJECT_ID]/logs/[LOG_ID]" @@ -111,10 +122,15 @@ message LogEntry { // Optional. A unique identifier for the log entry. If you provide a value, // then Logging considers other log entries in the same project, with the same - // `timestamp`, and with the same `insert_id` to be duplicates which can be - // removed. If omitted in new log entries, then Logging assigns its own unique - // identifier. The `insert_id` is also used to order log entries that have the - // same `timestamp` value. + // `timestamp`, and with the same `insert_id` to be duplicates which are + // removed in a single query result. However, there are no guarantees of + // de-duplication in the export of logs. + // + // If the `insert_id` is omitted when writing a log entry, the Logging API + // assigns its own unique identifier in this field. + // + // In queries, the `insert_id` is also used to order log entries that have + // the same `log_name` and `timestamp` values. string insert_id = 4; // Optional. 
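The rewritten ``insert_id`` comment above tightens the contract: entries sharing a project, ``timestamp``, and ``insert_id`` are collapsed only in query results, not in exports, and the ID also orders entries that tie on ``log_name`` and ``timestamp``. A write sketch that pins both fields so a retried call cannot double-count; the client usage is standard v2, the names are illustrative:

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client()

    # If this RPC is retried, the repeated entry carries the same insert_id
    # and timestamp, so query results treat it as a duplicate rather than a
    # second entry.
    client.write_log_entries(
        entries=[
            {
                "text_payload": "job 42 finished",
                "insert_id": "job-42-finished",
                "timestamp": {"seconds": 1577836800},  # fixed so retries match
            }
        ],
        log_name="projects/my-project-id/logs/worker",
        resource={"type": "global"},
    )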
Information about the HTTP request associated with this log diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index c2517d84adae..f4805192b30a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -18,6 +18,7 @@ from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.logging.type import ( http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, ) @@ -39,10 +40,11 @@ "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + 
'\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a\x19google/api/resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xce\x07\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:\xbd\x01\xea\x41\xb9\x01\n\x1alogging.googleapis.com/Log\x12\x1dprojects/{project}/logs/{log}\x12\'organizations/{organization}/logs/{log}\x12\x1b\x66olders/{folder}/logs/{log}\x12,billingAccounts/{billing_account}/logs/{log}\x1a\x08log_nameB\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, @@ -106,8 +108,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1057, - serialized_end=1102, + serialized_start=1084, + serialized_end=1129, ) _LOGENTRY = _descriptor.Descriptor( @@ -427,7 +429,9 @@ extensions=[], nested_types=[_LOGENTRY_LABELSENTRY,], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\271\001\n\032logging.googleapis.com/Log\022\035projects/{project}/logs/{log}\022'organizations/{organization}/logs/{log}\022\033folders/{folder}/logs/{log}\022,billingAccounts/{billing_account}/logs/{log}\032\010log_name" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -440,8 +444,8 @@ fields=[], ), ], - serialized_start=331, - serialized_end=1113, + serialized_start=358, + serialized_end=1332, ) @@ -533,8 +537,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=1115, - serialized_end=1193, + serialized_start=1334, + serialized_end=1412, ) @@ -608,8 +612,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1195, - serialized_end=1265, + serialized_start=1414, + serialized_end=1484, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY @@ -747,10 +751,13 @@ Optional. A unique identifier for the log entry. If you provide a value, then Logging considers other log entries in the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which can be removed. If - omitted in new log entries, then Logging assigns its own - unique identifier. The ``insert_id`` is also used to order log - entries that have the same ``timestamp`` value. + same ``insert_id`` to be duplicates which are removed in a + single query result. However, there are no guarantees of de- + duplication in the export of logs. If the ``insert_id`` is + omitted when writing a log entry, the Logging API assigns its + own unique identifier in this field. In queries, the + ``insert_id`` is also used to order log entries that have the + same ``log_name`` and ``timestamp`` values. http_request: Optional. Information about the HTTP request associated with this log entry, if applicable. @@ -808,8 +815,8 @@ dict( DESCRIPTOR=_LOGENTRYOPERATION, __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about a potentially long-running operation with - which a log entry is associated. + __doc__="""Additional information about a potentially long-running + operation with which a log entry is associated. Attributes: @@ -840,8 +847,8 @@ dict( DESCRIPTOR=_LOGENTRYSOURCELOCATION, __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about the source code location that produced the - log entry. + __doc__="""Additional information about the source code location that + produced the log entry. Attributes: @@ -870,4 +877,5 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None _LOGENTRY.fields_by_name["metadata"]._options = None +_LOGENTRY._options = None # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto index fc4217593770..c3a5246334ca 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -17,15 +17,17 @@ syntax = "proto3"; package google.logging.v2; +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/monitored_resource.proto"; +import "google/api/resource.proto"; import "google/logging/v2/log_entry.proto"; import "google/logging/v2/logging_config.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; -import "google/api/annotations.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -45,10 +47,10 @@ service LoggingServiceV2 { "https://www.googleapis.com/auth/logging.read," "https://www.googleapis.com/auth/logging.write"; - // Deletes all the log entries in a log. - // The log reappears if it receives new entries. - // Log entries written shortly before the delete operation might not be - // deleted. 
+ // Deletes all the log entries in a log. The log reappears if it receives new + // entries. Log entries written shortly before the delete operation might not + // be deleted. Entries received after the delete operation with a timestamp + // before the operation will be deleted. rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{log_name=projects/*/logs/*}" @@ -65,6 +67,7 @@ service LoggingServiceV2 { delete: "/v2/{log_name=billingAccounts/*/logs/*}" } }; + option (google.api.method_signature) = "log_name"; } // Writes log entries to Logging. This API method is the @@ -79,6 +82,7 @@ service LoggingServiceV2 { post: "/v2/entries:write" body: "*" }; + option (google.api.method_signature) = "log_name,resource,labels,entries"; } // Lists log entries. Use this method to retrieve log entries that originated @@ -89,6 +93,7 @@ service LoggingServiceV2 { post: "/v2/entries:list" body: "*" }; + option (google.api.method_signature) = "resource_names,filter,order_by"; } // Lists the descriptors for monitored resource types used by Logging. @@ -116,6 +121,7 @@ service LoggingServiceV2 { get: "/v2/{parent=billingAccounts/*}/logs" } }; + option (google.api.method_signature) = "parent"; } } @@ -133,7 +139,12 @@ message DeleteLogRequest { // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. // For more information about log names, see // [LogEntry][google.logging.v2.LogEntry]. - string log_name = 1; + string log_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Log" + } + ]; } // The parameters to WriteLogEntries. @@ -155,7 +166,9 @@ message WriteLogEntriesRequest { // project, organization, billing account, or folder that is receiving // new log entries, whether the resource is specified in // logName or in an individual log entry. - string log_name = 1; + string log_name = 1 [(google.api.resource_reference) = { + type: "logging.googleapis.com/Log" + }]; // Optional. A default monitored resource object that is assigned to all log // entries in `entries` that do not specify a value for `resource`. Example: @@ -196,7 +209,7 @@ message WriteLogEntriesRequest { // [quota limit](/logging/quota-policy) for calls to `entries.write`, // you should try to include several log entries in this list, // rather than calling this method for each individual log entry. - repeated LogEntry entries = 4; + repeated LogEntry entries = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. Whether valid entries should be written even if some other // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any @@ -245,10 +258,15 @@ message ListLogEntriesRequest { // // // Projects listed in the `project_ids` field are added to this list. - repeated string resource_names = 8; + repeated string resource_names = 8 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Log" + } + ]; // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Filters](/logging/docs/view/advanced_filters). Only log entries that + // Logs Queries](/logging/docs/view/advanced-queries). Only log entries that // match the filter are returned. An empty filter matches all log entries in // the resources listed in `resource_names`. 
Referencing a parent resource // that is not listed in `resource_names` will cause the filter to return no @@ -329,7 +347,9 @@ message ListLogsRequest { // "organizations/[ORGANIZATION_ID]" // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" - string parent = 1; + string parent = 1 [(google.api.resource_reference) = { + child_type: "logging.googleapis.com/Log" + }]; // Optional. The maximum number of results to return from this request. // Non-positive values are ignored. The presence of `nextPageToken` in the diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index a9ccdf51cb19..7fb830ded21f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -17,12 +17,14 @@ syntax = "proto3"; package google.logging.v2; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/api/annotations.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -58,6 +60,7 @@ service ConfigServiceV2 { get: "/v2/{parent=billingAccounts/*}/sinks" } }; + option (google.api.method_signature) = "parent"; } // Gets a sink. @@ -77,6 +80,7 @@ service ConfigServiceV2 { get: "/v2/{sink_name=billingAccounts/*/sinks/*}" } }; + option (google.api.method_signature) = "sink_name"; } // Creates a sink that exports specified log entries to a destination. The @@ -104,6 +108,7 @@ service ConfigServiceV2 { body: "sink" } }; + option (google.api.method_signature) = "parent,sink"; } // Updates a sink. This method replaces the following fields in the existing @@ -148,6 +153,8 @@ service ConfigServiceV2 { body: "sink" } }; + option (google.api.method_signature) = "sink_name,sink,update_mask"; + option (google.api.method_signature) = "sink_name,sink"; } // Deletes a sink. If the sink has a unique `writer_identity`, then that @@ -168,6 +175,7 @@ service ConfigServiceV2 { delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" } }; + option (google.api.method_signature) = "sink_name"; } // Lists all the exclusions in a parent resource. @@ -187,6 +195,7 @@ service ConfigServiceV2 { get: "/v2/{parent=billingAccounts/*}/exclusions" } }; + option (google.api.method_signature) = "parent"; } // Gets the description of an exclusion. @@ -206,6 +215,7 @@ service ConfigServiceV2 { get: "/v2/{name=billingAccounts/*/exclusions/*}" } }; + option (google.api.method_signature) = "name"; } // Creates a new exclusion in a specified parent resource. @@ -232,6 +242,7 @@ service ConfigServiceV2 { body: "exclusion" } }; + option (google.api.method_signature) = "parent,exclusion"; } // Changes one or more properties of an existing exclusion. @@ -256,6 +267,7 @@ service ConfigServiceV2 { body: "exclusion" } }; + option (google.api.method_signature) = "name,exclusion,update_mask"; } // Deletes an exclusion. @@ -275,6 +287,49 @@ service ConfigServiceV2 { delete: "/v2/{name=billingAccounts/*/exclusions/*}" } }; + option (google.api.method_signature) = "name"; + } + + // Gets the Logs Router CMEK settings for the given resource. 
+ // + // Note: CMEK for the Logs Router can currently only be configured for GCP + // organizations. Once configured, it applies to all projects and folders in + // the GCP organization. + // + // See [Enabling CMEK for Logs + // Router](/logging/docs/routing/managed-encryption) for more information. + rpc GetCmekSettings(GetCmekSettingsRequest) returns (CmekSettings) { + option (google.api.http) = { + get: "/v2/{name=*/*}/cmekSettings" + additional_bindings { + get: "/v2/{name=organizations/*}/cmekSettings" + } + }; + } + + // Updates the Logs Router CMEK settings for the given resource. + // + // Note: CMEK for the Logs Router can currently only be configured for GCP + // organizations. Once configured, it applies to all projects and folders in + // the GCP organization. + // + // [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + // will fail if 1) `kms_key_name` is invalid, or 2) the associated service + // account does not have the required + // `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or + // 3) access to the key is disabled. + // + // See [Enabling CMEK for Logs + // Router](/logging/docs/routing/managed-encryption) for more information. + rpc UpdateCmekSettings(UpdateCmekSettingsRequest) returns (CmekSettings) { + option (google.api.http) = { + patch: "/v2/{name=*/*}/cmekSettings" + body: "cmek_settings" + additional_bindings { + patch: "/v2/{name=organizations/*}/cmekSettings" + body: "cmek_settings" + } + }; } } @@ -284,6 +339,14 @@ service ConfigServiceV2 { // The sink must be created within a project, organization, billing account, or // folder. message LogSink { + option (google.api.resource) = { + type: "logging.googleapis.com/Sink" + pattern: "projects/{project}/sinks/{sink}" + pattern: "organizations/{organization}/sinks/{sink}" + pattern: "folders/{folder}/sinks/{sink}" + pattern: "billingAccounts/{billing_account}/sinks/{sink}" + }; + // Available log entry formats. Log entries can be written to // Logging in either format and can be exported in either format. // Version 2 is the preferred format. @@ -302,7 +365,7 @@ message LogSink { // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are // limited to 100 characters and can include only the following characters: // upper and lower-case alphanumeric characters, underscores, hyphens, and - // periods. + // periods. First character has to be alphanumeric. string name = 1; // Required. The export destination: @@ -315,7 +378,9 @@ message LogSink { // have permission to write to the destination or else the log // entries are not exported. For more information, see // [Exporting Logs with Sinks](/logging/docs/api/tasks/exporting-logs). - string destination = 3; + string destination = 3 [(google.api.resource_reference) = { + type: "*" + }]; // Optional. An [advanced logs filter](/logging/docs/view/advanced-queries). The only // exported log entries are those that are in the resource owning the sink and @@ -324,6 +389,14 @@ message LogSink { // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR string filter = 5; + // Optional. A description of this sink. + // The maximum length of the description is 8000 characters. + string description = 18; + + // Optional. If set to True, then this sink is disabled and it does not + // export any log entries. + bool disabled = 19; + // Deprecated. The log entry format to use for this sink's exported log // entries. The v2 format is used by default and cannot be changed. 
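Among the LogSink changes above, the two new writable fields ``description`` and ``disabled`` flow through the existing sink methods once the clients are regenerated; setting ``disabled`` pauses an export without deleting the sink. A dict-form sketch, every value illustrative:

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    sink = {
        "name": "errors-to-bigquery",
        "destination": (
            "bigquery.googleapis.com/projects/my-project-id/datasets/error_logs"
        ),
        "filter": "severity>=ERROR",
        "description": "Errors exported for offline analysis.",
        "disabled": False,  # flip to True to pause the export, keeping the sink
    }
    client.create_sink("projects/my-project-id", sink)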
VersionFormat output_version_format = 6 [deprecated = true]; @@ -342,7 +415,7 @@ message LogSink { // Resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). // Consult the destination service's documentation to determine the // appropriate IAM roles to assign to the identity. - string writer_identity = 8; + string writer_identity = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. This field applies only to sinks owned by organizations and // folders. If the field is false, the default, only the logs owned by the @@ -368,12 +441,12 @@ message LogSink { // Output only. The creation timestamp of the sink. // // This field may not be present for older sinks. - google.protobuf.Timestamp create_time = 13; + google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The last update timestamp of the sink. // // This field may not be present for older sinks. - google.protobuf.Timestamp update_time = 14; + google.protobuf.Timestamp update_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; // Do not use. This field is ignored. google.protobuf.Timestamp start_time = 10 [deprecated = true]; @@ -392,6 +465,14 @@ message BigQueryOptions { // syntax](/bigquery/docs/querying-partitioned-tables) has to be used instead. // In both cases, tables are sharded based on UTC timezone. bool use_partitioned_tables = 1; + + // Output only. True if new timestamp column based partitioning is in use, + // false if legacy ingestion-time partitioning is in use. + // All new sinks will have this field set true and will use timestamp column + // based partitioning. If use_partitioned_tables is false, this value has no + // meaning and will be false. Legacy sinks using partitioned tables will have + // this field set to false. + bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The parameters to `ListSinks`. @@ -402,7 +483,12 @@ message ListSinksRequest { // "organizations/[ORGANIZATION_ID]" // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Sink" + } + ]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `pageToken` must be the value of @@ -437,7 +523,12 @@ message GetSinkRequest { // "folders/[FOLDER_ID]/sinks/[SINK_ID]" // // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; + string sink_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Sink" + } + ]; } // The parameters to `CreateSink`. @@ -450,11 +541,16 @@ message CreateSinkRequest { // "folders/[FOLDER_ID]" // // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Sink" + } + ]; // Required. The new sink, whose `name` parameter is a sink identifier that // is not already in use. - LogSink sink = 2; + LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Determines the kind of IAM identity returned as `writer_identity` // in the new sink. 
If this value is omitted or set to false, and if the @@ -481,11 +577,16 @@ message UpdateSinkRequest { // "folders/[FOLDER_ID]/sinks/[SINK_ID]" // // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; + string sink_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Sink" + } + ]; // Required. The updated sink, whose name is the same identifier that appears // as part of `sink_name`. - LogSink sink = 2; + LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] // for a description of this field. When updating a sink, the effect of this @@ -528,7 +629,12 @@ message DeleteSinkRequest { // "folders/[FOLDER_ID]/sinks/[SINK_ID]" // // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1; + string sink_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Sink" + } + ]; } // Specifies a set of log entries that are not to be stored in @@ -538,9 +644,18 @@ message DeleteSinkRequest { // excluded. Note that organization-level and folder-level exclusions don't // apply to child resources, and that you can't exclude audit log entries. message LogExclusion { + option (google.api.resource) = { + type: "logging.googleapis.com/Exclusion" + pattern: "projects/{project}/exclusions/{exclusion}" + pattern: "organizations/{organization}/exclusions/{exclusion}" + pattern: "folders/{folder}/exclusions/{exclusion}" + pattern: "billingAccounts/{billing_account}/exclusions/{exclusion}" + }; + // Required. A client-assigned identifier, such as // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and // can include only letters, digits, underscores, hyphens, and periods. + // First character has to be alphanumeric. string name = 1; // Optional. A description of this exclusion. @@ -581,7 +696,12 @@ message ListExclusionsRequest { // "organizations/[ORGANIZATION_ID]" // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Exclusion" + } + ]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `pageToken` must be the value of @@ -616,7 +736,12 @@ message GetExclusionRequest { // "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" // // Example: `"projects/my-project-id/exclusions/my-exclusion-id"`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Exclusion" + } + ]; } // The parameters to `CreateExclusion`. @@ -629,7 +754,12 @@ message CreateExclusionRequest { // "folders/[FOLDER_ID]" // // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Exclusion" + } + ]; // Required. The new exclusion, whose `name` parameter is an exclusion name // that is not already used in the parent resource. @@ -646,11 +776,16 @@ message UpdateExclusionRequest { // "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" // // Example: `"projects/my-project-id/exclusions/my-exclusion-id"`. 
- string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Exclusion" + } + ]; // Required. New values for the existing exclusion. Only the fields specified // in `update_mask` are relevant. - LogExclusion exclusion = 2; + LogExclusion exclusion = 2 [(google.api.field_behavior) = REQUIRED]; // Required. A non-empty list of fields to change in the existing exclusion. // New values for the fields are taken from the corresponding fields in the @@ -659,7 +794,7 @@ message UpdateExclusionRequest { // // For example, to change the filter and description of an exclusion, // specify an `update_mask` of `"filter,description"`. - google.protobuf.FieldMask update_mask = 3; + google.protobuf.FieldMask update_mask = 3 [(google.api.field_behavior) = REQUIRED]; } // The parameters to `DeleteExclusion`. @@ -672,5 +807,120 @@ message DeleteExclusionRequest { // "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" // // Example: `"projects/my-project-id/exclusions/my-exclusion-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Exclusion" + } + ]; +} + +// The parameters to +// [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. +// +// See [Enabling CMEK for Logs Router](/logging/docs/routing/managed-encryption) +// for more information. +message GetCmekSettingsRequest { + // Required. The resource for which to retrieve CMEK settings. + // + // "projects/[PROJECT_ID]/cmekSettings" + // "organizations/[ORGANIZATION_ID]/cmekSettings" + // "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + // "folders/[FOLDER_ID]/cmekSettings" + // + // Example: `"organizations/12345/cmekSettings"`. + // + // Note: CMEK for the Logs Router can currently only be configured for GCP + // organizations. Once configured, it applies to all projects and folders in + // the GCP organization. + string name = 1; +} + +// The parameters to +// [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. +// +// See [Enabling CMEK for Logs Router](/logging/docs/routing/managed-encryption) +// for more information. +message UpdateCmekSettingsRequest { + // Required. The resource name for the CMEK settings to update. + // + // "projects/[PROJECT_ID]/cmekSettings" + // "organizations/[ORGANIZATION_ID]/cmekSettings" + // "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + // "folders/[FOLDER_ID]/cmekSettings" + // + // Example: `"organizations/12345/cmekSettings"`. + // + // Note: CMEK for the Logs Router can currently only be configured for GCP + // organizations. Once configured, it applies to all projects and folders in + // the GCP organization. + string name = 1; + + // Required. The CMEK settings to update. + // + // See [Enabling CMEK for Logs + // Router](/logging/docs/routing/managed-encryption) for more information. + CmekSettings cmek_settings = 2; + + // Optional. Field mask identifying which fields from `cmek_settings` should + // be updated. A field will be overwritten if and only if it is in the update + // mask. Output only fields cannot be updated. + // + // See [FieldMask][google.protobuf.FieldMask] for more information. + // + // Example: `"updateMask=kmsKeyName"` + google.protobuf.FieldMask update_mask = 3; +} + +// Describes the customer-managed encryption key (CMEK) settings associated with +// a project, folder, organization, billing account, or flexible resource. 
+// +// Note: CMEK for the Logs Router can currently only be configured for GCP +// organizations. Once configured, it applies to all projects and folders in the +// GCP organization. +// +// See [Enabling CMEK for Logs Router](/logging/docs/routing/managed-encryption) +// for more information. +message CmekSettings { + // Output Only. The resource name of the CMEK settings. string name = 1; + + // The resource name for the configured Cloud KMS key. + // + // KMS key name format: + // "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + // + // For example: + // `"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"` + // + // + // + // To enable CMEK for the Logs Router, set this field to a valid + // `kms_key_name` for which the associated service account has the required + // `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key. + // + // The Cloud KMS key used by the Log Router can be updated by changing the + // `kms_key_name` to a new valid key name. Encryption operations that are in + // progress will be completed with the key that was in use when they started. + // Decryption operations will be completed using the key that was used at the + // time of encryption unless access to that key has been revoked. + // + // To disable CMEK for the Logs Router, set this field to an empty string. + // + // See [Enabling CMEK for Logs + // Router](/logging/docs/routing/managed-encryption) for more information. + string kms_key_name = 2; + + // Output Only. The service account that will be used by the Logs Router to + // access your Cloud KMS key. + // + // Before enabling CMEK for Logs Router, you must first assign the role + // `roles/cloudkms.cryptoKeyEncrypterDecrypter` to the service account that + // the Logs Router will use to access your Cloud KMS key. Use + // [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] to + // obtain the service account ID. + // + // See [Enabling CMEK for Logs + // Router](/logging/docs/routing/managed-encryption) for more information. 
+ string service_account_id = 3; } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index cc2a143fc6fc..65fd2cff616a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -15,12 +15,14 @@ _sym_db = _symbol_database.Default() +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -31,15 +33,17 @@ "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x90\x04\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=fo
lders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x8d\x06\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x0b\x64\x65stination\x18\x03 \x01(\tB\x06\xfa\x41\x03\n\x01*\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x12 \x01(\t\x12\x10\n\x08\x64isabled\x18\x13 \x01(\x08\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x1c\n\x0fwriter_identity\x18\x08 \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02:\xbc\x01\xea\x41\xb8\x01\n\x1blogging.googleapis.com/Sink\x12\x1fprojects/{project}/sinks/{sink}\x12)organizations/{organization}/sinks/{sink}\x12\x1d\x66olders/{folder}/sinks/{sink}\x12.billingAccounts/{billing_account}/sinks/{sink}B\t\n\x07options"b\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08\x12/\n"uses_timestamp_column_partitioning\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"n\n\x10ListSinksRequest\x12\x33\n\x06parent\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\x12\x1blogging.googleapis.com/Sink\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0eGetSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink"\x97\x01\n\x11\x43reateSinkRequest\x12\x33\n\x06parent\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\x12\x1blogging.googleapis.com/Sink\x12-\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSinkB\x03\xe0\x41\x02\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xcb\x01\n\x11UpdateSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink\x12-\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSinkB\x03\xe0\x41\x02\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x11\x44\x65leteSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink"\xa1\x03\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp:\xe9\x01\xea\x41\xe5\x01\n logging.googleapis.com/Exclusion\x12)projects/{project}/exclusions/{exclusion}\x12\x33organizations/{organization}/exclusions/{exclusion}\x12\'folders/{folder}/exclusions/{exclusion}\x12\x38\x62illingAccounts/{billing_account}/exclusions/{exclusion}"x\n\x15ListExclusionsRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\x12 logging.googleapis.com/Exclusion\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"M\n\x13GetExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n logging.googleapis.com/Exclusion"\x86\x01\n\x16\x43reateExclusionRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\x12 logging.googleapis.com/Exclusion\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\xbf\x01\n\x16UpdateExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n logging.googleapis.com/Exclusion\x12\x37\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"P\n\x16\x44\x65leteExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n 
logging.googleapis.com/Exclusion"&\n\x16GetCmekSettingsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x92\x01\n\x19UpdateCmekSettingsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\rcmek_settings\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.CmekSettings\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"N\n\x0c\x43mekSettings\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0ckms_key_name\x18\x02 \x01(\t\x12\x1a\n\x12service_account_id\x18\x03 \x01(\t2\x9e\x1f\n\x0f\x43onfigServiceV2\x12\x90\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xb7\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\xda\x41\x06parent\x12\x9e\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xd3\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\xda\x41\tsink_name\x12\xab\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xda\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\xda\x41\x0bparent,sink\x12\x9f\x04\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xce\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\xda\x41\x1asink_name,sink,update_mask\xda\x41\x0esink_name,sink\x12\xa0\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xd3\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\xda\x41\tsink_name\x12\xb8\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xd0\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\xda\x41\x06parent\x12\xa8\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xce\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\xda\x41\x04name\x12\xf1\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\x91\x02\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{paren
t=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\xda\x41\x10parent,exclusion\x12\xfb\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\x9b\x02\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\xda\x41\x1aname,exclusion,update_mask\x12\xa5\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xce\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\xda\x41\x04name\x12\xad\x01\n\x0fGetCmekSettings\x12).google.logging.v2.GetCmekSettingsRequest\x1a\x1f.google.logging.v2.CmekSettings"N\x82\xd3\xe4\x93\x02H\x12\x1b/v2/{name=*/*}/cmekSettingsZ)\x12\'/v2/{name=organizations/*}/cmekSettings\x12\xd1\x01\n\x12UpdateCmekSettings\x12,.google.logging.v2.UpdateCmekSettingsRequest\x1a\x1f.google.logging.v2.CmekSettings"l\x82\xd3\xe4\x93\x02\x66\x32\x1b/v2/{name=*/*}/cmekSettings:\rcmek_settingsZ82\'/v2/{name=organizations/*}/cmekSettings:\rcmek_settings\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -66,8 +70,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=711, - serialized_end=774, + serialized_start=833, + serialized_end=896, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -112,7 +116,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\003\n\001*"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -133,10 +137,46 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.logging.v2.LogSink.description", + index=3, + number=18, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disabled", + full_name="google.logging.v2.LogSink.disabled", + index=4, + number=19, + type=8, + cpp_type=7, + label=1, + 
has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="output_version_format", full_name="google.logging.v2.LogSink.output_version_format", - index=3, + index=5, number=6, type=14, cpp_type=8, @@ -154,7 +194,7 @@ _descriptor.FieldDescriptor( name="writer_identity", full_name="google.logging.v2.LogSink.writer_identity", - index=4, + index=6, number=8, type=9, cpp_type=9, @@ -166,13 +206,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="include_children", full_name="google.logging.v2.LogSink.include_children", - index=5, + index=7, number=9, type=8, cpp_type=7, @@ -190,7 +230,7 @@ _descriptor.FieldDescriptor( name="bigquery_options", full_name="google.logging.v2.LogSink.bigquery_options", - index=6, + index=8, number=12, type=11, cpp_type=10, @@ -208,7 +248,7 @@ _descriptor.FieldDescriptor( name="create_time", full_name="google.logging.v2.LogSink.create_time", - index=7, + index=9, number=13, type=11, cpp_type=10, @@ -220,13 +260,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="update_time", full_name="google.logging.v2.LogSink.update_time", - index=8, + index=10, number=14, type=11, cpp_type=10, @@ -238,13 +278,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="start_time", full_name="google.logging.v2.LogSink.start_time", - index=9, + index=11, number=10, type=11, cpp_type=10, @@ -262,7 +302,7 @@ _descriptor.FieldDescriptor( name="end_time", full_name="google.logging.v2.LogSink.end_time", - index=10, + index=12, number=11, type=11, cpp_type=10, @@ -281,7 +321,9 @@ extensions=[], nested_types=[], enum_types=[_LOGSINK_VERSIONFORMAT,], - serialized_options=None, + serialized_options=_b( + "\352A\270\001\n\033logging.googleapis.com/Sink\022\037projects/{project}/sinks/{sink}\022)organizations/{organization}/sinks/{sink}\022\035folders/{folder}/sinks/{sink}\022.billingAccounts/{billing_account}/sinks/{sink}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -294,8 +336,8 @@ fields=[], ), ], - serialized_start=257, - serialized_end=785, + serialized_start=317, + serialized_end=1098, ) @@ -324,6 +366,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="uses_timestamp_column_partitioning", + full_name="google.logging.v2.BigQueryOptions.uses_timestamp_column_partitioning", + index=1, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -333,8 +393,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=787, - serialized_end=836, + serialized_start=1100, + serialized_end=1198, ) @@ -360,7 +420,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\022\033logging.googleapis.com/Sink" + ), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -408,8 +470,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=838, - serialized_end=911, + serialized_start=1200, + serialized_end=1310, ) @@ -465,8 +527,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=913, - serialized_end=1000, + serialized_start=1312, + serialized_end=1399, ) @@ -492,7 +554,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033logging.googleapis.com/Sink" + ), file=DESCRIPTOR, ), ], @@ -504,8 +568,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1002, - serialized_end=1037, + serialized_start=1401, + serialized_end=1473, ) @@ -531,7 +595,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\022\033logging.googleapis.com/Sink" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -549,7 +615,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -579,8 +645,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1039, - serialized_end=1148, + serialized_start=1476, + serialized_end=1627, ) @@ -606,7 +672,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033logging.googleapis.com/Sink" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -624,7 +692,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -672,8 +740,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1151, - serialized_end=1312, + serialized_start=1630, + serialized_end=1833, ) @@ -699,7 +767,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033logging.googleapis.com/Sink" + ), file=DESCRIPTOR, ), ], @@ -711,8 +781,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1314, - serialized_end=1352, + serialized_start=1835, + serialized_end=1910, ) @@ -835,13 +905,15 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\345\001\n logging.googleapis.com/Exclusion\022)projects/{project}/exclusions/{exclusion}\0223organizations/{organization}/exclusions/{exclusion}\022'folders/{folder}/exclusions/{exclusion}\0228billingAccounts/{billing_account}/exclusions/{exclusion}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1355, - serialized_end=1536, + serialized_start=1913, + serialized_end=2330, ) @@ -867,7 +939,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A"\022 logging.googleapis.com/Exclusion' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -915,8 +989,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1538, - serialized_end=1616, + serialized_start=2332, + serialized_end=2452, ) @@ -972,8 +1046,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1618, - serialized_end=1720, + serialized_start=2454, + serialized_end=2556, ) @@ -999,7 +1073,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), file=DESCRIPTOR, ), ], @@ -1011,8 +1085,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1722, - serialized_end=1757, + serialized_start=2558, + serialized_end=2635, ) @@ -1038,7 +1112,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A"\022 logging.googleapis.com/Exclusion' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1068,8 +1144,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1759, - serialized_end=1851, + serialized_start=2638, + serialized_end=2772, ) @@ -1095,7 +1171,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1113,7 +1189,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1131,7 +1207,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1143,8 +1219,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1854, - serialized_end=1993, + serialized_start=2775, + serialized_end=2966, ) @@ -1170,6 +1246,120 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2968, + serialized_end=3048, +) + + +_GETCMEKSETTINGSREQUEST = _descriptor.Descriptor( + name="GetCmekSettingsRequest", + full_name="google.logging.v2.GetCmekSettingsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.GetCmekSettingsRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3050, + serialized_end=3088, +) + + +_UPDATECMEKSETTINGSREQUEST = _descriptor.Descriptor( + name="UpdateCmekSettingsRequest", + full_name="google.logging.v2.UpdateCmekSettingsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.UpdateCmekSettingsRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cmek_settings", + full_name="google.logging.v2.UpdateCmekSettingsRequest.cmek_settings", + index=1, + 
number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.logging.v2.UpdateCmekSettingsRequest.update_mask", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), @@ -1182,8 +1372,83 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1995, - serialized_end=2033, + serialized_start=3091, + serialized_end=3237, +) + + +_CMEKSETTINGS = _descriptor.Descriptor( + name="CmekSettings", + full_name="google.logging.v2.CmekSettings", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.logging.v2.CmekSettings.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="kms_key_name", + full_name="google.logging.v2.CmekSettings.kms_key_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="service_account_id", + full_name="google.logging.v2.CmekSettings.service_account_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3239, + serialized_end=3317, ) _LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT @@ -1225,6 +1490,10 @@ _UPDATEEXCLUSIONREQUEST.fields_by_name[ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_UPDATECMEKSETTINGSREQUEST.fields_by_name["cmek_settings"].message_type = _CMEKSETTINGS +_UPDATECMEKSETTINGSREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST @@ -1240,6 +1509,11 @@ DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name["GetCmekSettingsRequest"] = _GETCMEKSETTINGSREQUEST +DESCRIPTOR.message_types_by_name[ + "UpdateCmekSettingsRequest" +] = _UPDATECMEKSETTINGSREQUEST 
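
(Editorial aside, illustrative only and not part of the patch: the lines above register the new CMEK message types on the module's file descriptor. A minimal sketch of how the regenerated classes might be exercised, assuming the package is importable; the organization and key names are placeholders.)

    from google.cloud.logging_v2.proto import logging_config_pb2

    # Field names mirror the descriptors registered above:
    # "name", "cmek_settings", "update_mask".
    request = logging_config_pb2.UpdateCmekSettingsRequest(
        name="organizations/12345/cmekSettings",  # placeholder organization
        cmek_settings=logging_config_pb2.CmekSettings(
            kms_key_name=(
                "projects/my-project-id/locations/my-region/"
                "keyRings/key-ring-name/cryptoKeys/key-name"  # placeholder key
            )
        ),
    )
    request.update_mask.paths.append("kms_key_name")

    # Round-trip through the wire format to confirm the descriptor wiring.
    data = request.SerializeToString()
    parsed = logging_config_pb2.UpdateCmekSettingsRequest.FromString(data)
    assert parsed.cmek_settings.kms_key_name == request.cmek_settings.kms_key_name
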
+DESCRIPTOR.message_types_by_name["CmekSettings"] = _CMEKSETTINGS _sym_db.RegisterFileDescriptor(DESCRIPTOR) LogSink = _reflection.GeneratedProtocolMessageType( @@ -1248,11 +1522,11 @@ dict( DESCRIPTOR=_LOGSINK, __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes a sink used to export log entries to one of the following - destinations in any project: a Cloud Storage bucket, a BigQuery dataset, - or a Cloud Pub/Sub topic. A logs filter controls which log entries are - exported. The sink must be created within a project, organization, - billing account, or folder. + __doc__="""Describes a sink used to export log entries to one of the + following destinations in any project: a Cloud Storage bucket, a + BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter controls which + log entries are exported. The sink must be created within a project, + organization, billing account, or folder. Attributes: @@ -1261,7 +1535,8 @@ the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric - characters, underscores, hyphens, and periods. + characters, underscores, hyphens, and periods. First character + has to be alphanumeric. destination: Required. The export destination: :: "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis @@ -1278,6 +1553,12 @@ and that match the filter. For example: :: logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + description: + Optional. A description of this sink. The maximum length of + the description is 8000 characters. + disabled: + Optional. If set to True, then this sink is disabled and it + does not export any log entries. output_version_format: Deprecated. The log entry format to use for this sink's exported log entries. The v2 format is used by default and @@ -1339,7 +1620,8 @@ dict( DESCRIPTOR=_BIGQUERYOPTIONS, __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data to BigQuery. + __doc__="""Options that change functionality of a sink exporting data + to BigQuery. Attributes: @@ -1352,6 +1634,14 @@ `__ has to be used instead. In both cases, tables are sharded based on UTC timezone. + uses_timestamp_column_partitioning: + Output only. True if new timestamp column based partitioning + is in use, false if legacy ingestion-time partitioning is in + use. All new sinks will have this field set true and will use + timestamp column based partitioning. If + use\_partitioned\_tables is false, this value has no meaning + and will be false. Legacy sinks using partitioned tables will + have this field set to false. """, # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) ), @@ -1558,12 +1848,12 @@ dict( DESCRIPTOR=_LOGEXCLUSION, __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Specifies a set of log entries that are not to be stored in Logging. If - your GCP resource receives a large volume of logs, you can use - exclusions to reduce your chargeable logs. Exclusions are processed - after log sinks, so you can export log entries before they are excluded. - Note that organization-level and folder-level exclusions don't apply to - child resources, and that you can't exclude audit log entries. + __doc__="""Specifies a set of log entries that are not to be stored + in Logging. 
If your GCP resource receives a large volume of logs, you
+  can use exclusions to reduce your chargeable logs. Exclusions are
+  processed after log sinks, so you can export log entries before they are
+  excluded. Note that organization-level and folder-level exclusions don't
+  apply to child resources, and that you can't exclude audit log entries.


   Attributes:
@@ -1571,7 +1861,7 @@
           Required. A client-assigned identifier, such as ``"load-
           balancer-exclusion"``. Identifiers are limited to 100
           characters and can include only letters, digits, underscores,
-          hyphens, and periods.
+          hyphens, and periods. First character has to be alphanumeric.
       description:
           Optional. A description of this exclusion.
       filter:
@@ -1769,11 +2059,162 @@
 )
 _sym_db.RegisterMessage(DeleteExclusionRequest)
 
+GetCmekSettingsRequest = _reflection.GeneratedProtocolMessageType(
+    "GetCmekSettingsRequest",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_GETCMEKSETTINGSREQUEST,
+        __module__="google.cloud.logging_v2.proto.logging_config_pb2",
+        __doc__="""The parameters to
+  [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
+
+  See `Enabling CMEK for Logs
+  Router </logging/docs/routing/managed-encryption>`__ for more
+  information.
+
+
+  Attributes:
+      name:
+          Required. The resource for which to retrieve CMEK settings.
+          ::  "projects/[PROJECT_ID]/cmekSettings"
+          "organizations/[ORGANIZATION_ID]/cmekSettings"
+          "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
+          "folders/[FOLDER_ID]/cmekSettings"  Example:
+          ``"organizations/12345/cmekSettings"``.  Note: CMEK for the
+          Logs Router can currently only be configured for GCP
+          organizations. Once configured, it applies to all projects and
+          folders in the GCP organization.
+  """,
+        # @@protoc_insertion_point(class_scope:google.logging.v2.GetCmekSettingsRequest)
+    ),
+)
+_sym_db.RegisterMessage(GetCmekSettingsRequest)
+
+UpdateCmekSettingsRequest = _reflection.GeneratedProtocolMessageType(
+    "UpdateCmekSettingsRequest",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_UPDATECMEKSETTINGSREQUEST,
+        __module__="google.cloud.logging_v2.proto.logging_config_pb2",
+        __doc__="""The parameters to
+  [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings].
+
+  See `Enabling CMEK for Logs
+  Router </logging/docs/routing/managed-encryption>`__ for more
+  information.
+
+
+  Attributes:
+      name:
+          Required. The resource name for the CMEK settings to update.
+          ::  "projects/[PROJECT_ID]/cmekSettings"
+          "organizations/[ORGANIZATION_ID]/cmekSettings"
+          "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
+          "folders/[FOLDER_ID]/cmekSettings"  Example:
+          ``"organizations/12345/cmekSettings"``.  Note: CMEK for the
+          Logs Router can currently only be configured for GCP
+          organizations. Once configured, it applies to all projects and
+          folders in the GCP organization.
+      cmek_settings:
+          Required. The CMEK settings to update.  See `Enabling CMEK for
+          Logs Router </logging/docs/routing/managed-encryption>`__ for
+          more information.
+      update_mask:
+          Optional. Field mask identifying which fields from
+          ``cmek_settings`` should be updated. A field will be
+          overwritten if and only if it is in the update mask. Output
+          only fields cannot be updated.  See
+          [FieldMask][google.protobuf.FieldMask] for more information.
+          Example: ``"updateMask=kmsKeyName"``
+  """,
+        # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateCmekSettingsRequest)
+    ),
+)
+_sym_db.RegisterMessage(UpdateCmekSettingsRequest)
+
+CmekSettings = _reflection.GeneratedProtocolMessageType(
+    "CmekSettings",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_CMEKSETTINGS,
+        __module__="google.cloud.logging_v2.proto.logging_config_pb2",
+        __doc__="""Describes the customer-managed encryption key (CMEK)
+  settings associated with a project, folder, organization, billing
+  account, or flexible resource.
+
+  Note: CMEK for the Logs Router can currently only be configured for GCP
+  organizations. Once configured, it applies to all projects and folders
+  in the GCP organization.
+
+  See `Enabling CMEK for Logs
+  Router </logging/docs/routing/managed-encryption>`__ for more
+  information.
+
+
+  Attributes:
+      name:
+          Output Only. The resource name of the CMEK settings.
+      kms_key_name:
+          The resource name for the configured Cloud KMS key.  KMS key
+          name format: "projects/[PROJECT\_ID]/locations/[LOCATION]/keyR
+          ings/[KEYRING]/cryptoKeys/[KEY]"  For example: ``"projects/my-
+          project-id/locations/my-region/keyRings/key-ring-
+          name/cryptoKeys/key-name"``  To enable CMEK for the Logs
+          Router, set this field to a valid ``kms_key_name`` for which
+          the associated service account has the required
+          ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned
+          for the key.  The Cloud KMS key used by the Log Router can be
+          updated by changing the ``kms_key_name`` to a new valid key
+          name. Encryption operations that are in progress will be
+          completed with the key that was in use when they started.
+          Decryption operations will be completed using the key that was
+          used at the time of encryption unless access to that key has
+          been revoked.  To disable CMEK for the Logs Router, set this
+          field to an empty string.  See `Enabling CMEK for Logs Router
+          </logging/docs/routing/managed-encryption>`__ for more
+          information.
+      service_account_id:
+          Output Only. The service account that will be used by the Logs
+          Router to access your Cloud KMS key.  Before enabling CMEK for
+          Logs Router, you must first assign the role
+          ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to the service
+          account that the Logs Router will use to access your Cloud KMS
+          key. Use [GetCmekSettings][google.logging.v2.ConfigServiceV2.G
+          etCmekSettings] to obtain the service account ID.  See
+          `Enabling CMEK for Logs Router
+          </logging/docs/routing/managed-encryption>`__ for more information.
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CmekSettings) + ), +) +_sym_db.RegisterMessage(CmekSettings) + DESCRIPTOR._options = None +_LOGSINK.fields_by_name["destination"]._options = None _LOGSINK.fields_by_name["output_version_format"]._options = None +_LOGSINK.fields_by_name["writer_identity"]._options = None +_LOGSINK.fields_by_name["create_time"]._options = None +_LOGSINK.fields_by_name["update_time"]._options = None _LOGSINK.fields_by_name["start_time"]._options = None _LOGSINK.fields_by_name["end_time"]._options = None +_LOGSINK._options = None +_BIGQUERYOPTIONS.fields_by_name["uses_timestamp_column_partitioning"]._options = None +_LISTSINKSREQUEST.fields_by_name["parent"]._options = None +_GETSINKREQUEST.fields_by_name["sink_name"]._options = None +_CREATESINKREQUEST.fields_by_name["parent"]._options = None +_CREATESINKREQUEST.fields_by_name["sink"]._options = None +_UPDATESINKREQUEST.fields_by_name["sink_name"]._options = None +_UPDATESINKREQUEST.fields_by_name["sink"]._options = None +_DELETESINKREQUEST.fields_by_name["sink_name"]._options = None +_LOGEXCLUSION._options = None +_LISTEXCLUSIONSREQUEST.fields_by_name["parent"]._options = None +_GETEXCLUSIONREQUEST.fields_by_name["name"]._options = None +_CREATEEXCLUSIONREQUEST.fields_by_name["parent"]._options = None +_UPDATEEXCLUSIONREQUEST.fields_by_name["name"]._options = None +_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"]._options = None +_UPDATEEXCLUSIONREQUEST.fields_by_name["update_mask"]._options = None +_DELETEEXCLUSIONREQUEST.fields_by_name["name"]._options = None _CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( name="ConfigServiceV2", @@ -1783,8 +2224,8 @@ serialized_options=_b( "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" ), - serialized_start=2036, - serialized_end=5482, + serialized_start=3320, + serialized_end=7318, methods=[ _descriptor.MethodDescriptor( name="ListSinks", @@ -1794,7 +2235,7 @@ input_type=_LISTSINKSREQUEST, output_type=_LISTSINKSRESPONSE, serialized_options=_b( - '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks' + '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks\332A\006parent' ), ), _descriptor.MethodDescriptor( @@ -1805,7 +2246,7 @@ input_type=_GETSINKREQUEST, output_type=_LOGSINK, serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}" + "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}\332A\tsink_name" ), ), _descriptor.MethodDescriptor( @@ -1816,7 +2257,7 @@ input_type=_CREATESINKREQUEST, output_type=_LOGSINK, serialized_options=_b( - 
'\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink' + '\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink\332A\013parent,sink' ), ), _descriptor.MethodDescriptor( @@ -1827,7 +2268,7 @@ input_type=_UPDATESINKREQUEST, output_type=_LOGSINK, serialized_options=_b( - "\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink" + "\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink\332A\032sink_name,sink,update_mask\332A\016sink_name,sink" ), ), _descriptor.MethodDescriptor( @@ -1838,7 +2279,7 @@ input_type=_DELETESINKREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}" + "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\332A\tsink_name" ), ), _descriptor.MethodDescriptor( @@ -1849,7 +2290,7 @@ input_type=_LISTEXCLUSIONSREQUEST, output_type=_LISTEXCLUSIONSRESPONSE, serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions" + "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1860,7 +2301,7 @@ input_type=_GETEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}" + 
"\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1871,7 +2312,7 @@ input_type=_CREATEEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, serialized_options=_b( - '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion' + '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\332A\020parent,exclusion' ), ), _descriptor.MethodDescriptor( @@ -1882,7 +2323,7 @@ input_type=_UPDATEEXCLUSIONREQUEST, output_type=_LOGEXCLUSION, serialized_options=_b( - "\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion" + "\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\332A\032name,exclusion,update_mask" ), ), _descriptor.MethodDescriptor( @@ -1893,7 +2334,29 @@ input_type=_DELETEEXCLUSIONREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}" + "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\332A\004name" + ), + ), + _descriptor.MethodDescriptor( + name="GetCmekSettings", + full_name="google.logging.v2.ConfigServiceV2.GetCmekSettings", + index=10, + containing_service=None, + input_type=_GETCMEKSETTINGSREQUEST, + output_type=_CMEKSETTINGS, + serialized_options=_b( + "\202\323\344\223\002H\022\033/v2/{name=*/*}/cmekSettingsZ)\022'/v2/{name=organizations/*}/cmekSettings" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateCmekSettings", + full_name="google.logging.v2.ConfigServiceV2.UpdateCmekSettings", + index=11, + containing_service=None, + input_type=_UPDATECMEKSETTINGSREQUEST, + output_type=_CMEKSETTINGS, + serialized_options=_b( + "\202\323\344\223\002f2\033/v2/{name=*/*}/cmekSettings:\rcmek_settingsZ82'/v2/{name=organizations/*}/cmekSettings:\rcmek_settings" ), ), ], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index b250dc7dec22..c2e910e1987b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -67,6 +67,16 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) + self.GetCmekSettings = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetCmekSettingsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.FromString, + ) + self.UpdateCmekSettings = channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateCmekSettingsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.FromString, + ) class ConfigServiceV2Servicer(object): @@ -153,6 +163,40 @@ def DeleteExclusion(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def GetCmekSettings(self, request, context): + """Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. + + See [Enabling CMEK for Logs + Router](/logging/docs/routing/managed-encryption) for more information. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateCmekSettings(self, request, context): + """Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) `kms_key_name` is invalid, or 2) the associated service + account does not have the required + `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or + 3) access to the key is disabled. + + See [Enabling CMEK for Logs + Router](/logging/docs/routing/managed-encryption) for more information. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def add_ConfigServiceV2Servicer_to_server(servicer, server): rpc_method_handlers = { @@ -206,6 +250,16 @@ def add_ConfigServiceV2Servicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), + "GetCmekSettings": grpc.unary_unary_rpc_method_handler( + servicer.GetCmekSettings, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetCmekSettingsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.SerializeToString, + ), + "UpdateCmekSettings": grpc.unary_unary_rpc_method_handler( + servicer.UpdateCmekSettings, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateCmekSettingsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( "google.logging.v2.ConfigServiceV2", rpc_method_handlers diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto index 0c294b1013ee..582c067e6833 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto @@ -17,13 +17,16 @@ syntax = "proto3"; package google.logging.v2; +import "google/api/client.proto"; import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; import "google/api/metric.proto"; +import "google/api/resource.proto"; +import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/api/annotations.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -48,6 +51,7 @@ service MetricsServiceV2 { option (google.api.http) = { get: "/v2/{parent=projects/*}/metrics" }; + option (google.api.method_signature) = "parent"; } // Gets a logs-based metric. @@ -55,6 +59,7 @@ service MetricsServiceV2 { option (google.api.http) = { get: "/v2/{metric_name=projects/*/metrics/*}" }; + option (google.api.method_signature) = "metric_name"; } // Creates a logs-based metric. @@ -63,6 +68,7 @@ service MetricsServiceV2 { post: "/v2/{parent=projects/*}/metrics" body: "metric" }; + option (google.api.method_signature) = "parent,metric"; } // Creates or updates a logs-based metric. @@ -71,6 +77,7 @@ service MetricsServiceV2 { put: "/v2/{metric_name=projects/*/metrics/*}" body: "metric" }; + option (google.api.method_signature) = "metric_name,metric"; } // Deletes a logs-based metric. @@ -78,6 +85,7 @@ service MetricsServiceV2 { option (google.api.http) = { delete: "/v2/{metric_name=projects/*/metrics/*}" }; + option (google.api.method_signature) = "metric_name"; } } @@ -89,6 +97,11 @@ service MetricsServiceV2 { // extracted values along with an optional histogram of the values as specified // by the bucket options. 
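
(Editorial aside, illustrative only and not part of the patch: the `method_signature` annotations added to MetricsServiceV2 above only declare flattened argument lists for client generators; the wire protocol is unchanged. A rough sketch of calling the regenerated stub directly over gRPC, with the channel target and project ID as placeholders and credentials omitted; the LogMetric message it lists is defined just below.)

    import grpc
    from google.cloud.logging_v2.proto import (
        logging_metrics_pb2,
        logging_metrics_pb2_grpc,
    )

    # Placeholder target; a real client would use a secure, authenticated channel.
    channel = grpc.insecure_channel("localhost:8080")
    stub = logging_metrics_pb2_grpc.MetricsServiceV2Stub(channel)

    # "parent" is the field named by the new method_signature on ListLogMetrics.
    response = stub.ListLogMetrics(
        logging_metrics_pb2.ListLogMetricsRequest(parent="projects/my-project-id")
    )
    for metric in response.metrics:
        print(metric.name, metric.filter)
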
message LogMetric { + option (google.api.resource) = { + type: "logging.googleapis.com/Metric" + pattern: "projects/{project}/metrics/{metric}" + }; + // Logging API version. enum ApiVersion { // Logging API v2. @@ -211,7 +224,12 @@ message ListLogMetricsRequest { // Required. The name of the project containing the metrics: // // "projects/[PROJECT_ID]" - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `pageToken` must be the value of @@ -238,45 +256,65 @@ message ListLogMetricsResponse { // The parameters to GetLogMetric. message GetLogMetricRequest { - // The resource name of the desired metric: + // Required. The resource name of the desired metric: // // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1; + string metric_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Metric" + } + ]; } // The parameters to CreateLogMetric. message CreateLogMetricRequest { - // The resource name of the project in which to create the metric: + // Required. The resource name of the project in which to create the metric: // // "projects/[PROJECT_ID]" // // The new metric must be provided in the request. - string parent = 1; - - // The new logs-based metric, which must not have an identifier that + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Metric" + } + ]; + + // Required. The new logs-based metric, which must not have an identifier that // already exists. - LogMetric metric = 2; + LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; } // The parameters to UpdateLogMetric. message UpdateLogMetricRequest { - // The resource name of the metric to update: + // Required. The resource name of the metric to update: // // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" // // The updated metric must be provided in the request and its // `name` field must be the same as `[METRIC_ID]`. If the metric // does not exist in `[PROJECT_ID]`, then a new metric is created. - string metric_name = 1; - - // The updated metric. - LogMetric metric = 2; + string metric_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Metric" + } + ]; + + // Required. The updated metric. + LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; } // The parameters to DeleteLogMetric. message DeleteLogMetricRequest { - // Required.
The resource name of the metric to delete: // // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1; + string metric_name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Metric" + } + ]; } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 1addc0a0b592..01e308fb741d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -15,13 +15,16 @@ _sym_db = _symbol_database.Default() +from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,16 +35,19 @@ "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x93\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 
\x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x17google/api/client.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x17google/api/metric.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xdc\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01:G\xea\x41\x44\n\x1dlogging.googleapis.com/Metric\x12#projects/{project}/metrics/{metric}"\x83\x01\n\x15ListLogMetricsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"Q\n\x13GetLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric"\x82\x01\n\x16\x43reateLogMetricRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric\x12\x31\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetricB\x03\xe0\x41\x02"\x87\x01\n\x16UpdateLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric\x12\x31\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetricB\x03\xe0\x41\x02"T\n\x16\x44\x65leteLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric2\xae\x08\n\x10MetricsServiceV2\x12\x97\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"0\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\xda\x41\x06parent\x12\x92\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"<\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\xda\x41\x0bmetric_name\x12\x9b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"?\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\xda\x41\rparent,metric\x12\xa7\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"K\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\xda\x41\x12metric_name,metric\x12\x92\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty"<\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\xda\x41\x0bmetric_name\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ + google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_distribution__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_metric__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -61,8 +67,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=785, - serialized_end=813, + serialized_start=877, + serialized_end=905, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) @@ -119,8 +125,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=729, - serialized_end=783, + serialized_start=821, + serialized_end=875, ) _LOGMETRIC = _descriptor.Descriptor( @@ -314,13 +320,15 @@ extensions=[], nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], enum_types=[_LOGMETRIC_APIVERSION,], - serialized_options=None, + serialized_options=_b( + "\352AD\n\035logging.googleapis.com/Metric\022#projects/{project}/metrics/{metric}" + 
), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=282, - serialized_end=813, + serialized_start=374, + serialized_end=978, ) @@ -346,7 +354,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -394,8 +404,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=815, - serialized_end=893, + serialized_start=981, + serialized_end=1112, ) @@ -451,8 +461,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=895, - serialized_end=991, + serialized_start=1114, + serialized_end=1210, ) @@ -478,7 +488,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035logging.googleapis.com/Metric" + ), file=DESCRIPTOR, ), ], @@ -490,8 +502,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=993, - serialized_end=1035, + serialized_start=1212, + serialized_end=1293, ) @@ -517,7 +529,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035logging.googleapis.com/Metric" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -535,7 +549,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -547,8 +561,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1037, - serialized_end=1123, + serialized_start=1296, + serialized_end=1426, ) @@ -574,7 +588,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035logging.googleapis.com/Metric" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -592,7 +608,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -604,8 +620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1125, - serialized_end=1216, + serialized_start=1429, + serialized_end=1564, ) @@ -631,7 +647,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035logging.googleapis.com/Metric" + ), file=DESCRIPTOR, ), ], @@ -643,8 +661,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1218, - serialized_end=1263, + serialized_start=1566, + serialized_end=1650, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC @@ -692,8 +710,9 @@ ), DESCRIPTOR=_LOGMETRIC, __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Describes a logs-based metric. The value of the metric is the number of - log entries that match a logs filter in a given time interval. + __doc__="""Describes a logs-based metric. The value of the metric is + the number of log entries that match a logs filter in a given time + interval. Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The distribution records the @@ -864,7 +883,7 @@ Attributes: metric_name: - The resource name of the desired metric: :: + Required.
The resource name of the desired metric: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" """, # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) @@ -883,12 +902,12 @@ Attributes: parent: - The resource name of the project in which to create the - metric: :: "projects/[PROJECT_ID]" The new metric must - be provided in the request. + Required. The resource name of the project in which to create + the metric: :: "projects/[PROJECT_ID]" The new metric + must be provided in the request. metric: - The new logs-based metric, which must not have an identifier - that already exists. + Required. The new logs-based metric, which must not have an + identifier that already exists. """, # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) ), @@ -906,13 +925,13 @@ Attributes: metric_name: - The resource name of the metric to update: :: + Required. The resource name of the metric to update: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated metric must be provided in the request and its ``name`` field must be the same as ``[METRIC_ID]``. If the metric does not exist in ``[PROJECT_ID]``, then a new metric is created. metric: - The updated metric. + Required. The updated metric. """, # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) ), @@ -930,7 +949,7 @@ Attributes: metric_name: - The resource name of the metric to delete: :: + Required. The resource name of the metric to delete: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" """, # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) @@ -942,6 +961,14 @@ DESCRIPTOR._options = None _LOGMETRIC_LABELEXTRACTORSENTRY._options = None _LOGMETRIC.fields_by_name["version"]._options = None +_LOGMETRIC._options = None +_LISTLOGMETRICSREQUEST.fields_by_name["parent"]._options = None +_GETLOGMETRICREQUEST.fields_by_name["metric_name"]._options = None +_CREATELOGMETRICREQUEST.fields_by_name["parent"]._options = None +_CREATELOGMETRICREQUEST.fields_by_name["metric"]._options = None +_UPDATELOGMETRICREQUEST.fields_by_name["metric_name"]._options = None +_UPDATELOGMETRICREQUEST.fields_by_name["metric"]._options = None +_DELETELOGMETRICREQUEST.fields_by_name["metric_name"]._options = None _METRICSSERVICEV2 = _descriptor.ServiceDescriptor( name="MetricsServiceV2", @@ -951,8 +978,8 @@ serialized_options=_b( "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" ), - serialized_start=1266, - serialized_end=2262, + serialized_start=1653, + serialized_end=2723, methods=[ _descriptor.MethodDescriptor( name="ListLogMetrics", @@ -962,7 +989,7 @@ input_type=_LISTLOGMETRICSREQUEST, output_type=_LISTLOGMETRICSRESPONSE, serialized_options=_b( - "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" + "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -973,7 +1000,7 @@ input_type=_GETLOGMETRICREQUEST, output_type=_LOGMETRIC, serialized_options=_b( - "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" + "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}\332A\013metric_name" ), ), _descriptor.MethodDescriptor( @@ -984,7 +1011,7 @@ input_type=_CREATELOGMETRICREQUEST, output_type=_LOGMETRIC, serialized_options=_b( -
'\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' + '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric\332A\rparent,metric' ), ), _descriptor.MethodDescriptor( @@ -995,7 +1022,7 @@ input_type=_UPDATELOGMETRICREQUEST, output_type=_LOGMETRIC, serialized_options=_b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" + "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric\332A\022metric_name,metric" ), ), _descriptor.MethodDescriptor( @@ -1006,7 +1033,7 @@ input_type=_DELETELOGMETRICREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" + "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}\332A\013metric_name" ), ), ], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 79a73bd0f5fc..35c9b9c52449 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -15,9 +15,13 @@ _sym_db = _symbol_database.Default() +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.logging_v2.proto import ( log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, ) @@ -28,8 +32,6 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -40,18 +42,20 @@ "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + 
'\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/api/monitored_resource.proto\x1a\x19google/api/resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"H\n\x10\x44\x65leteLogRequest\x12\x34\n\x08log_name\x18\x01 \x01(\tB"\xe0\x41\x02\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log"\xcf\x02\n\x16WriteLogEntriesRequest\x12\x31\n\x08log_name\x18\x01 \x01(\tB\x1f\xfa\x41\x1c\n\x1alogging.googleapis.com/Log\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12\x31\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntryB\x03\xe0\x41\x02\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\xb5\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12:\n\x0eresource_names\x18\x08 \x03(\tB"\xe0\x41\x02\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"i\n\x0fListLogsRequest\x12/\n\x06parent\x18\x01 \x01(\tB\x1f\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xdd\n\n\x10LoggingServiceV2\x12\x93\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xc8\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\xda\x41\x08log_name\x12\xa9\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"?\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\xda\x41 log_name,resource,labels,entries\x12\xa3\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"<\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\xda\x41\x1eresource_names,filter,order_by\x12\xc5\x01\n 
ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\x88\x02\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xb2\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\xda\x41\x06parent\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -78,7 +82,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\034\022\032logging.googleapis.com/Log" + ), file=DESCRIPTOR, ), ], @@ -90,8 +96,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=376, - serialized_end=412, + serialized_start=436, + serialized_end=508, ) @@ -147,8 +153,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=667, - serialized_end=712, + serialized_start=801, + serialized_end=846, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( @@ -173,7 +179,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\034\n\032logging.googleapis.com/Log"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -227,7 +233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -275,8 +281,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=415, - serialized_end=712, + serialized_start=511, + serialized_end=846, ) @@ -295,8 +301,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=714, - serialized_end=739, + serialized_start=848, + serialized_end=873, ) @@ -352,8 +358,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=869, - serialized_end=942, + serialized_start=1003, + serialized_end=1076, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( @@ -390,8 +396,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=742, - serialized_end=942, + serialized_start=876, + serialized_end=1076, ) @@ -435,7 +441,9 
@@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\034\022\032logging.googleapis.com/Log" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -519,8 +527,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=945, - serialized_end=1090, + serialized_start=1079, + serialized_end=1260, ) @@ -576,8 +584,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1092, - serialized_end=1187, + serialized_start=1262, + serialized_end=1357, ) @@ -633,8 +641,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1189, - serialized_end=1269, + serialized_start=1359, + serialized_end=1439, ) @@ -690,8 +698,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1272, - serialized_end=1410, + serialized_start=1442, + serialized_end=1580, ) @@ -717,7 +725,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\034\022\032logging.googleapis.com/Log"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -765,8 +773,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1412, - serialized_end=1484, + serialized_start=1582, + serialized_end=1687, ) @@ -822,8 +830,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1486, - serialized_end=1548, + serialized_start=1689, + serialized_end=1751, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST @@ -997,6 +1005,7 @@ DESCRIPTOR=_WRITELOGENTRIESRESPONSE, __module__="google.cloud.logging_v2.proto.logging_pb2", __doc__="""Result returned from WriteLogEntries. empty + """, # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) ), @@ -1058,13 +1067,13 @@ field are added to this list. filter: Optional. A filter that chooses which log entries to return. - See `Advanced Logs Filters - `__. Only log entries - that match the filter are returned. An empty filter matches - all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in - ``resource_names`` will cause the filter to return no results. - The maximum length of the filter is 20000 characters. + See `Advanced Logs Queries `__. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of the + filter is 20000 characters. order_by: Optional. How the results should be sorted. 
Presently, the only permitted values are ``"timestamp asc"`` (default) and @@ -1233,9 +1242,14 @@ DESCRIPTOR._options = None +_DELETELOGREQUEST.fields_by_name["log_name"]._options = None _WRITELOGENTRIESREQUEST_LABELSENTRY._options = None +_WRITELOGENTRIESREQUEST.fields_by_name["log_name"]._options = None +_WRITELOGENTRIESREQUEST.fields_by_name["entries"]._options = None _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None _LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None +_LISTLOGENTRIESREQUEST.fields_by_name["resource_names"]._options = None +_LISTLOGSREQUEST.fields_by_name["parent"]._options = None _LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( name="LoggingServiceV2", @@ -1245,8 +1259,8 @@ serialized_options=_b( "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" ), - serialized_start=1551, - serialized_end=2836, + serialized_start=1754, + serialized_end=3127, methods=[ _descriptor.MethodDescriptor( name="DeleteLog", @@ -1256,7 +1270,7 @@ input_type=_DELETELOGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}\332A\010log_name" ), ), _descriptor.MethodDescriptor( @@ -1267,7 +1281,7 @@ input_type=_WRITELOGENTRIESREQUEST, output_type=_WRITELOGENTRIESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\026"\021/v2/entries:write:\001*' + '\202\323\344\223\002\026"\021/v2/entries:write:\001*\332A log_name,resource,labels,entries' ), ), _descriptor.MethodDescriptor( @@ -1278,7 +1292,7 @@ input_type=_LISTLOGENTRIESREQUEST, output_type=_LISTLOGENTRIESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\025"\020/v2/entries:list:\001*' + '\202\323\344\223\002\025"\020/v2/entries:list:\001*\332A\036resource_names,filter,order_by' ), ), _descriptor.MethodDescriptor( @@ -1300,7 +1314,7 @@ input_type=_LISTLOGSREQUEST, output_type=_LISTLOGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" + "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs\332A\006parent" ), ), ], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index 2a2b3656925c..e1759bbc1b99 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -49,10 +49,10 @@ class LoggingServiceV2Servicer(object): """ def DeleteLog(self, request, context): - """Deletes all the log entries in a 
log. - The log reappears if it receives new entries. - Log entries written shortly before the delete operation might not be - deleted. + """Deletes all the log entries in a log. The log reappears if it receives new + entries. Log entries written shortly before the delete operation might not + be deleted. Entries received after the delete operation with a timestamp + before the operation will be deleted. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index f8b9e8a14c93..8d8d23a3ba2f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,27 +1,41 @@ { - "updateTime": "2020-02-05T13:11:21.216339Z", + "updateTime": "2020-02-19T02:25:24.328145Z", "sources": [ { "generator": { "name": "artman", - "version": "0.44.4", - "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" + "version": "0.45.0", + "dockerImage": "googleapis/artman@sha256:6aec9c34db0e4be221cdaf6faba27bdc07cfea846808b3d3b964dfce3a9a0f9b" + } + }, + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/python-logging.git", + "sha": "5bbe5de139c7c62f916bd20cf7d3d78ca6b42c9e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c", - "internalRef": "293257997", - "log": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish 
annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\nc1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 
291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library 
update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 
289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n08b488e0660c59842a7dee0e3e2b65d9e3a514a9\nExposing cloud_catalog.proto (This API is already available through REST)\n\nPiperOrigin-RevId: 288625007\n\na613482977e11ac09fa47687a5d1b5a01efcf794\nUpdate the OS Login v1beta API description to render better in the UI.\n\nPiperOrigin-RevId: 288547940\n\n5e182b8d9943f1b17008d69d4c7e865dc83641a7\nUpdate the OS Login API description to render better in the UI.\n\nPiperOrigin-RevId: 288546443\n\ncb79155f596e0396dd900da93872be7066f6340d\nFix: Add a resource annotation for Agent\nFix: Correct the service name in annotations for Intent and SessionEntityType\n\nPiperOrigin-RevId: 288441307\n\nf7f6e9daec3315fd47cb638789bd8415bf4a27cc\nAdded cloud asset api v1p1beta1\n\nPiperOrigin-RevId: 288427239\n\nf2880f5b342c6345f3dcaad24fcb3c6ca9483654\nBilling account API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 288351810\n\ndc250ffe071729f8f8bef9d6fd0fbbeb0254c666\nFix: Remove incorrect resource 
annotations in requests\n\nPiperOrigin-RevId: 288321208\n\n91ef2d9dd69807b0b79555f22566fb2d81e49ff9\nAdd GAPIC annotations to Cloud KMS (but do not migrate the GAPIC config yet).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 287999179\n\n4d45a6399e9444fbddaeb1c86aabfde210723714\nRefreshing Cloud Billing API protos.\n\nThis exposes the following API methods:\n- UpdateBillingAccount\n- CreateBillingAccount\n- GetIamPolicy\n- SetIamPolicy\n- TestIamPermissions\n\nThere are also some new fields to support the management of sub-accounts.\n\nPiperOrigin-RevId: 287908369\n\nec285d3d230810147ebbf8d5b691ee90320c6d2d\nHide not yet implemented update_transforms message\n\nPiperOrigin-RevId: 287608953\n\na202fb3b91cd0e4231be878b0348afd17067cbe2\nBigQuery Storage Write API v1alpha2 clients. The service is enabled by whitelist only.\n\nPiperOrigin-RevId: 287379998\n\n650d7f1f8adb0cfaf37b3ce2241c3168f24efd4d\nUpdate Readme.md to match latest Bazel updates\n090d98aea20270e3be4b64240775588f7ce50ff8\ndocs(bigtable): Fix library release level listed in generated documentation\n\nPiperOrigin-RevId: 287308849\n\n2c28f646ca77b1d57550368be22aa388adde2e66\nfirestore: retry reads that fail with contention\n\nPiperOrigin-RevId: 287250665\n\nfd3091fbe9b2083cabc53dc50c78035658bfc4eb\nSync timeout in grpc config back to 10s for tasks API with github googleapis gapic config.\n\nPiperOrigin-RevId: 287207067\n\n49dd7d856a6f77c0cf7e5cb3334423e5089a9e8a\nbazel: Integrate bazel-2.0.0 compatibility fixes\n\nPiperOrigin-RevId: 287205644\n\n46e52fd64973e815cae61e78b14608fe7aa7b1df\nbazel: Integrate bazel build file generator\n\nTo generate/update BUILD.bazel files for any particular client or a batch of clients:\n```\nbazel run //:build_gen -- --src=google/example/library\n```\n\nPiperOrigin-RevId: 286958627\n\n1a380ea21dea9b6ac6ad28c60ad96d9d73574e19\nBigQuery Storage Read API v1beta2 clients.\n\nPiperOrigin-RevId: 286616241\n\n5f3f1d0f1c06b6475a17d995e4f7a436ca67ec9e\nAdd Artman config for secretmanager.\n\nPiperOrigin-RevId: 286598440\n\n50af0530730348f1e3697bf3c70261f7daaf2981\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 286491002\n\n91818800384f4ed26961aea268910b1a2ec58cc8\nFor Data Catalog API,\n1. Add support for marking a tag template field as required when creating a new tag template.\n2. 
Add support for updating a tag template field from required to optional.\n\nPiperOrigin-RevId: 286490262\n\nff4a2047b3d66f38c9b22197c370ed0d02fc0238\nWeekly library update.\n\nPiperOrigin-RevId: 286484215\n\n192c14029861752a911ed434fd6ee5b850517cd9\nWeekly library update.\n\nPiperOrigin-RevId: 286484165\n\nd9e328eaf790d4e4346fbbf32858160f497a03e0\nFix bazel build (versions 1.x)\n\nBump gapic-generator and resource names plugins to the latest version.\n\nPiperOrigin-RevId: 286469287\n\n0ca305403dcc50e31ad9477c9b6241ddfd2056af\nsecretmanager client package name option updates for java and go\n\nPiperOrigin-RevId: 286439553\n\nade4803e8a1a9e3efd249c8c86895d2f12eb2aaa\niam credentials: publish v1 protos containing annotations\n\nPiperOrigin-RevId: 286418383\n\n03e5708e5f8d1909dcb74b25520309e59ebf24be\nsecuritycenter: add missing proto deps for Bazel build\n\nPiperOrigin-RevId: 286417075\n\n8b991eb3eb82483b0ca1f1361a9c8e5b375c4747\nAdd secretmanager client package name options.\n\nPiperOrigin-RevId: 286415883\n\nd400cb8d45df5b2ae796b909f098a215b2275c1d\ndialogflow: add operation_info annotations to BatchUpdateEntities and BatchDeleteEntities.\n\nPiperOrigin-RevId: 286312673\n\nf2b25232db397ebd4f67eb901a2a4bc99f7cc4c6\nIncreased the default timeout time for all the Cloud Security Command Center client libraries.\n\nPiperOrigin-RevId: 286263771\n\ncb2f1eefd684c7efd56fd375cde8d4084a20439e\nExposing new Resource fields in the SecurityCenterProperties proto, added more comments to the filter logic for these Resource fields, and updated the response proto for the ListFindings API with the new Resource fields.\n\nPiperOrigin-RevId: 286263092\n\n73cebb20432b387c3d8879bb161b517d60cf2552\nUpdate v1beta2 clusters and jobs to include resource ids in GRPC header.\n\nPiperOrigin-RevId: 286261392\n\n1b4e453d51c0bd77e7b73896cdd8357d62768d83\nsecuritycenter: publish v1beta1 protos with annotations\n\nPiperOrigin-RevId: 286228860\n\na985eeda90ae98e8519d2320bee4dec148eb8ccb\nAdd default retry configurations for speech_v1p1beta1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 286191318\n\n3352100a15ede383f5ab3c34599f7a10a3d066fe\nMake importing rule with the same name (but different aliases) from different repositories possible.\n\nThis is needed to allow the monolithic gapic-generator and microgenerators to coexist during the transition period.\n\nTo plug a microgenerator:\n\n1) Add corresponding rules bindings under `switched_rules_by_language` in repository_rules.bzl:\n rules[\"go_gapic_library2\"] = _switch(\n go and grpc and gapic,\n \"@gapic_generator_go//rules_go_gapic/go_gapic.bzl\",\n \"go_gapic_library\",\n )\n\n2) Import microgenerator in WORKSPACE (the above example assumes that the generator was imported under name \"gapic_generator_go\").\n\n3) To migrate an API from monolith to micro generator (this is done per API and per language) modify the corresponding load statement in the API's BUILD.bazel file. 
For example, for the example above, to migrate to go microgenerator modify the go-specific load statement in BUILD.bazel file of a specific API (which you want to migrate) to the following:\n\nload(\n \"@com_google_googleapis_imports//:imports.bzl\",\n \"go_gapic_assembly_pkg\",\n go_gapic_library = \"go_gapic_library2\",\n \"go_proto_library\",\n \"go_test\",\n)\n\nPiperOrigin-RevId: 286065440\n\n6ad2bb13bc4b0f3f785517f0563118f6ca52ddfd\nUpdated v1beta1 protos for the client:\n- added support for GenericSignedAttestation which has a generic Signature\n- added support for CVSSv3 and WindowsDetail in Vulnerability\n- documentation updates\n\nPiperOrigin-RevId: 286008145\n\nfe1962e49999a832eed8162c45f23096336a9ced\nAdMob API v1 20191210\n\nBasic account info, mediation and network report available. See https://developers.google.com/admob/api/release-notes for more details.\n\nPiperOrigin-RevId: 285894502\n\n41fc1403738b61427f3a798ca9750ef47eb9c0f2\nAnnotate the required fields for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285824386\n\n27d0e0f202cbe91bf155fcf36824a87a5764ef1e\nRemove inappropriate resource_reference annotations for UpdateWorkflowTemplateRequest.template.\n\nPiperOrigin-RevId: 285802643\n\ne5c4d3a2b5b5bef0a30df39ebb27711dc98dee64\nAdd Artman BUILD.bazel file for the Monitoring Dashboards API\n\nPiperOrigin-RevId: 285445602\n\n2085a0d3c76180ee843cf2ecef2b94ca5266be31\nFix path in the artman config for Monitoring Dashboard API.\n\nPiperOrigin-RevId: 285233245\n\n2da72dfe71e4cca80902f9e3e125c40f02c2925b\nAdd Artman and GAPIC configs for the Monitoring Dashboards API.\n\nPiperOrigin-RevId: 285211544\n\n9f6eeebf1f30f51ffa02acea5a71680fe592348e\nAdd annotations to Dataproc v1. (Also forwarding comment changes from internal source control.)\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 285197557\n\n19c4589a3cb44b3679f7b3fba88365b3d055d5f8\noslogin: fix v1beta retry configuration\n\nPiperOrigin-RevId: 285013366\n\nee3f02926d0f8a0bc13f8d716581aad20f575751\nAdd Monitoring Dashboards API protocol buffers to Google Cloud Monitoring API.\n\nPiperOrigin-RevId: 284982647\n\ne47fdd266542386e5e7346697f90476e96dc7ee8\nbigquery datatransfer: Remove non-publicly available DataSourceService.\n\nPiperOrigin-RevId: 284822593\n\n6156f433fd1d9d5e4a448d6c6da7f637921d92ea\nAdds OSConfig v1beta protos and initial client library config\n\nPiperOrigin-RevId: 284799663\n\n6cc9499e225a4f6a5e34fe07e390f67055d7991c\nAdd datetime.proto to google/type/BUILD.bazel\n\nPiperOrigin-RevId: 284643689\n\nfe7dd5277e39ffe0075729c61e8d118d7527946d\nCosmetic changes to proto comment as part of testing internal release instructions.\n\nPiperOrigin-RevId: 284608712\n\n68d109adad726b89f74276d2f4b2ba6aac6ec04a\nAdd annotations to securitycenter v1, but leave GAPIC v1 in place.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 284580511\n\ndf8a1707a910fc17c71407a75547992fd1864c51\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 284568564\n\na69a974976221ce3bb944901b739418b85d6408c\nclient library update\n\nPiperOrigin-RevId: 284463979\n\na4adac3a12aca6e3a792c9c35ee850435fe7cf7e\nAdded DateTime, TimeZone, and Month proto files to google/type\n\nPiperOrigin-RevId: 284277770\n\ned5dec392906078db4f7745fe4f11d34dd401ae9\nchange common resources from message-level annotations to file-level annotations.\n\nPiperOrigin-RevId: 284236794\n\na00e2c575ef1b637667b4ebe96b8c228b2ddb273\nbigquerydatatransfer: change resource type TransferRun to Run to be consistent with gapic configs\nbigquerydatatransfer: add missing 
patterns for DataSource, TransferConfig and Run (to allow the location segment)\nbigquerydatatransfer: add file-level Parent resource type (to allow the location segment)\nbigquerydatatransfer: update grpc service config with correct retry delays\n\nPiperOrigin-RevId: 284234378\n\nb10e4547017ca529ac8d183e839f3c272e1c13de\ncloud asset: replace required fields for batchgetassethistory. Correct the timeout duration.\n\nPiperOrigin-RevId: 284059574\n\n6690161e3dcc3367639a2ec10db67bf1cf392550\nAdd default retry configurations for speech_v1.\n\nSettings are copied from speech_gapic.legacy.yaml. The Python client library is being generated with timeouts that are too low. See https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2578\n\nPiperOrigin-RevId: 284035915\n\n9b2635ef91e114f0357bdb87652c26a8f59316d5\ncloudtasks: fix gapic v2 config\n\nPiperOrigin-RevId: 284020555\n\ne5676ba8b863951a8ed0bfd6046e1db38062743c\nReinstate resource name handling in GAPIC config for Asset v1.\n\nPiperOrigin-RevId: 283993903\n\nf337f7fb702c85833b7b6ca56afaf9a1bf32c096\nOSConfig AgentEndpoint: add LookupEffectiveGuestPolicy rpc\n\nPiperOrigin-RevId: 283989762\n\nc0ac9b55f2e2efd0ee525b3a6591a1b09330e55a\nInclude real time feed api into v1 version\n\nPiperOrigin-RevId: 283845474\n\n2427a3a0f6f4222315362d973d91a082a3a884a7\nfirestore admin: update v1 protos with annotations & retry config\n\nPiperOrigin-RevId: 283826605\n\n555e844dbe04af50a8f55fe1217fa9d39a0a80b2\nchore: publish retry configs for iam admin, cloud asset, and remoteworkers\n\nPiperOrigin-RevId: 283801979\n\n6311dc536668849142d1fe5cd9fc46da66d1f77f\nfirestore: update v1beta1 protos with annotations and retry config\n\nPiperOrigin-RevId: 283794315\n\nda0edeeef953b05eb1524d514d2e9842ac2df0fd\nfeat: publish several retry config files for client generation\n\nPiperOrigin-RevId: 283614497\n\n59a78053537e06190f02d0a7ffb792c34e185c5a\nRemoving TODO comment\n\nPiperOrigin-RevId: 283592535\n\n8463992271d162e2aff1d5da5b78db11f2fb5632\nFix bazel build\n\nPiperOrigin-RevId: 283589351\n\n3bfcb3d8df10dfdba58f864d3bdb8ccd69364669\nPublic client library for bebop_jobs_api_20191118_1_RC3 release.\n\nPiperOrigin-RevId: 283568877\n\n27ab0db61021d267c452b34d149161a7bf0d9f57\nfirestore: publish annotated protos and new retry config\n\nPiperOrigin-RevId: 283565148\n\n38dc36a2a43cbab4a2a9183a43dd0441670098a9\nfeat: add http annotations for operations calls\n\nPiperOrigin-RevId: 283384331\n\n366caab94906975af0e17822e372f1d34e319d51\ndatastore: add a legacy artman config for PHP generation\n\nPiperOrigin-RevId: 283378578\n\n82944da21578a53b74e547774cf62ed31a05b841\nMigrate container v1beta1 to GAPIC v2.\n\nPiperOrigin-RevId: 283342796\n\n584dcde5826dd11ebe222016b7b208a4e1196f4b\nRemove resource name annotation for UpdateKeyRequest.key, because it's the resource, not a name.\n\nPiperOrigin-RevId: 283167368\n\n6ab0171e3688bfdcf3dbc4056e2df6345e843565\nAdded resource annotation for Key message.\n\nPiperOrigin-RevId: 283066965\n\n86c1a2db1707a25cec7d92f8850cc915163ec3c3\nExpose Admin API methods for Key manipulation.\n\nPiperOrigin-RevId: 282988776\n\n3ddad085965896ffb205d44cb0c0616fe3def10b\nC++ targets: correct deps so they build, rename them from trace* to cloudtrace*\nto match the proto names.\n\nPiperOrigin-RevId: 282857635\n\ne9389365a971ad6457ceb9646c595e79dfdbdea5\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 282810797\n\ne42eaaa9abed3c4d63d64f790bd3191448dbbca6\nPut back C++ targets for cloud trace v2 api.\n\nPiperOrigin-RevId: 
282803841\n\nd8896a3d8a191702a9e39f29cf4c2e16fa05f76d\nAdd initial BUILD.bazel for secretmanager.googleapis.com\n\nPiperOrigin-RevId: 282674885\n\n2cc56cb83ea3e59a6364e0392c29c9e23ad12c3a\nCreate sample for list recommendations\n\nPiperOrigin-RevId: 282665402\n\nf88e2ca65790e3b44bb3455e4779b41de1bf7136\nbump Go to ga\n\nPiperOrigin-RevId: 282651105\n\naac86d932b3cefd7d746f19def6935d16d6235e0\nDocumentation update. Add location_id in preparation for regionalization.\n\nPiperOrigin-RevId: 282586371\n\n5b501cd384f6b842486bd41acce77854876158e7\nMigrate Datastore Admin to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282570874\n\n6a16d474d5be201b20a27646e2009c4dfde30452\nMigrate Datastore to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282564329\n\n74bd9b95ac8c70b883814e4765a725cffe43d77c\nmark Go lib ga\n\nPiperOrigin-RevId: 282562558\n\nf7b3d434f44f6a77cf6c37cae5474048a0639298\nAdd secretmanager.googleapis.com protos\n\nPiperOrigin-RevId: 282546399\n\nc34a911aaa0660a45f5a556578f764f135e6e060\niot: bump Go GAPIC to GA release level\n\nPiperOrigin-RevId: 282494787\n\n79b7f1c5ba86859dbf70aa6cd546057c1002cdc0\nPut back C++ targets.\nPrevious change overrode custom C++ targets made by external teams. This PR puts those targets back.\n\nPiperOrigin-RevId: 282458292\n\n06a840781d2dc1b0a28e03e30fb4b1bfb0b29d1e\nPopulate BUILD.bazel files for around 100 APIs (all APIs we publish) in all 7 languages.\n\nPiperOrigin-RevId: 282449910\n\n777b580a046c4fa84a35e1d00658b71964120bb0\nCreate BUILD file for recommender v1beta1\n\nPiperOrigin-RevId: 282068850\n\n48b385b6ef71dfe2596490ea34c9a9a434e74243\nGenerate recommender v1beta1 gRPC ServiceConfig file\n\nPiperOrigin-RevId: 282067795\n\n8395b0f1435a4d7ce8737b3b55392627758bd20c\nfix: Set timeout to 25s, because Tasks fails for any deadline above 30s.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 282017295\n\n3ba7ddc4b2acf532bdfb0004ca26311053c11c30\nfix: Shift Ruby and PHP to legacy GAPIC YAMLs for back-compat.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281852671\n\nad6f0c002194c3ec6c13d592d911d122d2293931\nRemove unneeded yaml files\n\nPiperOrigin-RevId: 281835839\n\n1f42588e4373750588152cdf6f747de1cadbcbef\nrefactor: Migrate Tasks beta 2 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769558\n\n902b51f2073e9958a2aba441f7f7ac54ea00966d\nrefactor: Migrate Tasks to GAPIC v2 (for real this time).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769522\n\n17561f59970eede87f61ef6e9c322fa1198a2f4d\nMigrate Tasks Beta 3 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281769519\n\nf95883b15a1ddd58eb7e3583fdefe7b00505faa3\nRegenerate recommender v1beta1 protos and sanitized yaml\n\nPiperOrigin-RevId: 281765245\n\n9a52df54c626b36699a058013d1735a166933167\nadd gRPC ServiceConfig for grafeas v1\n\nPiperOrigin-RevId: 281762754\n\n7a79d682ef40c5ca39c3fca1c0901a8e90021f8a\nfix: Roll back Tasks GAPIC v2 while we investigate C# issue.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281758548\n\n3fc31491640a90f029f284289e7e97f78f442233\nMigrate Tasks to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 281751187\n\n5bc0fecee454f857cec042fb99fe2d22e1bff5bc\nfix: adds operation HTTP rules back to v1p1beta1 config\n\nPiperOrigin-RevId: 281635572\n\n5364a19284a1333b3ffe84e4e78a1919363d9f9c\nbazel: Fix build\n\n1) Update to latest gapic-generator (has iam resource names fix for java).\n2) Fix non-trivial issues with oslogin (resources defined in sibling package to the one they are used from) and 
monitoring.\n3) Fix trivial missing dependencies in proto_library targets for other apis.\n\nThis is to prepare the repository to be populated with BUILD.bazel files for all supported apis (101 APIs) in all 7 languages.\n\nPiperOrigin-RevId: 281618750\n\n0aa77cbe45538d5e5739eb637db3f2940b912789\nUpdating common proto files in google/type/ with their latest versions.\n\nPiperOrigin-RevId: 281603926\n\nd47e1b4485b3effbb2298eb10dd13a544c0f66dc\nfix: replace Speech Recognize RPC retry_codes_name for non-standard assignment\n\nPiperOrigin-RevId: 281594037\n\n16543773103e2619d2b5f52456264de5bb9be104\nRegenerating public protos for datacatalog, also adding gRPC service config.\n\nPiperOrigin-RevId: 281423227\n\n328ebe76adb06128d12547ed70107fb841aebf4e\nChange custom data type from String to google.protobuf.Struct to be consistent with other docs such as\nhttps://developers.google.com/actions/smarthome/develop/process-intents#response_format\n\nPiperOrigin-RevId: 281402467\n\n5af83f47b9656261cafcf88b0b3334521ab266b3\n(internal change without visible public changes)\n\nPiperOrigin-RevId: 281334391\n\nc53ed56649583a149382bd88d3c427be475b91b6\nFix typo in protobuf docs.\n\nPiperOrigin-RevId: 281293109\n\n" + "sha": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8", + "internalRef": "295861722", + "log": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get rid of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. 
Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. (Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old and very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name 
of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\n" + } + }, + { + "git": { + "name": "synthtool", + "remote": "rpc://devrel/cloud/libraries/tools/autosynth", + "sha": "b4b7af4a16a07b40bfd8dcdda89f9f193ff4e2ed" } }, { "template": { "name": "python_split_library", "origin": "synthtool.gcp", - "version": "2019.10.17" + "version": "2020.2.4" } } ], diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py index 5ac89493cd7e..0483a458296a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -110,12 +110,16 @@ def test_get_sink(self): name = "name3373707" destination = "destination-1429847026" filter_ = "filter-1274492040" + description = "description-1724546052" + disabled = True writer_identity = "writerIdentity775638794" include_children = True expected_response = { "name": name, "destination": destination, "filter": filter_, + "description": description, + "disabled": disabled, "writer_identity": writer_identity, "include_children": include_children, } @@ -158,12 +162,16 @@ def test_create_sink(self): name = "name3373707" destination = "destination-1429847026" filter_ = "filter-1274492040" + description = "description-1724546052" + disabled = True writer_identity = "writerIdentity775638794" include_children = True expected_response = { "name": name, "destination": destination, "filter": filter_, + "description": description, + "disabled": disabled, "writer_identity": writer_identity, "include_children": include_children, } @@ -210,12 +218,16 @@ def test_update_sink(self): name = "name3373707" destination = "destination-1429847026" filter_ = "filter-1274492040" + description = "description-1724546052" + disabled = True writer_identity = "writerIdentity775638794" include_children = True expected_response = { "name": name, "destination": destination, "filter": filter_, + "description": description, + "disabled": disabled, "writer_identity": writer_identity, "include_children": include_children, } @@ -514,3 +526,79 @@ def test_delete_exclusion_exception(self): with pytest.raises(CustomException): client.delete_exclusion(name) + + def test_get_cmek_settings(self): + # Setup Expected Response + name = "name3373707" + kms_key_name = "kmsKeyName2094986649" + service_account_id = "serviceAccountId-111486921" + expected_response = { + "name": name, + "kms_key_name": kms_key_name, + "service_account_id": service_account_id, + } + expected_response = logging_config_pb2.CmekSettings(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() + + response = client.get_cmek_settings() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.GetCmekSettingsRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_cmek_settings_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() + + with pytest.raises(CustomException): + client.get_cmek_settings() + + def test_update_cmek_settings(self): + # Setup Expected Response + name = "name3373707" + kms_key_name = "kmsKeyName2094986649" + service_account_id = "serviceAccountId-111486921" + expected_response = { + "name": name, + "kms_key_name": kms_key_name, + "service_account_id": service_account_id, + } + expected_response = logging_config_pb2.CmekSettings(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() + + response = 
client.update_cmek_settings() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_config_pb2.UpdateCmekSettingsRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_cmek_settings_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = logging_v2.ConfigServiceV2Client() + + with pytest.raises(CustomException): + client.update_cmek_settings() diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py index 30aa9b807329..ef2abc733bc4 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py index e9dd3e348d48..35201f790cd8 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 6986e92b79be8c7706eac1eb3f7c1947d7f3052b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 25 Feb 2020 09:45:18 -0800 Subject: [PATCH 272/855] chore: change docstring line breaks (via synth) (#26) --- .../logging_v2/proto/logging_config_pb2.py | 3 +- packages/google-cloud-logging/synth.metadata | 28 +++++-------------- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 65fd2cff616a..5ce61c644e7d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1620,8 +1620,7 @@ dict( DESCRIPTOR=_BIGQUERYOPTIONS, __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data - to BigQuery. + __doc__="""Options that change functionality of a sink exporting data to BigQuery. 
Attributes: diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 8d8d23a3ba2f..91118fcdb011 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,39 +1,25 @@ { - "updateTime": "2020-02-19T02:25:24.328145Z", + "updateTime": "2020-02-25T13:15:44.768838Z", "sources": [ { "generator": { "name": "artman", - "version": "0.45.0", - "dockerImage": "googleapis/artman@sha256:6aec9c34db0e4be221cdaf6faba27bdc07cfea846808b3d3b964dfce3a9a0f9b" - } - }, - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-logging.git", - "sha": "5bbe5de139c7c62f916bd20cf7d3d78ca6b42c9e" + "version": "0.45.1", + "dockerImage": "googleapis/artman@sha256:36956ca6a4dc70a59de5d5d0fd35061b050bb56884516f0898f46d8220f25738" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8", - "internalRef": "295861722", - "log": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get rid of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. 
Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. (Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old and very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name 
of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\n" - } - }, - { - "git": { - "name": "synthtool", - "remote": "rpc://devrel/cloud/libraries/tools/autosynth", - "sha": "b4b7af4a16a07b40bfd8dcdda89f9f193ff4e2ed" + "sha": "0b1876b35e98f560f9c9ca9797955f020238a092", + "internalRef": "296986742", + "log": "0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 
296332968\n\n3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\n" } }, { "template": { - "name": "python_split_library", + "name": "python_library", "origin": "synthtool.gcp", "version": "2020.2.4" } From f7d87a4b8239175a779c3b81b7c03f73424cc3a1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 26 Feb 2020 04:46:58 -0500 Subject: [PATCH 273/855] test: drop majyk per-lang-version coverage level (#28) Closes #27. --- packages/google-cloud-logging/noxfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index d3af7cb0c13c..060f561c3623 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -91,7 +91,7 @@ def default(session, django_dep=('django',)): '--cov-append', '--cov-config=.coveragerc', '--cov-report=', - '--cov-fail-under=97', + '--cov-fail-under=0', 'tests/unit', *session.posargs ) @@ -179,4 +179,4 @@ def docs(session): os.path.join("docs", "_build", "doctrees", ""), os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), - ) \ No newline at end of file + ) From 8640599f425e93a93f9a7e5ca016ed7c373f0e75 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 2 Mar 2020 13:36:21 -0800 Subject: [PATCH 274/855] chore: release 1.15.0 (#24) * updated CHANGELOG.md [ci skip] * updated setup.py [ci skip] --- packages/google-cloud-logging/CHANGELOG.md | 14 ++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 05caf8d580b8..367ad2e8f1e8 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [1.15.0](https://www.github.com/googleapis/python-logging/compare/v1.14.0...v1.15.0) (2020-02-26) + + +### Features + +* add support for cmek settings; undeprecate resource name helper methods; bump copyright year to 2020 ([#22](https://www.github.com/googleapis/python-logging/issues/22)) ([1c687c1](https://www.github.com/googleapis/python-logging/commit/1c687c168cdc1f5ebc74d2380ad87335a42209a2)) + + +### Bug Fixes + +* **logging:** deprecate resource name helper methods (via synth) ([#9837](https://www.github.com/googleapis/python-logging/issues/9837)) ([335af9e](https://www.github.com/googleapis/python-logging/commit/335af9e909eb7fb4696ba906a82176611653531d)) +* **logging:** update test assertion and core version pins ([#10087](https://www.github.com/googleapis/python-logging/issues/10087)) ([4aedea8](https://www.github.com/googleapis/python-logging/commit/4aedea80e2bccb5ba3c41fae7a0ee46cc07eefa9)) +* replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 
([#10081](https://www.github.com/googleapis/python-logging/issues/10081)) ([c6eb601](https://www.github.com/googleapis/python-logging/commit/c6eb60179d674dfd5137d90d209094c9369b3581)) + ## 1.14.0 10-15-2019 06:50 PDT diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 95d45ad641c1..c06667d72bc9 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.14.0' +version = "1.15.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 0a7bf6b7bc50ebcd4b03c5e23997b9c0dd198c62 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 4 Mar 2020 09:28:00 -0800 Subject: [PATCH 275/855] chore: change docstring line breaks (via synth) --- .../cloud/logging_v2/proto/logging_config_pb2.py | 3 ++- packages/google-cloud-logging/synth.metadata | 12 ++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 5ce61c644e7d..65fd2cff616a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1620,7 +1620,8 @@ dict( DESCRIPTOR=_BIGQUERYOPTIONS, __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data to BigQuery. + __doc__="""Options that change functionality of a sink exporting data + to BigQuery. Attributes: diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 91118fcdb011..0ef99f84e757 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,20 +1,20 @@ { - "updateTime": "2020-02-25T13:15:44.768838Z", + "updateTime": "2020-03-04T13:20:11.396458Z", "sources": [ { "generator": { "name": "artman", - "version": "0.45.1", - "dockerImage": "googleapis/artman@sha256:36956ca6a4dc70a59de5d5d0fd35061b050bb56884516f0898f46d8220f25738" + "version": "1.0.0", + "dockerImage": "googleapis/artman@sha256:f37f2464788cb551299209b4fcab4eb323533154488c2ef9ec0c75d7c2b4b482" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0b1876b35e98f560f9c9ca9797955f020238a092", - "internalRef": "296986742", - "log": "0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 
296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 296332968\n\n3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\n" + "sha": "541b1ded4abadcc38e8178680b0677f65594ea6f", + "internalRef": "298686266", + "log": "541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\na78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n83c6f84035ee0f80eaa44d8b688a010461cc4080\nUpdate google/api/auth.proto to make AuthProvider to have JwtLocation\n\nPiperOrigin-RevId: 297918498\n\ne9e90a787703ec5d388902e2cb796aaed3a385b4\nDialogflow weekly v2/v2beta1 library update:\n - adding get validation result\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297671458\n\n1a2b05cc3541a5f7714529c665aecc3ea042c646\nAdding .yaml and .json config files.\n\nPiperOrigin-RevId: 297570622\n\ndfe1cf7be44dee31d78f78e485d8c95430981d6e\nPublish `QueryOptions` proto.\n\nIntroduced a `query_options` input in `ExecuteSqlRequest`.\n\nPiperOrigin-RevId: 297497710\n\ndafc905f71e5d46f500b41ed715aad585be062c3\npubsub: revert pull init_rpc_timeout & max_rpc_timeout back to 25 seconds and reset multiplier to 1.0\n\nPiperOrigin-RevId: 297486523\n\nf077632ba7fee588922d9e8717ee272039be126d\nfirestore: add update_transform\n\nPiperOrigin-RevId: 297405063\n\n0aba1900ffef672ec5f0da677cf590ee5686e13b\ncluster: use square brace for cross-reference\n\nPiperOrigin-RevId: 297204568\n\n5dac2da18f6325cbaed54603c43f0667ecd50247\nRestore retry params in gapic config because securitycenter has non-standard default retry params.\nRestore a few retry codes for some idempotent methods.\n\nPiperOrigin-RevId: 297196720\n\n1eb61455530252bba8b2c8d4bc9832960e5a56f6\npubsub: v1 replace IAM HTTP rules\n\nPiperOrigin-RevId: 297188590\n\n80b2d25f8d43d9d47024ff06ead7f7166548a7ba\nDialogflow weekly v2/v2beta1 library update:\n - updates to mega agent api\n - adding field 
mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297187629\n\n" } }, { From 1ec5c57a89b98c4500aafee8b938785d604e913c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 1 May 2020 14:00:22 -0700 Subject: [PATCH 276/855] chore: update templates; install google-cloud-testutils (via synth) (#36) --- packages/google-cloud-logging/.coveragerc | 16 ++ packages/google-cloud-logging/.flake8 | 16 ++ .../.github/ISSUE_TEMPLATE/bug_report.md | 3 +- .../google-cloud-logging/CONTRIBUTING.rst | 15 +- packages/google-cloud-logging/MANIFEST.in | 16 ++ packages/google-cloud-logging/noxfile.py | 4 +- packages/google-cloud-logging/setup.cfg | 16 ++ packages/google-cloud-logging/synth.metadata | 25 +- .../test_utils/credentials.json.enc | 49 ---- .../scripts/circleci/get_tagged_package.py | 64 ----- .../scripts/circleci/twine_upload.sh | 36 --- .../test_utils/scripts/get_target_packages.py | 268 ------------------ .../scripts/get_target_packages_kokoro.py | 98 ------- .../test_utils/scripts/run_emulator.py | 199 ------------- .../test_utils/scripts/update_docs.sh | 93 ------ .../google-cloud-logging/test_utils/setup.py | 64 ----- .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 --- .../test_utils/test_utils/retry.py | 207 -------------- .../test_utils/test_utils/system.py | 81 ------ .../test_utils/test_utils/vpcsc_config.py | 118 -------- 21 files changed, 85 insertions(+), 1341 deletions(-) delete mode 100644 packages/google-cloud-logging/test_utils/credentials.json.enc delete mode 100644 packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py delete mode 100755 packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh delete mode 100644 packages/google-cloud-logging/test_utils/scripts/get_target_packages.py delete mode 100644 packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py delete mode 100644 packages/google-cloud-logging/test_utils/scripts/run_emulator.py delete mode 100755 packages/google-cloud-logging/test_utils/scripts/update_docs.sh delete mode 100644 packages/google-cloud-logging/test_utils/setup.py delete mode 100644 packages/google-cloud-logging/test_utils/test_utils/__init__.py delete mode 100644 packages/google-cloud-logging/test_utils/test_utils/imports.py delete mode 100644 packages/google-cloud-logging/test_utils/test_utils/retry.py delete mode 100644 packages/google-cloud-logging/test_utils/test_utils/system.py delete mode 100644 packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index b178b094aa1d..dd39c8546c41 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # Generated by synthtool. DO NOT EDIT! [run] branch = True diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 0268ecc9c55c..20fe9bda2ee4 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md index d575444a7b5c..e9f7d79ac9c8 100644 --- a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md +++ b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md @@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better! Please run down the following list and make sure you've tried the usual "quick fixes": - Search the issues already opened: https://github.com/googleapis/python-logging/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 4fad6c852f61..64c917ca84ae 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, and 3.7 on both UNIX and Windows. + 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -214,26 +214,18 @@ We support: - `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ +- `Python 3.8`_ .. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py -We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ -and lack of continuous integration `support`_. - -.. _Python 2.5: https://docs.python.org/2.5/ -.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ -.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ - -We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no -longer supported by the core development team. - Python 2.7 support is deprecated. 
All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version @@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version .. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django .. _projects: http://flask.pocoo.org/docs/0.10/python3/ .. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ -.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 ********** Versioning diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index cd011be27a0e..68855abc3f02 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 060f561c3623..826477c01ba0 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -132,11 +132,11 @@ def system(session): 'google-cloud-bigquery', 'google-cloud-pubsub', 'google-cloud-storage', + 'google-cloud-testutils', ] for systest_dep in systest_deps: session.install(systest_dep) - session.install('-e', 'test_utils/') session.install('-e', '.') # Run py.test against the system tests. @@ -165,7 +165,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-logging/setup.cfg b/packages/google-cloud-logging/setup.cfg index 3bd555500e37..c3a2b39f6528 100644 --- a/packages/google-cloud-logging/setup.cfg +++ b/packages/google-cloud-logging/setup.cfg @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! 
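# (Note: ``universal = 1`` below keeps producing py2.py3 wheels while
# Python 2.7 support winds down.)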
[bdist_wheel] universal = 1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 0ef99f84e757..a5616d3e5170 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,27 +1,32 @@ { - "updateTime": "2020-03-04T13:20:11.396458Z", "sources": [ { "generator": { "name": "artman", - "version": "1.0.0", - "dockerImage": "googleapis/artman@sha256:f37f2464788cb551299209b4fcab4eb323533154488c2ef9ec0c75d7c2b4b482" + "version": "2.0.0", + "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" + } + }, + { + "git": { + "name": ".", + "remote": "git@github.com:googleapis/python-logging", + "sha": "a22a3bfdd4c8a4d6e9cc0c7d7504322ff31ad7ea" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "541b1ded4abadcc38e8178680b0677f65594ea6f", - "internalRef": "298686266", - "log": "541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\na78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n83c6f84035ee0f80eaa44d8b688a010461cc4080\nUpdate google/api/auth.proto to make AuthProvider to have JwtLocation\n\nPiperOrigin-RevId: 297918498\n\ne9e90a787703ec5d388902e2cb796aaed3a385b4\nDialogflow weekly v2/v2beta1 library update:\n - adding get validation result\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297671458\n\n1a2b05cc3541a5f7714529c665aecc3ea042c646\nAdding .yaml and .json config files.\n\nPiperOrigin-RevId: 297570622\n\ndfe1cf7be44dee31d78f78e485d8c95430981d6e\nPublish `QueryOptions` proto.\n\nIntroduced a `query_options` input in `ExecuteSqlRequest`.\n\nPiperOrigin-RevId: 297497710\n\ndafc905f71e5d46f500b41ed715aad585be062c3\npubsub: revert pull init_rpc_timeout & max_rpc_timeout back to 25 seconds and reset multiplier to 1.0\n\nPiperOrigin-RevId: 297486523\n\nf077632ba7fee588922d9e8717ee272039be126d\nfirestore: add update_transform\n\nPiperOrigin-RevId: 297405063\n\n0aba1900ffef672ec5f0da677cf590ee5686e13b\ncluster: use square brace for cross-reference\n\nPiperOrigin-RevId: 297204568\n\n5dac2da18f6325cbaed54603c43f0667ecd50247\nRestore retry params in gapic config because securitycenter has non-standard default retry params.\nRestore a few retry codes for some idempotent methods.\n\nPiperOrigin-RevId: 297196720\n\n1eb61455530252bba8b2c8d4bc9832960e5a56f6\npubsub: v1 replace IAM HTTP rules\n\nPiperOrigin-RevId: 297188590\n\n80b2d25f8d43d9d47024ff06ead7f7166548a7ba\nDialogflow weekly v2/v2beta1 library update:\n - updates to mega agent api\n - adding field mask override control for output audio 
config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297187629\n\n" + "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a", + "internalRef": "309426927" } }, { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2020.2.4" + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "cdddf139b36000b3a7c65fd2a7781e253262359a" } } ], diff --git a/packages/google-cloud-logging/test_utils/credentials.json.enc b/packages/google-cloud-logging/test_utils/credentials.json.enc deleted file mode 100644 index f073c7e4f774..000000000000 --- a/packages/google-cloud-logging/test_utils/credentials.json.enc +++ /dev/null @@ -1,49 +0,0 @@ -U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA -UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU -aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj -HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV -V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus -J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 -Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He -/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv -ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT -6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq -NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 -j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF -41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM -IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g -x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ -vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy -ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At -CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD -j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK -jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z -cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO -LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso -Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d -XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ -MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP -+dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 -kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU -5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr -E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 -D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT -tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX -XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 -J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB -jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM -td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg -twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC -mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU -aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6 -uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK -n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ -bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX -ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H 
-NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w -1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE -8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL -qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv -tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4 -iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l -bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD diff --git a/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py deleted file mode 100644 index c148b9dc2370..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/circleci/get_tagged_package.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper to determine package from tag. -Get the current package directory corresponding to the Circle Tag. -""" - -from __future__ import print_function - -import os -import re -import sys - - -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) -TAG_ENV = 'CIRCLE_TAG' -ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) -BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z' -CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__) -ROOT_DIR = os.path.realpath( - os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..')) - - -def main(): - """Get the current package directory. - Prints the package directory out so callers can consume it. - """ - if TAG_ENV not in os.environ: - print(ERROR_MSG, file=sys.stderr) - sys.exit(1) - - tag_name = os.environ[TAG_ENV] - match = TAG_RE.match(tag_name) - if match is None: - print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) - sys.exit(1) - - pkg_name = match.group('pkg') - if pkg_name is None: - print(ROOT_DIR) - else: - pkg_dir = pkg_name.rstrip('-').replace('-', '_') - print(os.path.join(ROOT_DIR, pkg_dir)) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh deleted file mode 100755 index 23a4738e90b9..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/circleci/twine_upload.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -# If this is not a CircleCI tag, no-op. -if [[ -z "$CIRCLE_TAG" ]]; then - echo "This is not a release tag. Doing nothing." - exit 0 -fi - -# H/T: http://stackoverflow.com/a/246128/1068170 -SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" -# Determine the package directory being deploying on this tag. -PKG_DIR="$(python ${SCRIPT})" - -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - -# Move into the package, build the distribution and upload. -cd ${PKG_DIR} -python3 setup.py sdist bdist_wheel -twine upload dist/* diff --git a/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py b/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py deleted file mode 100644 index 1d51830cc23a..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/get_target_packages.py +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Print a list of packages which require testing.""" - -import os -import re -import subprocess -import warnings - - -CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) -BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) -GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') -CI = os.environ.get('CI', '') -CI_BRANCH = os.environ.get('CIRCLE_BRANCH') -CI_PR = os.environ.get('CIRCLE_PR_NUMBER') -CIRCLE_TAG = os.environ.get('CIRCLE_TAG') -head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] -).strip().decode('ascii').split() -rev_parse = subprocess.check_output( - ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] -).strip().decode('ascii') -MAJOR_DIV = '#' * 78 -MINOR_DIV = '#' + '-' * 77 - -# NOTE: This reg-ex is copied from ``get_tagged_packages``. -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) - -# This is the current set of dependencies by package. -# As of this writing, the only "real" dependency is that of error_reporting -# (on logging), the rest are just system test dependencies. -PKG_DEPENDENCIES = { - 'logging': {'pubsub'}, -} - - -def get_baseline(): - """Return the baseline commit. - - On a pull request, or on a branch, return the common parent revision - with the master branch. - - Locally, return a value pulled from environment variables, or None if - the environment variables are not set. - - On a push to master, return None. This will effectively cause everything - to be considered to be affected. - """ - - # If this is a pull request or branch, return the tip for master. - # We will test only packages which have changed since that point. 
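    # (Illustrative summary of the cases handled below, values assumed:
    #   CI branch/PR build  -> fork point with 'baseline/master', or that ref;
    #   local run with $GOOGLE_CLOUD_TESTING_REMOTE=upstream -> 'upstream/master';
    #   push to master / nothing set -> None, i.e. test every package.)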
- ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) - - if ci_non_master: - - repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) - subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], - stderr=subprocess.DEVNULL) - subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) - - if CI_PR is None and CI_BRANCH is not None: - output = subprocess.check_output([ - 'git', 'merge-base', '--fork-point', - 'baseline/master', CI_BRANCH]) - return output.strip().decode('ascii') - - return 'baseline/master' - - # If environment variables are set identifying what the master tip is, - # use that. - if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): - remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] - branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') - return '%s/%s' % (remote, branch) - - # If we are not in CI and we got this far, issue a warning. - if not CI: - warnings.warn('No baseline could be determined; this means tests ' - 'will run for every package. If this is local ' - 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' - 'environment variable.') - - # That is all we can do; return None. - return None - - -def get_changed_files(): - """Return a list of files that have been changed since the baseline. - - If there is no base, return None. - """ - # Get the baseline, and fail quickly if there is no baseline. - baseline = get_baseline() - print('# Baseline commit: {}'.format(baseline)) - if not baseline: - return None - - # Return a list of altered files. - try: - return subprocess.check_output([ - 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline), - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - except subprocess.CalledProcessError: - warnings.warn('Unable to perform git diff; falling back to assuming ' - 'all packages have changed.') - return None - - -def reverse_map(dict_of_sets): - """Reverse a map of one-to-many. - - So the map:: - - { - 'A': {'B', 'C'}, - 'B': {'C'}, - } - - becomes - - { - 'B': {'A'}, - 'C': {'A', 'B'}, - } - - Args: - dict_of_sets (dict[set]): A dictionary of sets, mapping - one value to many. - - Returns: - dict[set]: The reversed map. - """ - result = {} - for key, values in dict_of_sets.items(): - for value in values: - result.setdefault(value, set()).add(key) - - return result - -def get_changed_packages(file_list): - """Return a list of changed packages based on the provided file list. - - If the file list is None, then all packages should be considered to be - altered. - """ - # Determine a complete list of packages. - all_packages = set() - for file_ in os.listdir(BASE_DIR): - abs_file = os.path.realpath(os.path.join(BASE_DIR, file_)) - nox_file = os.path.join(abs_file, 'nox.py') - if os.path.isdir(abs_file) and os.path.isfile(nox_file): - all_packages.add(file_) - - # If ther is no file list, send down the full package set. - if file_list is None: - return all_packages - - # Create a set based on the list of changed files. - answer = set() - reverse_deps = reverse_map(PKG_DEPENDENCIES) - for file_ in file_list: - # Ignore root directory changes (setup.py, .gitignore, etc.). - if os.path.sep not in file_: - continue - - # Ignore changes that are not in a package (usually this will be docs). - package = file_.split(os.path.sep, 1)[0] - if package not in all_packages: - continue - - # If there is a change in core, short-circuit now and return - # everything. 
- if package in ('core',): - return all_packages - - # Add the package, as well as any dependencies this package has. - # NOTE: For now, dependencies only go down one level. - answer.add(package) - answer = answer.union(reverse_deps.get(package, set())) - - # We got this far without being short-circuited; return the final answer. - return answer - - -def get_tagged_package(): - """Return the package corresponding to the current tag. - - If there is not tag, will return :data:`None`. - """ - if CIRCLE_TAG is None: - return - - match = TAG_RE.match(CIRCLE_TAG) - if match is None: - return - - pkg_name = match.group('pkg') - if pkg_name == '': - # NOTE: This corresponds to the "umbrella" tag. - return - - return pkg_name.rstrip('-').replace('-', '_') - - -def get_target_packages(): - """Return a list of target packages to be run in the current build. - - If in a tag build, will run only the package(s) that are tagged, otherwise - will run the packages that have file changes in them (or packages that - depend on those). - """ - tagged_package = get_tagged_package() - if tagged_package is None: - file_list = get_changed_files() - print(MAJOR_DIV) - print('# Changed files:') - print(MINOR_DIV) - for file_ in file_list or (): - print('# {}'.format(file_)) - for package in sorted(get_changed_packages(file_list)): - yield package - else: - yield tagged_package - - -def main(): - print(MAJOR_DIV) - print('# Environment') - print(MINOR_DIV) - print('# CircleCI: {}'.format(CI)) - print('# CircleCI branch: {}'.format(CI_BRANCH)) - print('# CircleCI pr: {}'.format(CI_PR)) - print('# CircleCI tag: {}'.format(CIRCLE_TAG)) - print('# HEAD ref: {}'.format(head_hash)) - print('# {}'.format(head_name)) - print('# Git branch: {}'.format(rev_parse)) - print(MAJOR_DIV) - - packages = list(get_target_packages()) - - print(MAJOR_DIV) - print('# Target packages:') - print(MINOR_DIV) - for package in packages: - print(package) - print(MAJOR_DIV) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py deleted file mode 100644 index 27d3a0c940ea..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/get_target_packages_kokoro.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Print a list of packages which require testing.""" - -import pathlib -import subprocess - -import ci_diff_helper -import requests - - -def print_environment(environment): - print("-> CI environment:") - print('Branch', environment.branch) - print('PR', environment.pr) - print('In PR', environment.in_pr) - print('Repo URL', environment.repo_url) - if environment.in_pr: - print('PR Base', environment.base) - - -def get_base(environment): - if environment.in_pr: - return environment.base - else: - # If we're not in a PR, just calculate the changes between this commit - # and its parent. 
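        # (The caller then diffs ``HEAD~1..HEAD``.)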
- return 'HEAD~1' - - -def get_changed_files_from_base(base): - return subprocess.check_output([ - 'git', 'diff', '--name-only', f'{base}..HEAD', - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - - -_URL_TEMPLATE = ( - 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' - '{}/files' -) - - -def get_changed_files_from_pr(pr): - url = _URL_TEMPLATE.format(pr) - while url is not None: - response = requests.get(url) - for info in response.json(): - yield info['filename'] - url = response.links.get('next', {}).get('url') - - -def determine_changed_packages(changed_files): - packages = [ - path.parent for path in pathlib.Path('.').glob('*/noxfile.py') - ] - - changed_packages = set() - for file in changed_files: - file = pathlib.Path(file) - for package in packages: - if package in file.parents: - changed_packages.add(package) - - return changed_packages - - -def main(): - environment = ci_diff_helper.get_config() - print_environment(environment) - base = get_base(environment) - - if environment.in_pr: - changed_files = list(get_changed_files_from_pr(environment.pr)) - else: - changed_files = get_changed_files_from_base(base) - - packages = determine_changed_packages(changed_files) - - print(f"Comparing against {base}.") - print("-> Changed packages:") - - for package in packages: - print(package) - - -main() diff --git a/packages/google-cloud-logging/test_utils/scripts/run_emulator.py b/packages/google-cloud-logging/test_utils/scripts/run_emulator.py deleted file mode 100644 index 287b08640691..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/run_emulator.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Run system tests locally with the emulator. - -First makes system calls to spawn the emulator and get the local environment -variable needed for it. Then calls the system tests. -""" - - -import argparse -import os -import subprocess - -import psutil - -from google.cloud.environment_vars import BIGTABLE_EMULATOR -from google.cloud.environment_vars import GCD_DATASET -from google.cloud.environment_vars import GCD_HOST -from google.cloud.environment_vars import PUBSUB_EMULATOR -from run_system_test import run_module_tests - - -BIGTABLE = 'bigtable' -DATASTORE = 'datastore' -PUBSUB = 'pubsub' -PACKAGE_INFO = { - BIGTABLE: (BIGTABLE_EMULATOR,), - DATASTORE: (GCD_DATASET, GCD_HOST), - PUBSUB: (PUBSUB_EMULATOR,), -} -EXTRA = { - DATASTORE: ('--no-legacy',), -} -_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' -_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' -_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' - - -def get_parser(): - """Get simple ``argparse`` parser to determine package. - - :rtype: :class:`argparse.ArgumentParser` - :returns: The parser for this script. 
- """ - parser = argparse.ArgumentParser( - description='Run google-cloud system tests against local emulator.') - parser.add_argument('--package', dest='package', - choices=sorted(PACKAGE_INFO.keys()), - default=DATASTORE, help='Package to be tested.') - return parser - - -def get_start_command(package): - """Get command line arguments for starting emulator. - - :type package: str - :param package: The package to start an emulator for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'start') - extra = EXTRA.get(package, ()) - return result + extra - - -def get_env_init_command(package): - """Get command line arguments for getting emulator env. info. - - :type package: str - :param package: The package to get environment info for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'env-init') - extra = EXTRA.get(package, ()) - return result + extra - - -def datastore_wait_ready(popen): - """Wait until the datastore emulator is ready to use. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline() == _DS_READY_LINE - - -def wait_ready_prefix(popen, prefix): - """Wait until the a process encounters a line with matching prefix. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :type prefix: str - :param prefix: The prefix to match - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline().startswith(prefix) - - -def wait_ready(package, popen): - """Wait until the emulator is ready to use. - - :type package: str - :param package: The package to check if ready. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :raises: :class:`KeyError` if the ``package`` is not among - ``datastore``, ``pubsub`` or ``bigtable``. - """ - if package == DATASTORE: - datastore_wait_ready(popen) - elif package == PUBSUB: - wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) - elif package == BIGTABLE: - wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) - else: - raise KeyError('Package not supported', package) - - -def cleanup(pid): - """Cleanup a process (including all of its children). - - :type pid: int - :param pid: Process ID. - """ - proc = psutil.Process(pid) - for child_proc in proc.children(recursive=True): - try: - child_proc.kill() - child_proc.terminate() - except psutil.NoSuchProcess: - pass - proc.terminate() - proc.kill() - - -def run_tests_in_emulator(package): - """Spawn an emulator instance and run the system tests. - - :type package: str - :param package: The package to run system tests against. - """ - # Make sure this package has environment vars to replace. - env_vars = PACKAGE_INFO[package] - - start_command = get_start_command(package) - # Ignore stdin and stdout, don't pollute the user's output with them. 
- proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - wait_ready(package, proc_start) - env_init_command = get_env_init_command(package) - proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - env_status = proc_env.wait() - if env_status != 0: - raise RuntimeError(env_status, proc_env.stderr.read()) - env_lines = proc_env.stdout.read().strip().split('\n') - # Set environment variables before running the system tests. - for env_var in env_vars: - line_prefix = 'export ' + env_var + '=' - value, = [line.split(line_prefix, 1)[1] for line in env_lines - if line.startswith(line_prefix)] - os.environ[env_var] = value - run_module_tests(package, - ignore_requirements=True) - finally: - cleanup(proc_start.pid) - - -def main(): - """Main method to run this script.""" - parser = get_parser() - args = parser.parse_args() - run_tests_in_emulator(args.package) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-logging/test_utils/scripts/update_docs.sh b/packages/google-cloud-logging/test_utils/scripts/update_docs.sh deleted file mode 100755 index 8cbab9f0dad0..000000000000 --- a/packages/google-cloud-logging/test_utils/scripts/update_docs.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -GH_OWNER='GoogleCloudPlatform' -GH_PROJECT_NAME='google-cloud-python' - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -# Function to build the docs. -function build_docs { - rm -rf docs/_build/ - rm -f docs/bigquery/generated/*.rst - # -W -> warnings as errors - # -T -> show full traceback on exception - # -N -> no color - sphinx-build \ - -W -T -N \ - -b html \ - -d docs/_build/doctrees \ - docs/ \ - docs/_build/html/ - return $? -} - -# Only update docs if we are on CircleCI. -if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then - echo "Building new docs on a merged commit." -elif [[ "$1" == "kokoro" ]]; then - echo "Building and publishing docs on Kokoro." -elif [[ -n "${CIRCLE_TAG}" ]]; then - echo "Building new docs on a tag (but will not deploy)." - build_docs - exit $? -else - echo "Not on master nor a release tag." - echo "Building new docs for testing purposes, but not deploying." - build_docs - exit $? -fi - -# Adding GitHub pages branch. `git submodule add` checks it -# out at HEAD. -GH_PAGES_DIR='ghpages' -git submodule add -q -b gh-pages \ - "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR} - -# Determine if we are building a new tag or are building docs -# for master. Then build new docs in docs/_build from master. -if [[ -n "${CIRCLE_TAG}" ]]; then - # Sphinx will use the package version by default. - build_docs -else - SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs -fi - -# Update gh-pages with the created docs. -cd ${GH_PAGES_DIR} -git rm -fr latest/ -cp -R ../docs/_build/html/ latest/ - -# Update the files push to gh-pages. -git add . 
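# Echo the staged changes into the CI log.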
-git status - -# If there are no changes, just exit cleanly. -if [[ -z "$(git status --porcelain)" ]]; then - echo "Nothing to commit. Exiting without pushing changes." - exit -fi - -# Commit to gh-pages branch to apply changes. -git config --global user.email "dpebot@google.com" -git config --global user.name "dpebot" -git commit -m "Update docs after merge to master." - -# NOTE: This may fail if two docs updates (on merges to master) -# happen in close proximity. -git push -q origin HEAD:gh-pages diff --git a/packages/google-cloud-logging/test_utils/setup.py b/packages/google-cloud-logging/test_utils/setup.py deleted file mode 100644 index 8e9222a7f862..000000000000 --- a/packages/google-cloud-logging/test_utils/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -from setuptools import find_packages -from setuptools import setup - - -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) - - -# NOTE: This is duplicated throughout and we should try to -# consolidate. -SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Internet', - ], -} - - -REQUIREMENTS = [ - 'google-auth >= 0.4.0', - 'six', -] - -setup( - name='google-cloud-testutils', - version='0.24.0', - description='System test utilities for google-cloud-python', - packages=find_packages(), - install_requires=REQUIREMENTS, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', - **SETUP_BASE -) diff --git a/packages/google-cloud-logging/test_utils/test_utils/__init__.py b/packages/google-cloud-logging/test_utils/test_utils/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/test_utils/test_utils/imports.py b/packages/google-cloud-logging/test_utils/test_utils/imports.py deleted file mode 100644 index 5991af7fc465..000000000000 --- a/packages/google-cloud-logging/test_utils/test_utils/imports.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -import six - - -def maybe_fail_import(predicate): - """Create and return a patcher that conditionally makes an import fail. - - Args: - predicate (Callable[[...], bool]): A callable that, if it returns `True`, - triggers an `ImportError`. It must accept the same arguments as the - built-in `__import__` function. - https://docs.python.org/3/library/functions.html#__import__ - - Returns: - A mock patcher object that can be used to enable patched import behavior. - """ - orig_import = six.moves.builtins.__import__ - - def custom_import(name, globals=None, locals=None, fromlist=(), level=0): - if predicate(name, globals, locals, fromlist, level): - raise ImportError - return orig_import(name, globals, locals, fromlist, level) - - return mock.patch.object(six.moves.builtins, "__import__", new=custom_import) diff --git a/packages/google-cloud-logging/test_utils/test_utils/retry.py b/packages/google-cloud-logging/test_utils/test_utils/retry.py deleted file mode 100644 index e61c001a03e1..000000000000 --- a/packages/google-cloud-logging/test_utils/test_utils/retry.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -from functools import wraps - -import six - -MAX_TRIES = 4 -DELAY = 1 -BACKOFF = 2 - - -def _retry_all(_): - """Retry all caught exceptions.""" - return True - - -class BackoffFailed(Exception): - """Retry w/ backoffs did not complete successfully.""" - - -class RetryBase(object): - """Base for retrying calling a decorated function w/ exponential backoff. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - self.max_tries = max_tries - self.delay = delay - self.backoff = backoff - self.logger = logger.warning if logger else six.print_ - - -class RetryErrors(RetryBase): - """Decorator for retrying given exceptions in testing. - - :type exception: Exception or tuple of Exceptions - :param exception: The exception to check or may be a tuple of - exceptions to check. - - :type error_predicate: function, takes caught exception, returns bool - :param error_predicate: Predicate evaluating whether to retry after a - caught exception. 
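                            (The default, ``_retry_all``, retries on
                            every caught exception.)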
- - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, exception, error_predicate=_retry_all, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryErrors, self).__init__(max_tries, delay, backoff, logger) - self.exception = exception - self.error_predicate = error_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - try: - return to_wrap(*args, **kwargs) - except self.exception as caught_exception: - - if not self.error_predicate(caught_exception): - raise - - delay = self.delay * self.backoff**tries - msg = ("%s, Trying again in %d seconds..." % - (caught_exception, delay)) - self.logger(msg) - - time.sleep(delay) - tries += 1 - return to_wrap(*args, **kwargs) - - return wrapped_function - - -class RetryResult(RetryBase): - """Decorator for retrying based on non-error result. - - :type result_predicate: function, takes result, returns bool - :param result_predicate: Predicate evaluating whether to retry after a - result is returned. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, result_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryResult, self).__init__(max_tries, delay, backoff, logger) - self.result_predicate = result_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.result_predicate(result): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." % ( - self.result_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function - - -class RetryInstanceState(RetryBase): - """Decorator for retrying based on instance state. - - :type instance_predicate: function, takes instance, returns bool - :param instance_predicate: Predicate evaluating whether to retry after an - API-invoking method is called. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. 
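    Note: only bound instance methods can be decorated; the predicate is
    evaluated against the wrapped method's ``__self__``.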
- """ - def __init__(self, instance_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryInstanceState, self).__init__( - max_tries, delay, backoff, logger) - self.instance_predicate = instance_predicate - - def __call__(self, to_wrap): - instance = to_wrap.__self__ # only instance methods allowed - - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.instance_predicate(instance): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." % ( - self.instance_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function diff --git a/packages/google-cloud-logging/test_utils/test_utils/system.py b/packages/google-cloud-logging/test_utils/test_utils/system.py deleted file mode 100644 index 590dc62a06e6..000000000000 --- a/packages/google-cloud-logging/test_utils/test_utils/system.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2014 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -import os -import sys -import time - -import google.auth.credentials -from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS - - -# From shell environ. May be None. -CREDENTIALS = os.getenv(TEST_CREDENTIALS) - -ENVIRON_ERROR_MSG = """\ -To run the system tests, you need to set some environment variables. -Please check the CONTRIBUTING guide for instructions. -""" - - -class EmulatorCreds(google.auth.credentials.Credentials): - """A mock credential object. - - Used to avoid unnecessary token refreshing or reliance on the network - while an emulator is running. - """ - - def __init__(self): # pylint: disable=super-init-not-called - self.token = b'seekrit' - self.expiry = None - - @property - def valid(self): - """Would-be validity check of the credentials. - - Always is :data:`True`. - """ - return True - - def refresh(self, unused_request): # pylint: disable=unused-argument - """Off-limits implementation for abstract method.""" - raise RuntimeError('Should never be refreshed.') - - -def check_environ(): - err_msg = None - if CREDENTIALS is None: - err_msg = '\nMissing variables: ' + TEST_CREDENTIALS - elif not os.path.isfile(CREDENTIALS): - err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, - CREDENTIALS) - - if err_msg is not None: - msg = ENVIRON_ERROR_MSG + err_msg - print(msg, file=sys.stderr) - sys.exit(1) - - -def unique_resource_id(delimiter='_'): - """A unique identifier for a resource. - - Intended to help locate resources created in particular - testing environments and at particular times. 
- """ - build_id = os.getenv('CIRCLE_BUILD_NUM', '') - if build_id == '': - return '%s%d' % (delimiter, 1000 * time.time()) - else: - return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py deleted file mode 100644 index 36b15d6be991..000000000000 --- a/packages/google-cloud-logging/test_utils/test_utils/vpcsc_config.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import pytest - - -INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" -PROJECT_INSIDE_ENVVAR = "PROJECT_ID" -PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" -BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" - - -class VPCSCTestConfig(object): - """System test utility for VPCSC detection. - - See: https://cloud.google.com/vpc-service-controls/docs/ - """ - - @property - def inside_vpcsc(self): - """Test whether the test environment is configured to run inside VPCSC. - - Returns: - bool: - true if the environment is configured to run inside VPCSC, - else false. - """ - return INSIDE_VPCSC_ENVVAR in os.environ - - @property - def project_inside(self): - """Project ID for testing outside access. - - Returns: - str: project ID used for testing outside access; None if undefined. - """ - return os.environ.get(PROJECT_INSIDE_ENVVAR, None) - - @property - def project_outside(self): - """Project ID for testing inside access. - - Returns: - str: project ID used for testing inside access; None if undefined. - """ - return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) - - @property - def bucket_outside(self): - """GCS bucket for testing inside access. - - Returns: - str: bucket ID used for testing inside access; None if undefined. - """ - return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) - - def skip_if_inside_vpcsc(self, testcase): - """Test decorator: skip if running inside VPCSC.""" - reason = ( - "Running inside VPCSC. " - "Unset the {} environment variable to enable this test." - ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_vpcsc(self, testcase): - """Test decorator: skip if running outside VPCSC.""" - reason = ( - "Running outside VPCSC. " - "Set the {} environment variable to enable this test." - ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_project(self, testcase): - """Test decorator: skip if inside project env var not set.""" - reason = ( - "Project ID for running inside VPCSC not set. " - "Set the {} environment variable to enable this test." 
- ).format(PROJECT_INSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_inside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_project(self, testcase): - """Test decorator: skip if outside project env var not set.""" - reason = ( - "Project ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(PROJECT_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_outside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_bucket(self, testcase): - """Test decorator: skip if outside bucket env var not set.""" - reason = ( - "Bucket ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(BUCKET_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) - return skip(testcase) - - -vpcsc_config = VPCSCTestConfig() From 0a775910723476a99e4e3756dde58e7b54f572bb Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 1 May 2020 14:12:17 -0700 Subject: [PATCH 277/855] chore: Migrate python-logging synth.py from artman to bazel (#35) --- packages/google-cloud-logging/synth.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 4364f387b4c9..ee1b168aa46b 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -16,23 +16,21 @@ import synthtool as s from synthtool import gcp -gapic = gcp.GAPICGenerator() +gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() # ---------------------------------------------------------------------------- # Generate logging GAPIC layer # ---------------------------------------------------------------------------- library = gapic.py_library( - "logging", - "v2", - config_path="/google/logging/artman_logging.yaml", - artman_output_name="logging-v2", + service="logging", + version="v2", + bazel_target="//google/logging/v2:logging-v2-py", include_protos=True, ) # the structure of the logging directory is a bit different, so manually copy the protos -s.move(library / "google/cloud/logging_v2/proto/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") -s.move(library / "google/cloud/logging_v2/proto/*.proto") +s.move(library / "google/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") s.move(library / "google/cloud/logging_v2/gapic") s.move(library / "tests/unit/gapic/v2") From 8ab71d7f203aa44172a3bbbc361411f6895202bc Mon Sep 17 00:00:00 2001 From: Yoshi Yamaguchi <145104+ymotongpoo@users.noreply.github.com> Date: Thu, 2 Jul 2020 01:54:03 +0900 Subject: [PATCH 278/855] docs: change descriptions for virtual environment (#48) Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [X] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-logging/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea
- [X] Ensure the tests and linter pass
- [X] Code coverage does not decrease (if any source code was changed)
- [X] Appropriate docs were updated (if necessary)

Fixes #47
---
 packages/google-cloud-logging/README.rst | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst
index 220a6cf17be2..d41b02328c20 100644
--- a/packages/google-cloud-logging/README.rst
+++ b/packages/google-cloud-logging/README.rst
@@ -1,7 +1,7 @@
 Python Client for Stackdriver Logging
 =====================================
 
-|pypi| |versions|
+|pypi| |versions|
 
 `Stackdriver Logging API`_: Writes log entries and manages your Stackdriver
 Logging configuration.
@@ -35,15 +35,15 @@ In order to use this library, you first need to go through the following steps:
 Installation
 ~~~~~~~~~~~~
 
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+Install this library in a `venv`_ using pip. `venv`_ is a tool to
 create isolated Python environments. The basic problem it addresses is one of
 dependencies and versions, and indirectly permissions.
 
-With `virtualenv`_, it's possible to install this library without needing system
+With `venv`_, it's possible to install this library without needing system
 install permissions, and without clashing with the installed system
 dependencies.
 
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+.. _`venv`: https://docs.python.org/3/library/venv.html
 
 
 Supported Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Python >= 3.5
 
 Deprecated Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Python == 2.7. Python 2.7 support was removed on January 1, 2020.
 
 
 Mac/Linux
 ^^^^^^^^^
 
 .. code-block:: console
 
-    pip install virtualenv
-    virtualenv <your-env>
+    python -m venv <your-env>
     source <your-env>/bin/activate
     <your-env>/bin/pip install google-cloud-logging
 
 
 Windows
 ^^^^^^^
 
 .. code-block:: console
 
-    pip install virtualenv
-    virtualenv <your-env>
+    python -m venv <your-env>
     <your-env>\Scripts\activate
     <your-env>\Scripts\pip.exe install google-cloud-logging
 

From a9b095713c8e709455a455b88a3808d0e4f3b110 Mon Sep 17 00:00:00 2001
From: Yoshi Yamaguchi <145104+ymotongpoo@users.noreply.github.com>
Date: Thu, 2 Jul 2020 02:14:04 +0900
Subject: [PATCH 279/855] docs: add initialization of LogEntry instance in the v2 example (#46)

Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
- [X] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-logging/issues/new/choose) before writing your code!  That way we can discuss the change, evaluate designs, and agree on the general idea
- [X] Ensure the tests and linter pass
- [X] Code coverage does not decrease (if any source code was changed)
- [X] Appropriate docs were updated (if necessary)

Fixes #44
---
 packages/google-cloud-logging/README.rst | 26 ++++++++++++++++++++++--
 1 file changed, 24 insertions(+), 2 deletions(-)

diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst
index d41b02328c20..2399e08bb168 100644
--- a/packages/google-cloud-logging/README.rst
+++ b/packages/google-cloud-logging/README.rst
@@ -80,9 +80,31 @@ Using the API
 ..
code:: python from google.cloud import logging_v2 - client = logging_v2.LoggingServiceV2Client() - entries = [] + + resource = { + "type": "global", + "labels": { + "project_id": "[PROJECT_ID]" + } + } + + """ + Log entries can be either LogEntry or dict. + You can describe the same data in the following format: + + e = { + "log_name": "projects/[PROJECT_ID]/logs/test-logging", + "resource": resource, + "text_payload": "this is a log statement", + } + """ + e = logging_v2.types.LogEntry( + log_name="projects/[PROJECT_ID]/logs/test-logging", # optional + resource=resource, # optional + text_payload="this is a log statement") + + entries = [e] response = client.write_log_entries(entries) .. code:: python From 073f6d5113350eda0ca89cce517de547fdfb1887 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 7 Aug 2020 17:06:28 -0400 Subject: [PATCH 280/855] chore: release 1.15.1 (#50) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 8 ++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 367ad2e8f1e8..517664976b31 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [1.15.1](https://www.github.com/googleapis/python-logging/compare/v1.15.0...v1.15.1) (2020-07-01) + + +### Documentation + +* add initialization of LogEntry instance in the v2 example ([#46](https://www.github.com/googleapis/python-logging/issues/46)) ([251ac93](https://www.github.com/googleapis/python-logging/commit/251ac9355b192121572552c1c9cfd4df94a42802)), closes [#44](https://www.github.com/googleapis/python-logging/issues/44) +* change descriptions for virtual environment ([#48](https://www.github.com/googleapis/python-logging/issues/48)) ([c5c3c15](https://www.github.com/googleapis/python-logging/commit/c5c3c153d1ae91f44c4104279baae9d9e4f88d03)), closes [#47](https://www.github.com/googleapis/python-logging/issues/47) + ## [1.15.0](https://www.github.com/googleapis/python-logging/compare/v1.14.0...v1.15.0) (2020-02-26) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c06667d72bc9..42d4eee542f5 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = "1.15.0" +version = "1.15.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 18ec7120a2caa67076d0589641537a969cce0a76 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 12 Aug 2020 16:28:40 -0400 Subject: [PATCH 281/855] feat: pass 'client_options' to super ctor (#61) Closes #55 --- packages/google-cloud-logging/google/cloud/logging/client.py | 5 ++++- packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 680c29c8a9dd..0997d21a7652 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -124,7 +124,10 @@ def __init__( 
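# A minimal sketch of what forwarding client_options enables; the endpoint
# value below is illustrative only, not an official regional endpoint:
#
#     from google.cloud import logging
#
#     client = logging.Client(
#         client_options={"api_endpoint": "logging.googleapis.com"}
#     )
#
# Before this change the constructor accepted client_options but never
# passed it on to the base class, so such overrides were silently ignored.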
client_options=None, ): super(Client, self).__init__( - project=project, credentials=credentials, _http=_http + project=project, + credentials=credentials, + _http=_http, + client_options=client_options, ) kw_args = {"client_info": client_info} diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 42d4eee542f5..776c30b3cf54 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ "google-api-core[grpc] >= 1.15.0, < 2.0.0dev", - "google-cloud-core >= 1.1.0, < 2.0dev", + "google-cloud-core >= 1.4.1, < 2.0dev", ] extras = { } From bb24a95472cbf8e451ceafca05ff60f19fb8105e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 24 Sep 2020 09:23:06 -0600 Subject: [PATCH 282/855] chore: add default CODEOWNERS (#65) --- packages/google-cloud-logging/.github/CODEOWNERS | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 packages/google-cloud-logging/.github/CODEOWNERS diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS new file mode 100644 index 000000000000..30c3973aa372 --- /dev/null +++ b/packages/google-cloud-logging/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +# The @googleapis/yoshi-python is the default owner for changes in this repo +* @googleapis/yoshi-python + +# The python-samples-reviewers team is the default owner for samples changes +/samples/ @googleapis/python-samples-owners \ No newline at end of file From d1ac0b03602b37a77ea96d32c7ce4e77339b7732 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 19 Oct 2020 13:45:04 -0700 Subject: [PATCH 283/855] fix: failing CI tests (#70) --- .../google-cloud-logging/.kokoro/build.sh | 8 +- .../.kokoro/docker/docs/Dockerfile | 98 ++++ .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 ++ .../.kokoro/docs/common.cfg | 21 +- .../.kokoro/docs/docs-presubmit.cfg | 17 + .../.kokoro/publish-docs.sh | 41 +- .../google-cloud-logging/.kokoro/release.sh | 2 - .../.kokoro/samples/lint/common.cfg | 34 ++ .../.kokoro/samples/lint/continuous.cfg | 6 + .../.kokoro/samples/lint/periodic.cfg | 6 + .../.kokoro/samples/lint/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 34 ++ .../.kokoro/samples/python3.6/continuous.cfg | 7 + .../.kokoro/samples/python3.6/periodic.cfg | 6 + .../.kokoro/samples/python3.6/presubmit.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 34 ++ .../.kokoro/samples/python3.7/continuous.cfg | 6 + .../.kokoro/samples/python3.7/periodic.cfg | 6 + .../.kokoro/samples/python3.7/presubmit.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 34 ++ .../.kokoro/samples/python3.8/continuous.cfg | 6 + .../.kokoro/samples/python3.8/periodic.cfg | 6 + .../.kokoro/samples/python3.8/presubmit.cfg | 6 + .../.kokoro/test-samples.sh | 110 ++++ .../.kokoro/trampoline_v2.sh | 487 ++++++++++++++++++ packages/google-cloud-logging/.trampolinerc | 51 ++ packages/google-cloud-logging/docs/conf.py | 5 +- .../google/cloud/logging/entries.py | 12 +- .../google/cloud/logging/handlers/_helpers.py | 4 +- .../google/cloud/logging/logger.py | 3 +- .../gapic/config_service_v2_client.py | 75 ++- 
.../gapic/logging_service_v2_client.py | 43 +- .../gapic/metrics_service_v2_client.py | 47 +- .../cloud/logging_v2/proto/log_entry_pb2.py | 4 +- .../logging_v2/proto/logging_config_pb2.py | 4 +- .../proto/logging_config_pb2_grpc.py | 90 ++-- .../logging_v2/proto/logging_metrics_pb2.py | 8 +- .../proto/logging_metrics_pb2_grpc.py | 27 +- .../cloud/logging_v2/proto/logging_pb2.py | 8 +- .../logging_v2/proto/logging_pb2_grpc.py | 47 +- packages/google-cloud-logging/noxfile.py | 2 +- .../scripts/decrypt-secrets.sh | 33 ++ .../google-cloud-logging/testing/.gitignore | 3 + .../tests/system/test_system.py | 8 +- .../tests/unit/test__http.py | 13 +- 45 files changed, 1342 insertions(+), 183 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile create mode 100755 packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg create mode 100755 packages/google-cloud-logging/.kokoro/test-samples.sh create mode 100755 packages/google-cloud-logging/.kokoro/trampoline_v2.sh create mode 100644 packages/google-cloud-logging/.trampolinerc create mode 100755 packages/google-cloud-logging/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-logging/testing/.gitignore diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index 8df566562b91..a194a9eadccb 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. 
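# A usage sketch (the session name is illustrative; it must match a session
# defined in noxfile.py):
#
#     NOX_SESSION=docs .kokoro/build.sh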
+if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..412b0b56a921 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. 
+ --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000000..d653dd868e4b --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? -ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Łukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg index 01a16ec856c1..4206e8ac43ed 100644 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" +build_file: "python-logging/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. 
+env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..1118107829b7 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index d01483d2dfb6..8acb14e802b0 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -13,33 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 -cd github/python-logging - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -54,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index c9b0928bad84..2ef944a00e3f 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg new file mode 100644 index 000000000000..ceb20370e175 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 000000000000..a9d6d48c40a3 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 000000000000..7218af1499e5 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 000000000000..1f7cc1973d29 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 000000000000..9ba81c4b7b80 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh new file mode 100755 index 000000000000..ba97b53d500c --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -0,0 +1,110 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-logging + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the Build Cop Bot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+        chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+        $KOKORO_GFILE_DIR/linux_amd64/buildcop
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+        RTN=1
+        echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+        echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh
new file mode 100755
index 000000000000..719bcd5ba84d
--- /dev/null
+++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker container with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from gcs to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. 
+ RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." 
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." 
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+        docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with ${test_retval}"
+else
+    log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+       [[ $test_retval == 0 ]] && \
+       [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+    log_yellow "Uploading the Docker image."
+    if docker push "${TRAMPOLINE_IMAGE}"; then
+        log_green "Finished uploading the Docker image."
+    else
+        log_red "Failed uploading the Docker image."
+    fi
+    # Call trampoline_after_upload_hook if it's defined.
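# (A sketch of such a hook: a repository can define it in .trampolinerc,
# which this script sources from the project root. The body below is
# illustrative:
#
#     function trampoline_after_upload_hook() {
#         log_green "Pushed ${TRAMPOLINE_IMAGE}; running post-upload steps."
#     }
#
# Any function defined before this point would work.)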
+ if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc new file mode 100644 index 000000000000..995ee29111e1 --- /dev/null +++ b/packages/google-cloud-logging/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 45db4f8b2923..7a03936bb41d 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -340,7 +340,10 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging/entries.py index ed1c28163f60..3847102dc504 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging/entries.py @@ -232,8 +232,7 @@ def from_api_repr(cls, resource, client, loggers=None): return inst def to_api_repr(self): - """API repr (JSON format) for entry. - """ + """API repr (JSON format) for entry.""" info = {} if self.log_name is not None: info["logName"] = self.log_name @@ -285,8 +284,7 @@ def _extract_payload(cls, resource): return resource["textPayload"] def to_api_repr(self): - """API repr (JSON format) for entry. - """ + """API repr (JSON format) for entry.""" info = super(TextEntry, self).to_api_repr() info["textPayload"] = self.payload return info @@ -313,8 +311,7 @@ def _extract_payload(cls, resource): return resource["jsonPayload"] def to_api_repr(self): - """API repr (JSON format) for entry. 
- """ + """API repr (JSON format) for entry.""" info = super(StructEntry, self).to_api_repr() info["jsonPayload"] = self.payload return info @@ -351,8 +348,7 @@ def payload_json(self): return self.payload def to_api_repr(self): - """API repr (JSON format) for entry. - """ + """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() info["protoPayload"] = MessageToDict(self.payload) return info diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index d65a2690f8f7..b4b7fcf5b892 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -41,8 +41,8 @@ def format_stackdriver_json(record, message): """Helper to format a LogRecord in in Stackdriver fluentd format. - :rtype: str - :returns: JSON str to be written to the log file. + :rtype: str + :returns: JSON str to be written to the log file. """ subsecond, second = math.modf(record.created) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index b212b6e8b0c3..6b5445d0578e 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -114,8 +114,7 @@ def batch(self, client=None): return Batch(self, client) def _do_log(self, client, _entry_class, payload=None, **kw): - """Helper for :meth:`log_empty`, :meth:`log_text`, etc. - """ + """Helper for :meth:`log_empty`, :meth:`log_text`, etc.""" client = self._require_client(client) # Apply defaults diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 37dafa34ac0e..d3d08370e63a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -40,7 +40,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-logging", +).version class ConfigServiceV2Client(object): @@ -77,7 +79,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, + "billingAccounts/{billing_account}", + billing_account=billing_account, ) @classmethod @@ -110,7 +113,10 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) + return google.api_core.path_template.expand( + "folders/{folder}", + folder=folder, + ) @classmethod def folder_exclusion_path(cls, folder, exclusion): @@ -125,14 +131,17 @@ def folder_exclusion_path(cls, folder, exclusion): def folder_sink_path(cls, folder, sink): """Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, + "folders/{folder}/sinks/{sink}", + folder=folder, + sink=sink, ) @classmethod def 
organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, + "organizations/{organization}", + organization=organization, ) @classmethod @@ -157,14 +166,17 @@ def organization_sink_path(cls, organization, sink): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project, + "projects/{project}", + project=project, ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", project=project, sink=sink, + "projects/{project}/sinks/{sink}", + project=project, + sink=sink, ) def __init__( @@ -253,8 +265,12 @@ def __init__( ) self.transport = transport else: - self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, + self.transport = ( + config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( + address=api_endpoint, + channel=channel, + credentials=credentials, + ) ) if client_info is None: @@ -360,7 +376,8 @@ def list_sinks( ) request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size, + parent=parent, + page_size=page_size, ) if metadata is None: metadata = [] @@ -450,7 +467,9 @@ def get_sink( client_info=self._client_info, ) - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) + request = logging_config_pb2.GetSinkRequest( + sink_name=sink_name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -554,7 +573,9 @@ def create_sink( ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, + parent=parent, + sink=sink, + unique_writer_identity=unique_writer_identity, ) if metadata is None: metadata = [] @@ -759,7 +780,9 @@ def delete_sink( client_info=self._client_info, ) - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) + request = logging_config_pb2.DeleteSinkRequest( + sink_name=sink_name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -857,7 +880,8 @@ def list_exclusions( ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size, + parent=parent, + page_size=page_size, ) if metadata is None: metadata = [] @@ -947,7 +971,9 @@ def get_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.GetExclusionRequest(name=name,) + request = logging_config_pb2.GetExclusionRequest( + name=name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -1038,7 +1064,8 @@ def create_exclusion( ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion, + parent=parent, + exclusion=exclusion, ) if metadata is None: metadata = [] @@ -1142,7 +1169,9 @@ def update_exclusion( ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask, + name=name, + exclusion=exclusion, + update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1218,7 +1247,9 @@ def delete_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.DeleteExclusionRequest(name=name,) + request = logging_config_pb2.DeleteExclusionRequest( + name=name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -1306,7 +1337,9 @@ def get_cmek_settings( 
client_info=self._client_info, ) - request = logging_config_pb2.GetCmekSettingsRequest(name=name,) + request = logging_config_pb2.GetCmekSettingsRequest( + name=name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -1422,7 +1455,9 @@ def update_cmek_settings( ) request = logging_config_pb2.UpdateCmekSettingsRequest( - name=name, cmek_settings=cmek_settings, update_mask=update_mask, + name=name, + cmek_settings=cmek_settings, + update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index c43506d1bb74..c823deacb1c9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -44,7 +44,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-logging", +).version class LoggingServiceV2Client(object): @@ -81,7 +83,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, + "billingAccounts/{billing_account}", + billing_account=billing_account, ) @classmethod @@ -96,27 +99,35 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) + return google.api_core.path_template.expand( + "folders/{folder}", + folder=folder, + ) @classmethod def folder_log_path(cls, folder, log): """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log, + "folders/{folder}/logs/{log}", + folder=folder, + log=log, ) @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log, + "projects/{project}/logs/{log}", + project=project, + log=log, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, + "organizations/{organization}", + organization=organization, ) @classmethod @@ -132,7 +143,8 @@ def organization_log_path(cls, organization, log): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project, + "projects/{project}", + project=project, ) def __init__( @@ -221,8 +233,12 @@ def __init__( ) self.transport = transport else: - self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, + self.transport = ( + logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( + address=api_endpoint, + channel=channel, + credentials=credentials, + ) ) if client_info is None: @@ -311,7 +327,9 @@ def delete_log( client_info=self._client_info, ) - request = 
logging_pb2.DeleteLogRequest(log_name=log_name,) + request = logging_pb2.DeleteLogRequest( + log_name=log_name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -776,7 +794,10 @@ def list_logs( client_info=self._client_info, ) - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) + request = logging_pb2.ListLogsRequest( + parent=parent, + page_size=page_size, + ) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 0c80a5d43fe2..87cf5b89d60d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -46,7 +46,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-logging", +).version class MetricsServiceV2Client(object): @@ -83,33 +85,41 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, + "billingAccounts/{billing_account}", + billing_account=billing_account, ) @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) + return google.api_core.path_template.expand( + "folders/{folder}", + folder=folder, + ) @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric, + "projects/{project}/metrics/{metric}", + project=project, + metric=metric, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, + "organizations/{organization}", + organization=organization, ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project, + "projects/{project}", + project=project, ) def __init__( @@ -198,8 +208,12 @@ def __init__( ) self.transport = transport else: - self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, + self.transport = ( + metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( + address=api_endpoint, + channel=channel, + credentials=credentials, + ) ) if client_info is None: @@ -302,7 +316,8 @@ def list_log_metrics( ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size, + parent=parent, + page_size=page_size, ) if metadata is None: metadata = [] @@ -387,7 +402,9 @@ def get_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) + request = logging_metrics_pb2.GetLogMetricRequest( + metric_name=metric_name, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -473,7 +490,8 @@ def 
create_log_metric( ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric, + parent=parent, + metric=metric, ) if metadata is None: metadata = [] @@ -560,7 +578,8 @@ def update_log_metric( ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric, + metric_name=metric_name, + metric=metric, ) if metadata is None: metadata = [] @@ -631,7 +650,9 @@ def delete_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) + request = logging_metrics_pb2.DeleteLogMetricRequest( + metric_name=metric_name, + ) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index f4805192b30a..9b0ef2205f20 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -427,7 +427,9 @@ ), ], extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY,], + nested_types=[ + _LOGENTRY_LABELSENTRY, + ], enum_types=[], serialized_options=_b( "\352A\271\001\n\032logging.googleapis.com/Log\022\035projects/{project}/logs/{log}\022'organizations/{organization}/logs/{log}\022\033folders/{folder}/logs/{log}\022,billingAccounts/{billing_account}/logs/{log}\032\010log_name" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 65fd2cff616a..7e4ae83dd9d0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -320,7 +320,9 @@ ], extensions=[], nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT,], + enum_types=[ + _LOGSINK_VERSIONFORMAT, + ], serialized_options=_b( "\352A\270\001\n\033logging.googleapis.com/Sink\022\037projects/{project}/sinks/{sink}\022)organizations/{organization}/sinks/{sink}\022\035folders/{folder}/sinks/{sink}\022.billingAccounts/{billing_account}/sinks/{sink}" ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index c2e910e1987b..62e751bf554a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -8,15 +8,14 @@ class ConfigServiceV2Stub(object): - """Service for configuring sinks used to route log entries. - """ + """Service for configuring sinks used to route log entries.""" def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.ListSinks = channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, @@ -80,85 +79,78 @@ def __init__(self, channel): class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to route log entries. - """ + """Service for configuring sinks used to route log entries.""" def ListSinks(self, request, context): - """Lists sinks. 
- """ + """Lists sinks.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetSink(self, request, context): - """Gets a sink. - """ + """Gets a sink.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateSink(self, request, context): """Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. - """ + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateSink(self, request, context): """Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: `destination`, and `filter`. + sink with values from the new sink: `destination`, and `filter`. - The updated sink might also have a new `writer_identity`; see the - `unique_writer_identity` field. - """ + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteSink(self, request, context): """Deletes a sink. If the sink has a unique `writer_identity`, then that - service account is also deleted. - """ + service account is also deleted. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource. - """ + """Lists all the exclusions in a parent resource.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetExclusion(self, request, context): - """Gets the description of an exclusion. - """ + """Gets the description of an exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateExclusion(self, request, context): """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - """ + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion. - """ + """Changes one or more properties of an existing exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteExclusion(self, request, context): - """Deletes an exclusion. 
- """ + """Deletes an exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -166,13 +158,13 @@ def DeleteExclusion(self, request, context): def GetCmekSettings(self, request, context): """Gets the Logs Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. - See [Enabling CMEK for Logs - Router](/logging/docs/routing/managed-encryption) for more information. - """ + See [Enabling CMEK for Logs + Router](/logging/docs/routing/managed-encryption) for more information. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -180,19 +172,19 @@ def GetCmekSettings(self, request, context): def UpdateCmekSettings(self, request, context): """Updates the Logs Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) `kms_key_name` is invalid, or 2) the associated service - account does not have the required - `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or - 3) access to the key is disabled. + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) `kms_key_name` is invalid, or 2) the associated service + account does not have the required + `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or + 3) access to the key is disabled. - See [Enabling CMEK for Logs - Router](/logging/docs/routing/managed-encryption) for more information. - """ + See [Enabling CMEK for Logs + Router](/logging/docs/routing/managed-encryption) for more information. 
+ """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 01e308fb741d..08eaf8099fc0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -318,8 +318,12 @@ ), ], extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], - enum_types=[_LOGMETRIC_APIVERSION,], + nested_types=[ + _LOGMETRIC_LABELEXTRACTORSENTRY, + ], + enum_types=[ + _LOGMETRIC_APIVERSION, + ], serialized_options=_b( "\352AD\n\035logging.googleapis.com/Metric\022#projects/{project}/metrics/{metric}" ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py index 09f84e038a1b..a3a3733687c4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -8,15 +8,14 @@ class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. - """ + """Service for configuring logs-based metrics.""" def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.ListLogMetrics = channel.unary_unary( "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, @@ -45,40 +44,34 @@ def __init__(self, channel): class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. - """ + """Service for configuring logs-based metrics.""" def ListLogMetrics(self, request, context): - """Lists logs-based metrics. - """ + """Lists logs-based metrics.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ + """Gets a logs-based metric.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ + """Creates a logs-based metric.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ + """Creates or updates a logs-based metric.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. 
- """ + """Deletes a logs-based metric.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 35c9b9c52449..08cc2b49e507 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -274,7 +274,9 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], + nested_types=[ + _WRITELOGENTRIESREQUEST_LABELSENTRY, + ], enum_types=[], serialized_options=None, is_extendable=False, @@ -389,7 +391,9 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], + nested_types=[ + _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, + ], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index e1759bbc1b99..2e444b925486 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -8,15 +8,14 @@ class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ + """Service for ingesting and querying logs.""" def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.DeleteLog = channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, @@ -45,52 +44,50 @@ def __init__(self, channel): class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ + """Service for ingesting and querying logs.""" def DeleteLog(self, request, context): """Deletes all the log entries in a log. The log reappears if it receives new - entries. Log entries written shortly before the delete operation might not - be deleted. Entries received after the delete operation with a timestamp - before the operation will be deleted. - """ + entries. Log entries written shortly before the delete operation might not + be deleted. Entries received after the delete operation with a timestamp + before the operation will be deleted. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def WriteLogEntries(self, request, context): """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. 
+ A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListLogEntries(self, request, context): """Lists log entries. Use this method to retrieve log entries that originated - from a project/folder/organization/billing account. For ways to export log - entries, see [Exporting Logs](/logging/docs/export). - """ + from a project/folder/organization/billing account. For ways to export log + entries, see [Exporting Logs](/logging/docs/export). + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging. - """ + """Lists the descriptors for monitored resource types used by Logging.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListLogs(self, request, context): """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ + Only logs that have entries are listed. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 826477c01ba0..5e1508144098 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -114,7 +114,7 @@ def unit(session): default(session) -@nox.session(python=['2.7', '3.6']) +@nox.session(python=['3.6']) def system(session): """Run the system test suite.""" diff --git a/packages/google-cloud-logging/scripts/decrypt-secrets.sh b/packages/google-cloud-logging/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..ff599eb2af25 --- /dev/null +++ b/packages/google-cloud-logging/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
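For reference, the three gcloud lookups in this script can also be done with the Secret Manager Python client. A minimal sketch, assuming the google-cloud-secret-manager package is available and reusing the fallback project and secret names from the script (not an exact replacement for the shell above):

    from google.cloud import secretmanager

    client = secretmanager.SecretManagerServiceClient()
    # Same fallback project the script uses when SECRET_MANAGER_PROJECT is unset.
    project = "cloud-devrel-kokoro-resources"

    def access_latest(secret_id):
        # Fetch the latest version of a secret and decode its payload.
        name = "projects/%s/secrets/%s/versions/latest" % (project, secret_id)
        response = client.access_secret_version(request={"name": name})
        return response.payload.data.decode("utf-8")

    test_env = access_latest("python-docs-samples-test-env")
    service_account = access_latest("python-docs-samples-service-account")
    client_secrets = access_latest("python-docs-samples-client-secrets")
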
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/.gitignore b/packages/google-cloud-logging/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-logging/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index ea51aa8fd729..db6dbe95ef70 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -425,11 +425,11 @@ def test_create_sink_pubsub_topic(self): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(Config.CLIENT.project, TOPIC_NAME) self.to_delete.append(_DeleteWrapper(publisher, topic_path)) - publisher.create_topic(topic_path) + publisher.create_topic(request={"name": topic_path}) - policy = publisher.get_iam_policy(topic_path) + policy = publisher.get_iam_policy(request={"resource": topic_path}) policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"]) - publisher.set_iam_policy(topic_path, policy) + publisher.set_iam_policy(request={"resource": topic_path, "policy": policy}) TOPIC_URI = "pubsub.googleapis.com/%s" % (topic_path,) @@ -536,4 +536,4 @@ def __init__(self, publisher, topic_path): self.topic_path = topic_path def delete(self): - self.publisher.delete_topic(self.topic_path) + self.publisher.delete_topic(request={"topic": self.topic_path}) diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 83de87aae299..7ad247ca7866 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -49,10 +49,19 @@ def test_default_url(self): self.assertIs(conn._client, client) def test_build_api_url_w_custom_endpoint(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + custom_endpoint = "https://foo-logging.googleapis.com" conn = self._make_one(object(), api_endpoint=custom_endpoint) - URI = "/".join([custom_endpoint, conn.API_VERSION, "foo"]) - self.assertEqual(conn.build_api_url("/foo"), URI) + uri = conn.build_api_url("/foo") + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual("%s://%s" % (scheme, netloc), custom_endpoint) + self.assertEqual(path, "/".join(["", conn.API_VERSION, "foo"])) + parms = dict(parse_qsl(qs)) + pretty_print = parms.pop("prettyPrint", "false") + self.assertEqual(pretty_print, "false") + self.assertEqual(parms, {}) def test_extra_headers(self): import requests From 9fcbe07dcba27b6b61d1e4b33d94f981281172cb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 19 Oct 2020 14:31:25 -0700 Subject: [PATCH 284/855] fix: add default filter settings to list_entries (#73) --- .../google/cloud/logging/_helpers.py | 27 ++++ .../google/cloud/logging/client.py | 4 + .../google/cloud/logging/logger.py | 3 + 
packages/google-cloud-logging/noxfile.py | 4 +- .../tests/unit/test__helpers.py | 56 ++++++++ .../tests/unit/test_client.py | 131 ++++++++++++++++-- .../tests/unit/test_logger.py | 104 ++++++++++++-- 7 files changed, 308 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/_helpers.py index 4df8b12736bc..37e890eadc3a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/_helpers.py @@ -16,6 +16,10 @@ import logging +from datetime import datetime +from datetime import timedelta +from datetime import timezone + import requests from google.cloud.logging.entries import LogEntry @@ -50,6 +54,9 @@ class LogSeverity(object): logging.NOTSET: LogSeverity.DEFAULT, } +_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" +"""Time format for timestamps used in API""" + METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/" METADATA_HEADERS = {"Metadata-Flavor": "Google"} @@ -123,3 +130,23 @@ def _normalize_severity(stdlib_level): :returns: Corresponding Stackdriver severity. """ return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level) + + +def _add_defaults_to_filter(filter_): + """Modify the input filter expression to add sensible defaults. + + :type filter_: str + :param filter_: The original filter expression + + :rtype: str + :returns: sensible default filter string + """ + + # By default, requests should only return logs in the last 24 hours + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + time_filter = 'timestamp>="%s"' % yesterday.strftime(_TIME_FORMAT) + if filter_ is None: + filter_ = time_filter + elif "timestamp" not in filter_.lower(): + filter_ = "%s AND %s" % (filter_, time_filter) + return filter_ diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py index 0997d21a7652..64d9625060a9 100644 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ b/packages/google-cloud-logging/google/cloud/logging/client.py @@ -28,6 +28,7 @@ import google.api_core.client_options from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.logging._helpers import _add_defaults_to_filter from google.cloud.logging._helpers import retrieve_metadata_server from google.cloud.logging._http import Connection from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI @@ -223,6 +224,7 @@ def list_entries( :param filter_: a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters + By default, a 24 hour filter is applied. 
:type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -249,6 +251,8 @@ def list_entries( if projects is None: projects = [self.project] + filter_ = _add_defaults_to_filter(filter_) + return self.logging_api.list_entries( projects=projects, filter_=filter_, diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py index 6b5445d0578e..e6dae8b0eaa0 100644 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging/logger.py @@ -14,6 +14,7 @@ """Define API Loggers.""" +from google.cloud.logging._helpers import _add_defaults_to_filter from google.cloud.logging.entries import LogEntry from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry @@ -242,6 +243,7 @@ def list_entries( :param filter_: a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters + By default, a 24 hour filter is applied. :type order_by: str :param order_by: One of :data:`~google.cloud.logging.ASCENDING` @@ -270,6 +272,7 @@ def list_entries( filter_ = "%s AND %s" % (filter_, log_filter) else: filter_ = log_filter + filter_ = _add_defaults_to_filter(filter_) return self.client.list_entries( projects=projects, filter_=filter_, diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 5e1508144098..1de2a50c2cb4 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -97,7 +97,7 @@ def default(session, django_dep=('django',)): ) -@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +@nox.session(python=['3.5', '3.6', '3.7']) def unit(session): """Run the unit test suite.""" @@ -156,7 +156,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index db0804e66638..c924567944ec 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
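The unit tests that follow pin down the helper's behavior. In sketch form, using illustrative timestamps (the real value is computed from datetime.now):

    from google.cloud.logging._helpers import _add_defaults_to_filter

    # No filter: default to entries from the last 24 hours.
    _add_defaults_to_filter(None)
    # -> 'timestamp>="2020-10-19T21:00:00.000000+0000"'

    # A filter with no timestamp clause gets the default ANDed on.
    _add_defaults_to_filter("logName=projects/test/test")
    # -> 'logName=projects/test/test AND timestamp>="2020-10-19T21:00:00.000000+0000"'

    # A filter that already mentions "timestamp" (any case) is returned unchanged.
    _add_defaults_to_filter('timestamp="2020-10-13T21"')
    # -> 'timestamp="2020-10-13T21"'
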
+from datetime import datetime +from datetime import timedelta +from datetime import timezone import logging import unittest @@ -163,6 +166,59 @@ def test__normalize_severity_non_standard(self): self._normalize_severity_helper(unknown_level, unknown_level) +class Test__add_defaults_to_filter(unittest.TestCase): + @staticmethod + def _time_format(): + return "%Y-%m-%dT%H:%M:%S.%f%z" + + @staticmethod + def _add_defaults_to_filter(filter_): + from google.cloud.logging._helpers import _add_defaults_to_filter + + return _add_defaults_to_filter(filter_) + + def test_filter_defaults_empty_input(self): + """Filter should default to return logs < 24 hours old""" + out_filter = self._add_defaults_to_filter(None) + timestamp = datetime.strptime( + out_filter, 'timestamp>="' + self._time_format() + '"' + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) + + def test_filter_defaults_no_timestamp(self): + """Filter should append 24 hour timestamp filter to input string""" + test_inputs = [ + "", + " ", + "logName=/projects/test/test", + "test1 AND test2 AND test3", + "time AND stamp ", + ] + for in_filter in test_inputs: + out_filter = self._add_defaults_to_filter(in_filter) + self.assertTrue(in_filter in out_filter) + self.assertTrue("timestamp" in out_filter) + + timestamp = datetime.strptime( + out_filter, in_filter + ' AND timestamp>="' + self._time_format() + '"' + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) + + def test_filter_defaults_only_timestamp(self): + """If user inputs a timestamp filter, don't add default""" + in_filter = "timestamp=test" + out_filter = self._add_defaults_to_filter(in_filter) + self.assertEqual(in_filter, out_filter) + + def test_filter_defaults_capitalized_timestamp(self): + """Should work with capitalized timestamp strings""" + in_filter = "TIMESTAMP=test" + out_filter = self._add_defaults_to_filter(in_filter) + self.assertEqual(in_filter, out_filter) + + class EntryMock(object): def __init__(self): self.sentinel = object() diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 4e0b5ca22f0d..101baf63b297 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -12,6 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
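The client-level tests that follow assert on the payload posted to the entries:list endpoint. In usage terms, the change behaves roughly like this sketch (project id hypothetical):

    from google.cloud import logging

    client = logging.Client(project="my-project")  # hypothetical project

    # With no filter_ argument, the request now carries a default
    # timestamp>="..." clause limiting results to the last 24 hours.
    for entry in client.list_entries():
        print(entry.payload)

    # A filter that already constrains "timestamp" is passed through unchanged.
    entries = list(client.list_entries(filter_='timestamp="2020-10-13T21"'))
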
+from copy import deepcopy +from datetime import datetime +from datetime import timedelta +from datetime import timezone + import unittest import mock @@ -33,6 +38,7 @@ class TestClient(unittest.TestCase): METRIC_NAME = "metric_name" FILTER = "logName:syslog AND severity>=ERROR" DESCRIPTION = "DESCRIPTION" + TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' @staticmethod def _get_target_class(): @@ -279,15 +285,27 @@ def test_list_entries_defaults(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(token, TOKEN) - called_with = client._connection._called_with + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" self.assertEqual( - called_with, + call_payload_no_filter, { "path": "/entries:list", "method": "POST", - "data": {"projectIds": [self.PROJECT]}, + "data": { + "filter": "removed", + "projectIds": [self.PROJECT], + }, }, ) + # verify that default filter is 24 hours + timestamp = datetime.strptime( + client._connection._called_with["data"]["filter"], + "timestamp>=" + self.TIME_FORMAT, + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING @@ -297,7 +315,7 @@ def test_list_entries_explicit(self): PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" - FILTER = "logName:LOGNAME" + INPUT_FILTER = "logName:LOGNAME" IID1 = "IID1" IID2 = "IID2" PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} @@ -327,7 +345,7 @@ def test_list_entries_explicit(self): iterator = client.list_entries( projects=[PROJECT1, PROJECT2], - filter_=FILTER, + filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN, @@ -360,14 +378,111 @@ def test_list_entries_explicit(self): self.assertIs(entries[0].logger, entries[1].logger) - called_with = client._connection._called_with + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" self.assertEqual( - called_with, + call_payload_no_filter, + { + "path": "/entries:list", + "method": "POST", + "data": { + "filter": "removed", + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "projectIds": [PROJECT1, PROJECT2], + }, + }, + ) + # verify that default timestamp filter is added + timestamp = datetime.strptime( + client._connection._called_with["data"]["filter"], + INPUT_FILTER + " AND timestamp>=" + self.TIME_FORMAT, + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) + + def test_list_entries_explicit_timestamp(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging.entries import StructEntry + from google.cloud.logging.logger import Logger + + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + INPUT_FILTER = 'logName:LOGNAME AND timestamp="2020-10-13T21"' + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" + PAGE_SIZE = 42 + ENTRIES = [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": 
"global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + ] + client = self._make_one( + self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"entries": ENTRIES} + client._connection = _Connection(returned) + + iterator = client.list_entries( + projects=[PROJECT1, PROJECT2], + filter_=INPUT_FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + ) + entries = list(iterator) + token = iterator.next_page_token + + # First, check the token. + self.assertIsNone(token) + # Then check the entries. + self.assertEqual(len(entries), 2) + entry = entries[0] + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertIs(entries[0].logger, entries[1].logger) + + # check call payload + # filter should not be changed + self.assertEqual( + client._connection._called_with, { "path": "/entries:list", "method": "POST", "data": { - "filter": FILTER, + "filter": INPUT_FILTER, "orderBy": DESCENDING, "pageSize": PAGE_SIZE, "pageToken": TOKEN, diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 5bf6a706815f..7cc870e3a916 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -12,6 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from copy import deepcopy +from datetime import datetime +from datetime import timedelta +from datetime import timezone + import unittest import mock @@ -27,6 +32,7 @@ class TestLogger(unittest.TestCase): PROJECT = "test-project" LOGGER_NAME = "logger-name" + TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' @staticmethod def _get_target_class(): @@ -498,16 +504,29 @@ def test_list_entries_defaults(self): self.assertEqual(len(entries), 0) self.assertEqual(token, TOKEN) - called_with = client._connection._called_with - FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" self.assertEqual( - called_with, + call_payload_no_filter, { - "method": "POST", "path": "/entries:list", - "data": {"filter": FILTER, "projectIds": [self.PROJECT]}, + "method": "POST", + "data": { + "filter": "removed", + "projectIds": [self.PROJECT], + }, }, ) + # verify that default filter is 24 hours + timestamp = datetime.strptime( + client._connection._called_with["data"]["filter"], + LOG_FILTER + " AND timestamp>=" + self.TIME_FORMAT, + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): from google.cloud.logging import DESCENDING @@ -515,7 +534,70 @@ def test_list_entries_explicit(self): PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" - FILTER = "resource.type:global" + INPUT_FILTER = "resource.type:global" + TOKEN = "TOKEN" + PAGE_SIZE = 42 + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + client._connection = _Connection({}) + logger = self._make_one(self.LOGGER_NAME, client=client) + iterator = logger.list_entries( + projects=[PROJECT1, PROJECT2], + filter_=INPUT_FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + ) + entries = list(iterator) + token = iterator.next_page_token + + self.assertEqual(len(entries), 0) + self.assertIsNone(token) + # self.assertEqual(client._listed, LISTED) + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" + self.assertEqual( + call_payload_no_filter, + { + "method": "POST", + "path": "/entries:list", + "data": { + "filter": "removed", + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "projectIds": [PROJECT1, PROJECT2], + }, + }, + ) + # verify that default filter is 24 hours + LOG_FILTER = "logName=projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) + combined_filter = ( + INPUT_FILTER + + " AND " + + LOG_FILTER + + " AND " + + "timestamp>=" + + self.TIME_FORMAT + ) + timestamp = datetime.strptime( + client._connection._called_with["data"]["filter"], combined_filter + ) + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + self.assertLess(yesterday - timestamp, timedelta(minutes=1)) + + def test_list_entries_explicit_timestamp(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging.client import Client + + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + INPUT_FILTER = 'resource.type:global AND timestamp="2020-10-13T21"' TOKEN = "TOKEN" PAGE_SIZE = 42 client = Client( @@ -525,7 +607,7 @@ def test_list_entries_explicit(self): logger = self._make_one(self.LOGGER_NAME, client=client) iterator = 
logger.list_entries( projects=[PROJECT1, PROJECT2], - filter_=FILTER, + filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN, @@ -536,14 +618,14 @@ def test_list_entries_explicit(self): self.assertEqual(len(entries), 0) self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) - called_with = client._connection._called_with - combined_filter = "%s AND logName=projects/%s/logs/%s" % ( - FILTER, + # check call payload + LOG_FILTER = "logName=projects/%s/logs/%s" % ( self.PROJECT, self.LOGGER_NAME, ) + combined_filter = INPUT_FILTER + " AND " + LOG_FILTER self.assertEqual( - called_with, + client._connection._called_with, { "method": "POST", "path": "/entries:list", From afff1df2247cea11c2d399751ff38b84c105d0e2 Mon Sep 17 00:00:00 2001 From: Simon Zeltser Date: Tue, 27 Oct 2020 11:39:51 -0700 Subject: [PATCH 285/855] chore: add api-logging to codeowners (#81) --- packages/google-cloud-logging/.github/CODEOWNERS | 2 +- packages/google-cloud-logging/.repo-metadata.json | 5 +++-- packages/google-cloud-logging/CHANGELOG.md | 2 +- packages/google-cloud-logging/README.rst | 10 +++++----- packages/google-cloud-logging/docs/client.rst | 2 +- packages/google-cloud-logging/docs/usage.rst | 12 ++++++------ 6 files changed, 17 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS index 30c3973aa372..64d2aaa367da 100644 --- a/packages/google-cloud-logging/.github/CODEOWNERS +++ b/packages/google-cloud-logging/.github/CODEOWNERS @@ -5,7 +5,7 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python +* @googleapis/api-logging @googleapis/yoshi-python # The python-samples-reviewers team is the default owner for samples changes /samples/ @googleapis/python-samples-owners \ No newline at end of file diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 4a83dbb62d84..30541e78b44f 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -1,6 +1,6 @@ { "name": "logging", - "name_pretty": "Stackdriver Logging", + "name_pretty": "Cloud Logging", "product_documentation": "https://cloud.google.com/logging/docs", "client_documentation": "https://googleapis.dev/python/logging/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", @@ -8,5 +8,6 @@ "language": "python", "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", - "api_id": "logging.googleapis.com" + "api_id": "logging.googleapis.com", + "codeowner_team": "@googleapis/api-logging" } \ No newline at end of file diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 517664976b31..2dfcc2eb41c1 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -62,7 +62,7 @@ ### Implementation Changes - Remove gRPC size restrictions (4MB default) ([#8860](https://github.com/googleapis/google-cloud-python/pull/8860)) -- Map stdlib loglevels to Stackdriver severity enum values. ([#8837](https://github.com/googleapis/google-cloud-python/pull/8837)) +- Map stdlib loglevels to Cloud Logging severity enum values. 
([#8837](https://github.com/googleapis/google-cloud-python/pull/8837)) ### Documentation - Fix 'list_entries' example with projects. ([#8858](https://github.com/googleapis/google-cloud-python/pull/8858)) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 2399e08bb168..68c040298d7b 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -1,9 +1,9 @@ -Python Client for Stackdriver Logging +Python Client for Cloud Logging ===================================== |pypi| |versions| -`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver +`Cloud Logging API`_: Writes log entries and manages your Cloud Logging configuration. - `Client Library Documentation`_ @@ -13,7 +13,7 @@ Logging configuration. :target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ -.. _Stackdriver Logging API: https://cloud.google.com/logging +.. _Cloud Logging API: https://cloud.google.com/logging .. _Client Library Documentation: https://googleapis.dev/python/logging/latest .. _Product Documentation: https://cloud.google.com/logging/docs @@ -24,12 +24,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. `Enable the Stackdriver Logging API.`_ +3. `Enable the Cloud Logging API.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging +.. _Enable the Cloud Logging API.: https://cloud.google.com/logging .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation diff --git a/packages/google-cloud-logging/docs/client.rst b/packages/google-cloud-logging/docs/client.rst index f04d5c5255f1..c9e78db2795e 100644 --- a/packages/google-cloud-logging/docs/client.rst +++ b/packages/google-cloud-logging/docs/client.rst @@ -1,4 +1,4 @@ -Stackdriver Logging Client +Cloud Logging Client ========================== .. automodule:: google.cloud.logging.client diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index f5662bcbaa08..dccec49ea798 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -112,7 +112,7 @@ Manage log metrics ------------------ Metrics are counters of entries which match a given filter. They can be -used within Stackdriver Monitoring to create charts and alerts. +used within Cloud Monitoring to create charts and alerts. List all metrics for a project: @@ -259,7 +259,7 @@ Integration with Python logging module -------------------------------------- It's possible to tie the Python :mod:`logging` module directly into Google -Stackdriver Logging. There are different handler options to accomplish this. +Cloud Logging. There are different handler options to accomplish this. To automatically pick the default for your current environment, use :meth:`~google.cloud.logging.client.Client.get_default_handler`. 
@@ -269,7 +269,7 @@ To automatically pick the default for your current environment, use :dedent: 4 It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Stackdriver Logging, +for example a plain ``logging.warn`` call would be sent to Cloud Logging, as well as any other loggers created. A helper method :meth:`~google.cloud.logging.client.Client.setup_logging` is provided to configure this automatically. @@ -281,7 +281,7 @@ to configure this automatically. .. note:: - To reduce cost and quota usage, do not enable Stackdriver logging + To reduce cost and quota usage, do not enable Cloud Logging handlers while testing locally. You can also exclude certain loggers: @@ -348,7 +348,7 @@ recommended when running on the Google App Engine Flexible vanilla runtimes (i.e. your app.yaml contains ``runtime: python``), and :class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` , which is recommended when running on `Google Container Engine`_ with the -Stackdriver Logging plugin enabled. +Cloud Logging plugin enabled. :meth:`~google.cloud.logging.client.Client.get_default_handler` and :meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use @@ -356,6 +356,6 @@ the environment to automatically detect whether the code is running in these platforms and use the appropriate handler. In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Stackdriver logging. The handlers +in an expected format and forward them to Cloud logging. The handlers provided help set the correct metadata such as log level so that logs can be filtered accordingly. From 5ecdb65f8404e62737dae07e7087ff192db44015 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 27 Oct 2020 12:41:01 -0700 Subject: [PATCH 286/855] refactor!: remove python2 (#78) refactor!: drop support for Python2 BREAKING CHANGE: removes support for webapp2 and other Python2 specific code --- .../google/cloud/logging/handlers/_helpers.py | 38 ------------- .../logging/handlers/middleware/request.py | 5 +- packages/google-cloud-logging/noxfile.py | 18 ++----- packages/google-cloud-logging/setup.py | 4 +- .../tests/unit/handlers/test__helpers.py | 53 ------------------- 5 files changed, 5 insertions(+), 113 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py index b4b7fcf5b892..4bd319a53886 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py @@ -22,20 +22,10 @@ except ImportError: # pragma: NO COVER flask = None -try: - import webapp2 -except (ImportError, SyntaxError): # pragma: NO COVER - # If you try to import webapp2 under python3, you'll get a syntax - # error (since it hasn't been ported yet). We just pretend it - # doesn't exist. This is unlikely to hit in real life but does - # in the tests. - webapp2 = None - from google.cloud.logging.handlers.middleware.request import _get_django_request _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" _FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" -_WEBAPP2_TRACE_HEADER = "X-CLOUD-TRACE-CONTEXT" def format_stackdriver_json(record, message): @@ -75,33 +65,6 @@ def get_trace_id_from_flask(): return trace_id -def get_trace_id_from_webapp2(): - """Get trace_id from webapp2 request headers. 
- - :rtype: str - :returns: TraceID in HTTP request headers. - """ - if webapp2 is None: - return None - - try: - # get_request() succeeds if we're in the middle of a webapp2 - # request, or raises an assertion error otherwise: - # "Request global variable is not set". - req = webapp2.get_request() - except AssertionError: - return None - - header = req.headers.get(_WEBAPP2_TRACE_HEADER) - - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id - - def get_trace_id_from_django(): """Get trace_id from django request headers. @@ -131,7 +94,6 @@ def get_trace_id(): checkers = ( get_trace_id_from_django, get_trace_id_from_flask, - get_trace_id_from_webapp2, ) for checker in checkers: diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py index 33bc278fcf60..0229e4c8e1cd 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py @@ -34,11 +34,8 @@ def _get_django_request(): try: - # Django >= 1.10 from django.utils.deprecation import MiddlewareMixin -except ImportError: - # Not required for Django <= 1.9, see: - # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware +except ImportError: # pragma: NO COVER MiddlewareMixin = object diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 1de2a50c2cb4..2c976b6abeeb 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -26,8 +26,8 @@ 'pytest', 'pytest-cov', 'flask', - 'webapp2', 'webob', + 'django' ) @@ -71,13 +71,12 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -def default(session, django_dep=('django',)): +def default(session): """Default unit test session. """ # Install all test dependencies, then install this package in-place. 
deps = UNIT_TEST_DEPS - deps += django_dep session.install(*deps) session.install('-e', '.') @@ -100,18 +99,7 @@ def default(session, django_dep=('django',)): @nox.session(python=['3.5', '3.6', '3.7']) def unit(session): """Run the unit test suite.""" - - # Testing multiple version of django - # See https://www.djangoproject.com/download/ for supported version - django_deps_27 = [ - ('django==1.8.19',), - ('django >= 1.11.0, < 2.0.0dev',), - ] - - if session.virtualenv.interpreter == '2.7': - [default(session, django_dep=django) for django in django_deps_27] - else: - default(session) + default(session) @nox.session(python=['3.6']) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 776c30b3cf54..cf56847bae32 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -70,8 +70,6 @@ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', @@ -84,7 +82,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + python_requires='>=3.5', include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 702015961771..972e3db392d1 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -12,18 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json import unittest import mock -import six - -try: - from webapp2 import RequestHandler -except SyntaxError: - # webapp2 has not been ported to python3, so it will give a syntax - # error if we try. We'll just skip the webapp2 tests in that case. 
- RequestHandler = object class Test_get_trace_id_from_flask(unittest.TestCase): @@ -68,50 +59,6 @@ def test_valid_context_header(self): self.assertEqual(trace_id, expected_trace_id) -class _GetTraceId(RequestHandler): - def get(self): - from google.cloud.logging.handlers import _helpers - - trace_id = _helpers.get_trace_id_from_webapp2() - self.response.content_type = "application/json" - self.response.out.write(json.dumps(trace_id)) - - -@unittest.skipIf(not six.PY2, "webapp2 is Python 2 only") -class Test_get_trace_id_from_webapp2(unittest.TestCase): - @staticmethod - def create_app(): - import webapp2 - - app = webapp2.WSGIApplication([("/", _GetTraceId)]) - - return app - - def test_no_context_header(self): - import webob - - req = webob.BaseRequest.blank("/") - response = req.get_response(self.create_app()) - trace_id = json.loads(response.body) - - self.assertEqual(None, trace_id) - - def test_valid_context_header(self): - import webob - - webapp2_trace_header = "X-Cloud-Trace-Context" - expected_trace_id = "testtraceidwebapp2" - webapp2_trace_id = expected_trace_id + "/testspanid" - - req = webob.BaseRequest.blank( - "/", headers={webapp2_trace_header: webapp2_trace_id} - ) - response = req.get_response(self.create_app()) - trace_id = json.loads(response.body) - - self.assertEqual(trace_id, expected_trace_id) - - class Test_get_trace_id_from_django(unittest.TestCase): @staticmethod def _call_fut(): From 9dff15e166c43234be06823786a00698e5953b13 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 27 Oct 2020 15:54:50 -0700 Subject: [PATCH 287/855] docs: update docs (#77) - update branding - remove docs for auto-generated library --- packages/google-cloud-logging/CHANGELOG.md | 2 +- packages/google-cloud-logging/README.rst | 57 ++----------------- packages/google-cloud-logging/docs/conf.py | 21 ++++--- .../docs/gapic/v2/api.rst | 6 -- .../docs/gapic/v2/types.rst | 5 -- .../docs/handlers-container-engine.rst | 4 +- packages/google-cloud-logging/docs/index.rst | 1 - .../google-cloud-logging/docs/snippets.py | 2 +- packages/google-cloud-logging/docs/usage.rst | 6 +- packages/google-cloud-logging/docs/v1.rst | 18 ------ packages/google-cloud-logging/docs/v2.rst | 15 ++++- packages/google-cloud-logging/noxfile.py | 37 +++++++++++- packages/google-cloud-logging/synth.py | 3 +- 13 files changed, 76 insertions(+), 101 deletions(-) delete mode 100644 packages/google-cloud-logging/docs/gapic/v2/api.rst delete mode 100644 packages/google-cloud-logging/docs/gapic/v2/types.rst delete mode 100644 packages/google-cloud-logging/docs/v1.rst diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 2dfcc2eb41c1..d0af80e6ceb5 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -292,6 +292,6 @@ - Upgrading to `google-cloud-core >= 0.28.0` and adding dependency on `google-api-core` (#4221, #4280) - Deferring to `google-api-core` for `grpcio` and - `googleapis-common-protos`dependencies (#4096, #4098) + `googleapis-common-protos` dependencies (#4096, #4098) PyPI: https://pypi.org/project/google-cloud-logging/1.4.0/ diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 68c040298d7b..a81f28e85516 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -16,6 +16,8 @@ Logging configuration. .. _Cloud Logging API: https://cloud.google.com/logging .. 
_Client Library Documentation: https://googleapis.dev/python/logging/latest
 .. _Product Documentation: https://cloud.google.com/logging/docs
+.. _Setting Up Cloud Logging for Python: https://cloud.google.com/logging/docs/setup/python
+.. _Python's standard logging library: https://docs.python.org/2/library/logging.html
 
 Quick Start
 -----------
@@ -74,60 +76,11 @@ Windows
    \Scripts\activate
    \Scripts\pip.exe install google-cloud-logging
 
-Using the API
--------------
-
-.. code:: python
-
-    from google.cloud import logging_v2
-    client = logging_v2.LoggingServiceV2Client()
-
-    resource = {
-        "type": "global",
-        "labels": {
-            "project_id": "[PROJECT_ID]"
-        }
-    }
-
-    """
-    Log entries can be either LogEntry or dict.
-    You can describe the same data in the following format:
-
-    e = {
-        "log_name": "projects/[PROJECT_ID]/logs/test-logging",
-        "resource": resource,
-        "text_payload": "this is a log statement",
-    }
-    """
-    e = logging_v2.types.LogEntry(
-        log_name="projects/[PROJECT_ID]/logs/test-logging",  # optional
-        resource=resource,  # optional
-        text_payload="this is a log statement")
-
-    entries = [e]
-    response = client.write_log_entries(entries)
-
-.. code:: python
-
-    from google.cloud import logging
-    client = logging.Client()
-    logger = client.logger('log_name')
-    logger.log_text('A simple entry')  # API call
-
-Example of fetching entries:
-
-.. code:: python
-
-    from google.cloud import logging
-    client = logging.Client()
-    logger = client.logger('log_name')
-    for entry in logger.list_entries():
-        print(entry.payload)
-
 Next Steps
 ~~~~~~~~~~
+- Read the `Setting Up Cloud Logging for Python`_ How-to Guide
+- Read the `Product documentation`_ to learn more about the product and see
+  other How-to Guides.
 - Read the `Client Library Documentation`_ to see other available methods on the client.
-- Read the `Product documentation`_ to learn more about the product and see
-  How-to Guides.
diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py
index 7a03936bb41d..b507b408e364 100644
--- a/packages/google-cloud-logging/docs/conf.py
+++ b/packages/google-cloud-logging/docs/conf.py
@@ -20,12 +20,16 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 sys.path.insert(0, os.path.abspath(".."))
 
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
 __version__ = ""
 
 # -- General configuration ------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,24 +39,22 @@
     "sphinx.ext.autosummary",
     "sphinx.ext.intersphinx",
     "sphinx.ext.coverage",
+    "sphinx.ext.doctest",
     "sphinx.ext.napoleon",
     "sphinx.ext.todo",
     "sphinx.ext.viewcode",
+    "recommonmark",
 ]
 
 # autodoc/autosummary flags
 autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
 autosummary_generate = True
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]
 
-# Allow markdown includes (so releases.md can include CHANGLEOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
 # The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] @@ -93,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/packages/google-cloud-logging/docs/gapic/v2/api.rst b/packages/google-cloud-logging/docs/gapic/v2/api.rst deleted file mode 100644 index 2dc6bf6fcc6b..000000000000 --- a/packages/google-cloud-logging/docs/gapic/v2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Stackdriver Logging API -================================== - -.. automodule:: google.cloud.logging_v2 - :members: - :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/gapic/v2/types.rst b/packages/google-cloud-logging/docs/gapic/v2/types.rst deleted file mode 100644 index 5521d4f9bc12..000000000000 --- a/packages/google-cloud-logging/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Stackdriver Logging API Client -======================================== - -.. automodule:: google.cloud.logging_v2.types - :members: \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/handlers-container-engine.rst b/packages/google-cloud-logging/docs/handlers-container-engine.rst index a0c6b2bc9228..5286ec58b50c 100644 --- a/packages/google-cloud-logging/docs/handlers-container-engine.rst +++ b/packages/google-cloud-logging/docs/handlers-container-engine.rst @@ -1,5 +1,5 @@ -Google Container Engine Log Handler -=================================== +Google Kubernetes Engine Log Handler +==================================== .. automodule:: google.cloud.logging.handlers.container_engine :members: diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index f617201a90ab..347dc9f813e7 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -5,7 +5,6 @@ Documentation .. toctree:: :maxdepth: 3 - v1 v2 Changelog diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index 778327989b0f..7a86213472b6 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Testable usage examples for Stackdriver Logging API wrapper +"""Testable usage examples for Cloud Logging API wrapper Each example function takes a ``client`` argument (which must be an instance of :class:`google.cloud.logging.client.Client`) and uses it to perform a task diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index dccec49ea798..4714144f926d 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -336,7 +336,7 @@ logging handler can use different transports. The default is direct API call on each logging statement to write the entry. -.. _Google Container Engine: https://cloud.google.com/container-engine/ +.. 
_Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine fluentd logging handlers ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -347,7 +347,7 @@ which writes directly to the API, two other handlers are provided. recommended when running on the Google App Engine Flexible vanilla runtimes (i.e. your app.yaml contains ``runtime: python``), and :class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Container Engine`_ with the +, which is recommended when running on `Google Kubernetes Engine`_ with the Cloud Logging plugin enabled. :meth:`~google.cloud.logging.client.Client.get_default_handler` and @@ -356,6 +356,6 @@ the environment to automatically detect whether the code is running in these platforms and use the appropriate handler. In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Cloud logging. The handlers +in an expected format and forward them to Cloud Logging. The handlers provided help set the correct metadata such as log level so that logs can be filtered accordingly. diff --git a/packages/google-cloud-logging/docs/v1.rst b/packages/google-cloud-logging/docs/v1.rst deleted file mode 100644 index f4f79d377a65..000000000000 --- a/packages/google-cloud-logging/docs/v1.rst +++ /dev/null @@ -1,18 +0,0 @@ -v1 -============== -.. toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/v2.rst b/packages/google-cloud-logging/docs/v2.rst index 8dfc18b48171..567eabd7a4fd 100644 --- a/packages/google-cloud-logging/docs/v2.rst +++ b/packages/google-cloud-logging/docs/v2.rst @@ -3,5 +3,16 @@ v2 .. toctree:: :maxdepth: 2 - gapic/v2/api - gapic/v2/types \ No newline at end of file + usage + client + logger + entries + metric + sink + stdlib-usage + handlers + handlers-app-engine + handlers-container-engine + transports-sync + transports-thread + transports-base diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 2c976b6abeeb..c55ecf8741ce 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -152,15 +152,48 @@ def cover(session): def docs(session): """Build the docs for this library.""" + session.install('-e', '.') + session.install('sphinx', 'alabaster', 'recommonmark') + + shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + session.run( + 'sphinx-build', + '-W', # warnings as errors + '-T', # show full traceback on exception + '-N', # no colors + '-b', 'html', + '-d', os.path.join('docs', '_build', 'doctrees', ''), + os.path.join('docs', ''), + os.path.join('docs', '_build', 'html', ''), + ) + + +@nox.session(python="3.7") +def docfx(session): + """Build the docfx yaml files for this library.""" + session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index ee1b168aa46b..9504c980b950 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -34,7 +34,8 @@ s.move(library / "google/cloud/logging_v2/gapic") s.move(library / "tests/unit/gapic/v2") -s.move(library / "docs/gapic/v2") +# Don't include gapic library docs. Users should use the hand-written layer instead +# s.move(library / "docs/gapic/v2") # ---------------------------------------------------------------------------- # Add templated files From 863e9dc64d6c0cccfca823a6d5fc37ba0e625936 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 27 Oct 2020 16:55:49 -0700 Subject: [PATCH 288/855] refactor: use upstream noxfile (#79) --- packages/google-cloud-logging/docs/conf.py | 5 +- .../gapic/config_service_v2_client.py | 75 ++----- .../gapic/logging_service_v2_client.py | 43 +--- .../gapic/metrics_service_v2_client.py | 47 ++--- .../cloud/logging_v2/proto/log_entry_pb2.py | 4 +- .../logging_v2/proto/logging_config_pb2.py | 4 +- .../logging_v2/proto/logging_metrics_pb2.py | 8 +- .../cloud/logging_v2/proto/logging_pb2.py | 8 +- packages/google-cloud-logging/noxfile.py | 195 +++++++++++------- packages/google-cloud-logging/setup.py | 54 ++--- packages/google-cloud-logging/synth.py | 21 +- .../tests/unit/test_client.py | 5 +- .../tests/unit/test_logger.py | 15 +- 13 files changed, 216 insertions(+), 268 deletions(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index b507b408e364..1815da57acb6 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -347,10 +347,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index d3d08370e63a..37dafa34ac0e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -40,9 +40,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-logging", -).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class ConfigServiceV2Client(object): @@ -79,8 +77,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return 
a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", - billing_account=billing_account, + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -113,10 +110,7 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand( - "folders/{folder}", - folder=folder, - ) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): @@ -131,17 +125,14 @@ def folder_exclusion_path(cls, folder, exclusion): def folder_sink_path(cls, folder, sink): """Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", - folder=folder, - sink=sink, + "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", - organization=organization, + "organizations/{organization}", organization=organization, ) @classmethod @@ -166,17 +157,14 @@ def organization_sink_path(cls, organization, sink): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", - project=project, + "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", - project=project, - sink=sink, + "projects/{project}/sinks/{sink}", project=project, sink=sink, ) def __init__( @@ -265,12 +253,8 @@ def __init__( ) self.transport = transport else: - self.transport = ( - config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, - channel=channel, - credentials=credentials, - ) + self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: @@ -376,8 +360,7 @@ def list_sinks( ) request = logging_config_pb2.ListSinksRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -467,9 +450,7 @@ def get_sink( client_info=self._client_info, ) - request = logging_config_pb2.GetSinkRequest( - sink_name=sink_name, - ) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -573,9 +554,7 @@ def create_sink( ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, - sink=sink, - unique_writer_identity=unique_writer_identity, + parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, ) if metadata is None: metadata = [] @@ -780,9 +759,7 @@ def delete_sink( client_info=self._client_info, ) - request = logging_config_pb2.DeleteSinkRequest( - sink_name=sink_name, - ) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -880,8 +857,7 @@ def list_exclusions( ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -971,9 +947,7 @@ def get_exclusion( client_info=self._client_info, ) - 
request = logging_config_pb2.GetExclusionRequest( - name=name, - ) + request = logging_config_pb2.GetExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1064,8 +1038,7 @@ def create_exclusion( ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, - exclusion=exclusion, + parent=parent, exclusion=exclusion, ) if metadata is None: metadata = [] @@ -1169,9 +1142,7 @@ def update_exclusion( ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, - exclusion=exclusion, - update_mask=update_mask, + name=name, exclusion=exclusion, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1247,9 +1218,7 @@ def delete_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.DeleteExclusionRequest( - name=name, - ) + request = logging_config_pb2.DeleteExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1337,9 +1306,7 @@ def get_cmek_settings( client_info=self._client_info, ) - request = logging_config_pb2.GetCmekSettingsRequest( - name=name, - ) + request = logging_config_pb2.GetCmekSettingsRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1455,9 +1422,7 @@ def update_cmek_settings( ) request = logging_config_pb2.UpdateCmekSettingsRequest( - name=name, - cmek_settings=cmek_settings, - update_mask=update_mask, + name=name, cmek_settings=cmek_settings, update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index c823deacb1c9..c43506d1bb74 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -44,9 +44,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-logging", -).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class LoggingServiceV2Client(object): @@ -83,8 +81,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", - billing_account=billing_account, + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -99,35 +96,27 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand( - "folders/{folder}", - folder=folder, - ) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", - folder=folder, - log=log, + "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", - project=project, - log=log, + "projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return 
google.api_core.path_template.expand( - "organizations/{organization}", - organization=organization, + "organizations/{organization}", organization=organization, ) @classmethod @@ -143,8 +132,7 @@ def organization_log_path(cls, organization, log): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", - project=project, + "projects/{project}", project=project, ) def __init__( @@ -233,12 +221,8 @@ def __init__( ) self.transport = transport else: - self.transport = ( - logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, - channel=channel, - credentials=credentials, - ) + self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: @@ -327,9 +311,7 @@ def delete_log( client_info=self._client_info, ) - request = logging_pb2.DeleteLogRequest( - log_name=log_name, - ) + request = logging_pb2.DeleteLogRequest(log_name=log_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -794,10 +776,7 @@ def list_logs( client_info=self._client_info, ) - request = logging_pb2.ListLogsRequest( - parent=parent, - page_size=page_size, - ) + request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 87cf5b89d60d..0c80a5d43fe2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -46,9 +46,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-logging", -).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class MetricsServiceV2Client(object): @@ -85,41 +83,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", - billing_account=billing_account, + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand( - "folders/{folder}", - folder=folder, - ) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", - project=project, - metric=metric, + "projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", - organization=organization, + "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", - project=project, + "projects/{project}", project=project, 
) def __init__( @@ -208,12 +198,8 @@ def __init__( ) self.transport = transport else: - self.transport = ( - metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, - channel=channel, - credentials=credentials, - ) + self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: @@ -316,8 +302,7 @@ def list_log_metrics( ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -402,9 +387,7 @@ def get_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.GetLogMetricRequest( - metric_name=metric_name, - ) + request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -490,8 +473,7 @@ def create_log_metric( ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, - metric=metric, + parent=parent, metric=metric, ) if metadata is None: metadata = [] @@ -578,8 +560,7 @@ def update_log_metric( ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, - metric=metric, + metric_name=metric_name, metric=metric, ) if metadata is None: metadata = [] @@ -650,9 +631,7 @@ def delete_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.DeleteLogMetricRequest( - metric_name=metric_name, - ) + request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 9b0ef2205f20..f4805192b30a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -427,9 +427,7 @@ ), ], extensions=[], - nested_types=[ - _LOGENTRY_LABELSENTRY, - ], + nested_types=[_LOGENTRY_LABELSENTRY,], enum_types=[], serialized_options=_b( "\352A\271\001\n\032logging.googleapis.com/Log\022\035projects/{project}/logs/{log}\022'organizations/{organization}/logs/{log}\022\033folders/{folder}/logs/{log}\022,billingAccounts/{billing_account}/logs/{log}\032\010log_name" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 7e4ae83dd9d0..65fd2cff616a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -320,9 +320,7 @@ ], extensions=[], nested_types=[], - enum_types=[ - _LOGSINK_VERSIONFORMAT, - ], + enum_types=[_LOGSINK_VERSIONFORMAT,], serialized_options=_b( "\352A\270\001\n\033logging.googleapis.com/Sink\022\037projects/{project}/sinks/{sink}\022)organizations/{organization}/sinks/{sink}\022\035folders/{folder}/sinks/{sink}\022.billingAccounts/{billing_account}/sinks/{sink}" ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 08eaf8099fc0..01e308fb741d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -318,12 +318,8 @@ ), ], extensions=[], - nested_types=[ - _LOGMETRIC_LABELEXTRACTORSENTRY, - ], - enum_types=[ - _LOGMETRIC_APIVERSION, - ], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], + enum_types=[_LOGMETRIC_APIVERSION,], serialized_options=_b( "\352AD\n\035logging.googleapis.com/Metric\022#projects/{project}/metrics/{metric}" ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py index 08cc2b49e507..35c9b9c52449 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -274,9 +274,7 @@ ), ], extensions=[], - nested_types=[ - _WRITELOGENTRIESREQUEST_LABELSENTRY, - ], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -391,9 +389,7 @@ ), ], extensions=[], - nested_types=[ - _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - ], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index c55ecf8741ce..11fc0bf286b2 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,39 +14,33 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import absolute_import +# Generated by synthtool. DO NOT EDIT! +from __future__ import absolute_import import os import shutil -import sys import nox -UNIT_TEST_DEPS = ( - 'mock', - 'pytest', - 'pytest-cov', - 'flask', - 'webob', - 'django' -) +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") + +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "black") + session.install("flake8", BLACK_VERSION) session.run( - "black", - "--check", - "google", - "tests", - "docs", + "black", "--check", *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -54,17 +50,18 @@ def blacken(session): """Run black. Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" - session.install("black") + session.install(BLACK_VERSION) session.run( - "black", - "google", - "tests", - "docs", + "black", *BLACK_PATHS, ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -72,71 +69,75 @@ def lint_setup_py(session): def default(session): - """Default unit test session. - """ - # Install all test dependencies, then install this package in-place. - deps = UNIT_TEST_DEPS - - session.install(*deps) - session.install('-e', '.') + session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django") + session.install("-e", ".") # Run py.test against the unit tests. session.run( - 'py.test', - '--quiet', - '--cov=google.cloud.logging', - '--cov=tests.unit', - '--cov-append', - '--cov-config=.coveragerc', - '--cov-report=', - '--cov-fail-under=0', - 'tests/unit', - *session.posargs + "py.test", + "--quiet", + "--cov=google.cloud.logging", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, ) -@nox.session(python=['3.5', '3.6', '3.7']) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=['3.6']) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - session.skip('Credentials must be set via environment variable.') + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") # Use pre-release gRPC for system tests. - session.install('--pre', 'grpcio') + session.install("--pre", "grpcio") # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install('mock', 'pytest') - systest_deps = [ - 'google-cloud-bigquery', - 'google-cloud-pubsub', - 'google-cloud-storage', - 'google-cloud-testutils', - ] - for systest_dep in systest_deps: - session.install(systest_dep) - - session.install('-e', '.') + session.install( + "mock", + "pytest", + "google-cloud-testutils", + "google-cloud-bigquery", + "google-cloud-pubsub", + "google-cloud-storage", + "google-cloud-testutils", + ) + session.install("-e", ".") # Run py.test against the system tests. 
- session.run( - 'py.test', - '-vvv', - '-s', - 'tests/system', - *session.posargs) + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -148,23 +149,26 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") + +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" - session.install('-e', '.') - session.install('sphinx', 'alabaster', 'recommonmark') + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") - shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'sphinx-build', - '-W', # warnings as errors - '-T', # show full traceback on exception - '-N', # no colors - '-b', 'html', - '-d', os.path.join('docs', '_build', 'doctrees', ''), - os.path.join('docs', ''), - os.path.join('docs', '_build', 'html', ''), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) @@ -172,6 +176,39 @@ def docs(session): def docfx(session): """Build the docfx yaml files for this library.""" + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + session.install("-e", ".") # sphinx-docfx-yaml supports up to sphinx version 1.5.5. # https://github.com/docascode/sphinx-docfx-yaml/issues/97 diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index cf56847bae32..ebd73c131ba4 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -20,40 +20,39 @@ # Package metadata. -name = 'google-cloud-logging' -description = 'Stackdriver Logging API client library' +name = "google-cloud-logging" +description = "Stackdriver Logging API client library" version = "1.15.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' -release_status = 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.15.0, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", ] -extras = { -} +extras = {} # Setup boilerplate below this line. 
package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() - if package.startswith('google')] + package for package in setuptools.find_packages() if package.startswith("google") +] # Determine which namespaces are needed. -namespaces = ['google'] -if 'google.cloud' in packages: - namespaces.append('google.cloud') +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") setuptools.setup( @@ -61,28 +60,29 @@ version=version, description=description, long_description=readme, - author='Google LLC', - author_email='googleapis-packages@google.com', - license='Apache 2.0', - url='https://github.com/googleapis/python-logging', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/googleapis/python-logging", classifiers=[ release_status, - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Operating System :: OS Independent', - 'Topic :: Internet', + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Operating System :: OS Independent", + "Topic :: Internet", ], - platforms='Posix; MacOS X; Windows', + platforms="Posix; MacOS X; Windows", packages=packages, namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires='>=3.5', + python_requires=">=3.5", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 9504c980b950..45a49f131dc1 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -40,8 +40,23 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=95, cov_level=100) -# Don't move noxfile. 
logging has special testing setups for django, etc -s.move(templated_files, excludes="noxfile.py") +templated_files = common.py_library( + unit_cov_level=95, + cov_level=99, + system_test_python_versions = ['3.8'], + unit_test_python_versions = ['3.5', '3.6', '3.7', '3.8'], + system_test_external_dependencies = [ + 'google-cloud-bigquery', + 'google-cloud-pubsub', + 'google-cloud-storage', + 'google-cloud-testutils' + ], + unit_test_external_dependencies = [ + 'flask', + 'webob', + 'django' + ], +) +s.move(templated_files, excludes=[".coveragerc"]) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 101baf63b297..9e2a15bb4696 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -293,10 +293,7 @@ def test_list_entries_defaults(self): { "path": "/entries:list", "method": "POST", - "data": { - "filter": "removed", - "projectIds": [self.PROJECT], - }, + "data": {"filter": "removed", "projectIds": [self.PROJECT]}, }, ) # verify that default filter is 24 hours diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 7cc870e3a916..966a515e0af5 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -514,10 +514,7 @@ def test_list_entries_defaults(self): { "path": "/entries:list", "method": "POST", - "data": { - "filter": "removed", - "projectIds": [self.PROJECT], - }, + "data": {"filter": "removed", "projectIds": [self.PROJECT]}, }, ) # verify that default filter is 24 hours @@ -573,10 +570,7 @@ def test_list_entries_explicit(self): }, ) # verify that default filter is 24 hours - LOG_FILTER = "logName=projects/%s/logs/%s" % ( - self.PROJECT, - self.LOGGER_NAME, - ) + LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) combined_filter = ( INPUT_FILTER + " AND " @@ -619,10 +613,7 @@ def test_list_entries_explicit_timestamp(self): self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload - LOG_FILTER = "logName=projects/%s/logs/%s" % ( - self.PROJECT, - self.LOGGER_NAME, - ) + LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) combined_filter = INPUT_FILTER + " AND " + LOG_FILTER self.assertEqual( client._connection._called_with, From c81e8836e16cf2386ace5ff15c5c726ee8424731 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 4 Aug 2016 10:39:35 -0700 Subject: [PATCH 289/855] Update logging samples to fit new style guide and match Node.js samples. 
[(#435)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/435)
---
 .../samples/snippets/README.md | 36 +++++
 .../samples/snippets/export.py | 144 ++++++++++++++++++
 .../samples/snippets/export_test.py | 71 +++++++++
 .../samples/snippets/requirements.txt | 1 +
 .../samples/snippets/snippets.py | 106 +++++++++++++
 .../samples/snippets/snippets_test.py | 45 ++++++
 6 files changed, 403 insertions(+)
 create mode 100644 packages/google-cloud-logging/samples/snippets/README.md
 create mode 100644 packages/google-cloud-logging/samples/snippets/export.py
 create mode 100644 packages/google-cloud-logging/samples/snippets/export_test.py
 create mode 100644 packages/google-cloud-logging/samples/snippets/requirements.txt
 create mode 100644 packages/google-cloud-logging/samples/snippets/snippets.py
 create mode 100644 packages/google-cloud-logging/samples/snippets/snippets_test.py
diff --git a/packages/google-cloud-logging/samples/snippets/README.md b/packages/google-cloud-logging/samples/snippets/README.md
new file mode 100644
index 000000000000..c9042db88ff3
--- /dev/null
+++ b/packages/google-cloud-logging/samples/snippets/README.md
@@ -0,0 +1,36 @@
+# Stackdriver Logging v2 API Samples
+
+`snippets.py` is a simple command-line program to demonstrate writing to a log,
+listing its entries, and deleting it.
+
+`export.py` demonstrates how to interact with sinks, which are used to export
+logs to Google Cloud Storage, Cloud Pub/Sub, or BigQuery. The sample uses
+Google Cloud Storage, but can be easily adapted for other outputs.
+
+## Prerequisites
+
+All samples require a [Google Cloud Project](https://console.cloud.google.com).
+
+To run `export.py`, you will also need a Google Cloud Storage bucket.
+
+    gsutil mb gs://[YOUR_PROJECT_ID]
+
+You must add Cloud Logging as an owner of the bucket. To do so, add
+`cloud-logs@google.com` as an owner. See the
+[exporting logs](https://cloud.google.com/logging/docs/export/configure_export#configuring_log_sinks)
+docs for complete details.
+
+## Running locally
+
+Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication:
+
+    gcloud beta auth application-default login
+
+Run the samples:
+
+    python snippets.py -h
+    python export.py -h
+
diff --git a/packages/google-cloud-logging/samples/snippets/export.py b/packages/google-cloud-logging/samples/snippets/export.py
new file mode 100644
index 000000000000..b24dd9878da7
--- /dev/null
+++ b/packages/google-cloud-logging/samples/snippets/export.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
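+#
+# NOTE: Cloud Logging must be able to write to the destination bucket before
+# a sink will export anything. One way to grant that access (a sketch,
+# assuming the gsutil CLI is installed) is:
+#
+#     gsutil acl ch -g cloud-logs@google.com:O gs://[YOUR_PROJECT_ID]
+#
+# See the README in this directory for details.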
+ +import argparse + +from gcloud import logging + + +def list_sinks(): + """Lists all sinks.""" + logging_client = logging.Client() + + sinks = [] + token = None + while True: + new_sinks, token = logging_client.list_sinks(page_token=token) + sinks.extend(new_sinks) + if token is None: + break + + if not sinks: + print('No sinks.') + + for sink in sinks: + print('{}: {} -> {}'.format(sink.name, sink.filter_, sink.destination)) + + +def create_sink(sink_name, destination_bucket, filter_): + """Creates a sink to export logs to the given Cloud Storage bucket. + + The filter determines which logs this sink matches and will be exported + to the destination. For example a filter of 'severity>=INFO' will send + all logs that have a severity of INFO or greater to the destination. + See https://cloud.google.com/logging/docs/view/advanced_filters for more + filter information. + """ + logging_client = logging.Client() + + # The destination can be a Cloud Storage bucket, a Cloud Pub/Sub topic, + # or a BigQuery dataset. In this case, it is a Cloud Storage Bucket. + # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for + # information on the destination format. + destination = 'storage.googleapis.com/{bucket}'.format( + bucket=destination_bucket) + + sink = logging_client.sink( + sink_name, + filter_, + destination) + + if sink.exists(): + print('Sink {} already exists.'.format(sink.name)) + return + + sink.create() + print('Created sink {}'.format(sink.name)) + + +def update_sink(sink_name, filter_): + """Changes a sink's filter. + + The filter determines which logs this sink matches and will be exported + to the destination. For example a filter of 'severity>=INFO' will send + all logs that have a severity of INFO or greater to the destination. + See https://cloud.google.com/logging/docs/view/advanced_filters for more + filter information. 
+ """ + logging_client = logging.Client() + sink = logging_client.sink(sink_name) + + sink.reload() + + sink.filter_ = filter_ + print('Updated sink {}'.format(sink.name)) + sink.update() + # [END update] + + +def delete_sink(sink_name): + """Deletes a sink.""" + logging_client = logging.Client() + sink = logging_client.sink(sink_name) + + sink.delete() + + print('Deleted sink {}'.format(sink.name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('list', help=list_sinks.__doc__) + + create_parser = subparsers.add_parser('create', help=list_sinks.__doc__) + create_parser.add_argument( + 'sink_name', + help='Name of the log export sink.') + create_parser.add_argument( + 'destination_bucket', + help='Cloud Storage bucket where logs will be exported.') + create_parser.add_argument( + 'filter', + help='The filter used to match logs.') + + update_parser = subparsers.add_parser('update', help=update_sink.__doc__) + update_parser.add_argument( + 'sink_name', + help='Name of the log export sink.') + update_parser.add_argument( + 'filter', + help='The filter used to match logs.') + + delete_parser = subparsers.add_parser('delete', help=delete_sink.__doc__) + delete_parser.add_argument( + 'sink_name', + help='Name of the log export sink.') + + args = parser.parse_args() + + if args.command == 'list': + list_sinks() + elif args.command == 'create': + create_sink(args.sink_name, args.destination_bucket, args.filter) + elif args.command == 'update': + update_sink(args.sink_name, args.filter) + elif args.command == 'delete': + delete_sink(args.sink_name) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py new file mode 100644 index 000000000000..b59c717ac2db --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -0,0 +1,71 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import export +from gcloud import logging +from gcp.testing import eventually_consistent +import pytest + +TEST_SINK_NAME = 'example_sink' +TEST_SINK_FILTER = 'severity>=CRITICAL' + + +@pytest.fixture +def example_sink(cloud_config): + client = logging.Client() + + sink = client.sink( + TEST_SINK_NAME, + TEST_SINK_FILTER, + 'storage.googleapis.com/{bucket}'.format( + bucket=cloud_config.storage_bucket)) + + if sink.exists(): + sink.delete() + + sink.create() + + return sink + + +def test_list(example_sink, capsys): + @eventually_consistent.call + def _(): + export.list_sinks() + out, _ = capsys.readouterr() + assert example_sink.name in out + + +def test_create(cloud_config, capsys): + export.create_sink( + TEST_SINK_NAME, + TEST_SINK_FILTER, + 'storage.googleapis.com/{bucket}'.format( + bucket=cloud_config.storage_bucket)) + + out, _ = capsys.readouterr() + assert TEST_SINK_NAME in out + + +def test_update(example_sink, capsys): + updated_filter = 'severity>=INFO' + export.update_sink(TEST_SINK_NAME, updated_filter) + + example_sink.reload() + assert example_sink.filter_ == updated_filter + + +def test_delete(example_sink, capsys): + export.delete_sink(TEST_SINK_NAME) + assert not example_sink.exists() diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt new file mode 100644 index 000000000000..868847aebf3e --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -0,0 +1 @@ +gcloud==0.17.0 diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py new file mode 100644 index 000000000000..f73143ec8bc2 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on logs and +log entries with Stackdriver Logging. + +For more information, see the README.md under /logging and the +documentation at https://cloud.google.com/logging/docs. +""" + +import argparse + +from gcloud import logging + + +def write_entry(logger_name): + """Writes log entries to the given logger.""" + logging_client = logging.Client() + + # This log can be found in the Cloud Logging console under 'Custom Logs'. + logger = logging_client.logger(logger_name) + + # Make a simple text log + logger.log_text('Hello, world!') + + # Simple text log with severity. + logger.log_text('Goodbye, world!', severity='ERROR') + + # Struct log. The struct can be any JSON-serializable dictionary. 
+ logger.log_struct({ + 'name': 'King Arthur', + 'quest': 'Find the Holy Grail', + 'favorite_color': 'Blue' + }) + + print('Wrote logs to {}.'.format(logger.name)) + + +def list_entries(logger_name): + """Lists the most recent entries for a given logger.""" + logging_client = logging.Client() + logger = logging_client.logger(logger_name) + + print('Listing entries for logger {}:'.format(logger.name)) + + entries = [] + page_token = None + + while True: + new_entries, page_token = logger.list_entries(page_token=page_token) + entries.extend(new_entries) + if not page_token: + break + + for entry in entries: + timestamp = entry.timestamp.isoformat() + print('* {}: {}'.format + (timestamp, entry.payload)) + + +def delete_logger(logger_name): + """Deletes a logger and all its entries. + + Note that a deletion can take several minutes to take effect. + """ + logging_client = logging.Client() + logger = logging_client.logger(logger_name) + + logger.delete() + + print('Deleted all logging entries for {}'.format(logger.name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + 'logger_name', help='Logger name', default='example_log') + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('list', help=list_entries.__doc__) + subparsers.add_parser('write', help=write_entry.__doc__) + subparsers.add_parser('delete', help=delete_logger.__doc__) + + args = parser.parse_args() + + if args.command == 'list': + list_entries(args.logger_name) + elif args.command == 'write': + write_entry(args.logger_name) + elif args.command == 'delete': + delete_logger(args.logger_name) diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py new file mode 100644 index 000000000000..6a1004d46090 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -0,0 +1,45 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gcloud import logging +from gcp.testing import eventually_consistent +import pytest +import snippets + +TEST_LOGGER_NAME = 'example_log' + + +@pytest.fixture +def example_log(): + client = logging.Client() + logger = client.logger(TEST_LOGGER_NAME) + text = 'Hello, world.' + logger.log_text(text) + return text + + +def test_list(example_log, capsys): + @eventually_consistent.call + def _(): + snippets.list_entries(TEST_LOGGER_NAME) + out, _ = capsys.readouterr() + assert example_log in out + + +def test_write(): + snippets.write_entry(TEST_LOGGER_NAME) + + +def test_delete(): + snippets.delete_logger(TEST_LOGGER_NAME) From 22168737335fb45128d352ddb6884604aff203da Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 4 Aug 2016 11:23:52 -0700 Subject: [PATCH 290/855] Fix test conflict. 
Change-Id: I67e149dc43ebdb11144ac3839e062aa4668ebb2e
---
 .../google-cloud-logging/samples/snippets/export_test.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py
index b59c717ac2db..6b7a5461e5bd 100644
--- a/packages/google-cloud-logging/samples/snippets/export_test.py
+++ b/packages/google-cloud-logging/samples/snippets/export_test.py
@@ -48,6 +48,12 @@ def test_create(cloud_config, capsys):
+    # Delete the sink if it exists, otherwise the test will fail in conflict.
+    client = logging.Client()
+    sink = client.sink(TEST_SINK_NAME)
+    if sink.exists():
+        sink.delete()
+
     export.create_sink(
         TEST_SINK_NAME,
         TEST_SINK_FILTER,
@@ -56,6 +62,7 @@ def test_create(cloud_config, capsys):
 
     out, _ = capsys.readouterr()
     assert TEST_SINK_NAME in out
+    assert sink.exists()
 
 
 def test_update(example_sink, capsys):

From b11e63a0610c9ef8851b48073d735d9998fe4a24 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Thu, 4 Aug 2016 13:31:20 -0700
Subject: [PATCH 291/855] Fix logging test.

Change-Id: I866edcc956fda2265dd7af7b774336930ec6a151
---
 .../samples/snippets/export_test.py | 56 +++++++++++--------
 1 file changed, 34 insertions(+), 22 deletions(-)

diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py
index 6b7a5461e5bd..5a0218aaaeee 100644
--- a/packages/google-cloud-logging/samples/snippets/export_test.py
+++ b/packages/google-cloud-logging/samples/snippets/export_test.py
@@ -12,31 +12,42 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import random
+import string
+
 import export
 from gcloud import logging
 from gcp.testing import eventually_consistent
 import pytest
 
-TEST_SINK_NAME = 'example_sink'
+TEST_SINK_NAME_TMPL = 'example_sink_{}'
 TEST_SINK_FILTER = 'severity>=CRITICAL'
 
 
-@pytest.fixture
+def _random_id():
+    return ''.join(
+        random.choice(string.ascii_uppercase + string.digits)
+        for _ in range(6))
+
+
+@pytest.yield_fixture
 def example_sink(cloud_config):
     client = logging.Client()
 
     sink = client.sink(
-        TEST_SINK_NAME,
+        TEST_SINK_NAME_TMPL.format(_random_id()),
        TEST_SINK_FILTER,
         'storage.googleapis.com/{bucket}'.format(
             bucket=cloud_config.storage_bucket))
 
-    if sink.exists():
-        sink.delete()
-
     sink.create()
 
-    return sink
+    yield sink
+
+    try:
+        sink.delete()
+    except:
+        pass
 
 
 def test_list(example_sink, capsys):
@@ -48,31 +59,32 @@ def _():
 
 
 def test_create(cloud_config, capsys):
-    # Delete the sink if it exists, otherwise the test will fail in conflict.
-    client = logging.Client()
-    sink = client.sink(TEST_SINK_NAME)
-    if sink.exists():
-        sink.delete()
-
-    export.create_sink(
-        TEST_SINK_NAME,
-        TEST_SINK_FILTER,
-        'storage.googleapis.com/{bucket}'.format(
-            bucket=cloud_config.storage_bucket))
+    sink_name = TEST_SINK_NAME_TMPL.format(_random_id())
+
+    try:
+        export.create_sink(
+            sink_name,
+            cloud_config.storage_bucket,
+            TEST_SINK_FILTER)
+    # Clean-up the temporary sink.
+ finally: + try: + logging.Client().sink(sink_name).delete() + except: + pass out, _ = capsys.readouterr() - assert TEST_SINK_NAME in out - assert sink.exists() + assert sink_name in out def test_update(example_sink, capsys): updated_filter = 'severity>=INFO' - export.update_sink(TEST_SINK_NAME, updated_filter) + export.update_sink(example_sink.name, updated_filter) example_sink.reload() assert example_sink.filter_ == updated_filter def test_delete(example_sink, capsys): - export.delete_sink(TEST_SINK_NAME) + export.delete_sink(example_sink.name) assert not example_sink.exists() From 7ae18d26b078e4fcb94abbb16fd7d6e8d488876d Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 16 Aug 2016 13:32:42 -0700 Subject: [PATCH 292/855] Auto-update dependencies. [(#456)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/456) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 868847aebf3e..2beeafe63a8a 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.17.0 +gcloud==0.18.1 From aecb73ecb8a9a665fc407fb6497dd636c0ca3427 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 19 Aug 2016 13:56:28 -0700 Subject: [PATCH 293/855] Fix import order lint errors Change-Id: Ieaf7237fc6f925daec46a07d2e81a452b841198a --- packages/google-cloud-logging/samples/snippets/export_test.py | 3 ++- .../google-cloud-logging/samples/snippets/snippets_test.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index 5a0218aaaeee..d4dfd681ea31 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -15,11 +15,12 @@ import random import string -import export from gcloud import logging from gcp.testing import eventually_consistent import pytest +import export + TEST_SINK_NAME_TMPL = 'example_sink_{}' TEST_SINK_FILTER = 'severity>=CRITICAL' diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 6a1004d46090..f41f52fb5575 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -15,6 +15,7 @@ from gcloud import logging from gcp.testing import eventually_consistent import pytest + import snippets TEST_LOGGER_NAME = 'example_log' From 46d17975197941e60095444f6561c781d7c7eba6 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Fri, 23 Sep 2016 09:48:46 -0700 Subject: [PATCH 294/855] Auto-update dependencies. 
[(#540)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/540) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 2beeafe63a8a..dfb42aaaaaf2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.1 +gcloud==0.18.2 From 57f9923bf1b07008681ad1a6b6fd4f47ed6fed78 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Sep 2016 11:34:45 -0700 Subject: [PATCH 295/855] Auto-update dependencies. [(#542)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/542) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index dfb42aaaaaf2..97a207d3aad0 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.2 +gcloud==0.18.3 From ac6715dea367d6a3c25fc518041fa4ec60ef9061 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 29 Sep 2016 20:51:47 -0700 Subject: [PATCH 296/855] Move to google-cloud [(#544)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/544) --- packages/google-cloud-logging/samples/snippets/export.py | 2 +- packages/google-cloud-logging/samples/snippets/export_test.py | 2 +- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- packages/google-cloud-logging/samples/snippets/snippets.py | 2 +- packages/google-cloud-logging/samples/snippets/snippets_test.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export.py b/packages/google-cloud-logging/samples/snippets/export.py index b24dd9878da7..20367c274b21 100644 --- a/packages/google-cloud-logging/samples/snippets/export.py +++ b/packages/google-cloud-logging/samples/snippets/export.py @@ -16,7 +16,7 @@ import argparse -from gcloud import logging +from google.cloud import logging def list_sinks(): diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index d4dfd681ea31..8f1c299d7e76 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -15,8 +15,8 @@ import random import string -from gcloud import logging from gcp.testing import eventually_consistent +from google.cloud import logging import pytest import export diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 97a207d3aad0..303f67c091c9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.3 +google-cloud-logging==0.20.0 diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py index f73143ec8bc2..0280cfbe4063 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py 
@@ -23,7 +23,7 @@ import argparse -from gcloud import logging +from google.cloud import logging def write_entry(logger_name): diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index f41f52fb5575..86eac1180a04 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gcloud import logging from gcp.testing import eventually_consistent +from google.cloud import logging import pytest import snippets From 7ad03522136b3fec09ce4db155a139c0458edc08 Mon Sep 17 00:00:00 2001 From: Jason Dobry Date: Wed, 5 Oct 2016 09:56:04 -0700 Subject: [PATCH 297/855] Add new "quickstart" samples [(#547)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/547) --- .../samples/snippets/quickstart.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 packages/google-cloud-logging/samples/snippets/quickstart.py diff --git a/packages/google-cloud-logging/samples/snippets/quickstart.py b/packages/google-cloud-logging/samples/snippets/quickstart.py new file mode 100644 index 000000000000..19409c776111 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/quickstart.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def run_quickstart(): + # [START logging_quickstart] + # Imports the Google Cloud client library + from google.cloud import logging + + # Instantiates a client + logging_client = logging.Client() + + # The name of the log to write to + log_name = 'my-log' + # Selects the log to write to + logger = logging_client.logger(log_name) + + # The data to log + text = 'Hello, world!' + + # Writes the log entry + logger.log_text(text) + + print('Logged: {}'.format(text)) + # [END logging_quickstart] + + +if __name__ == '__main__': + run_quickstart() From 3a63f130c32e7976633644086649b64054ec9d0f Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 12 Oct 2016 10:48:57 -0700 Subject: [PATCH 298/855] Quickstart tests [(#569)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/569) * Add tests for quickstarts * Update secrets --- .../samples/snippets/quickstart_test.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 packages/google-cloud-logging/samples/snippets/quickstart_test.py diff --git a/packages/google-cloud-logging/samples/snippets/quickstart_test.py b/packages/google-cloud-logging/samples/snippets/quickstart_test.py new file mode 100644 index 000000000000..1b49cd1263a9 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/quickstart_test.py @@ -0,0 +1,22 @@ +# Copyright 2016 Google Inc. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import quickstart
+
+
+def test_quickstart(capsys):
+    quickstart.run_quickstart()
+    out, _ = capsys.readouterr()
+    assert 'Logged' in out

From b4b09891da8d96dd55b4b024642f6733aeb42447 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Mon, 24 Oct 2016 11:03:17 -0700
Subject: [PATCH 299/855] Generate readmes for most service samples
 [(#599)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/599)

---
 .../samples/snippets/README.md | 36 ----
 .../samples/snippets/README.rst | 163 ++++++++++++++++++
 .../samples/snippets/README.rst.in | 26 +++
 3 files changed, 189 insertions(+), 36 deletions(-)
 delete mode 100644 packages/google-cloud-logging/samples/snippets/README.md
 create mode 100644 packages/google-cloud-logging/samples/snippets/README.rst
 create mode 100644 packages/google-cloud-logging/samples/snippets/README.rst.in

diff --git a/packages/google-cloud-logging/samples/snippets/README.md b/packages/google-cloud-logging/samples/snippets/README.md
deleted file mode 100644
index c9042db88ff3..000000000000
--- a/packages/google-cloud-logging/samples/snippets/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# Stackdriver Logging v2 API Samples
-
-`snippets.py` is a simple command-line program to demonstrate writing to a log,
-listing its entries, and deleting it.
-
-`export.py` demonstrates how to interact with sinks which are used to export
-logs to Google Cloud Storage, Cloud Pub/Sub, or BigQuery. The sample uses
-Google Cloud Storage, but can be easily adapted for other outputs.
-
-
-
-
-## Prerequisites
-
-All samples require a [Google Cloud Project](https://console.cloud.google.com).
-
-To run `export.py`, you will also need a Google Cloud Storage Bucket.
-
-    gsutil mb gs://[YOUR_PROJECT_ID]
-
-You must add Cloud Logging as an owner to the bucket. To do so, add
-`cloud-logs@google.com` as an owner to the bucket. See the
-[exporting logs](https://cloud.google.com/logging/docs/export/configure_export#configuring_log_sinks)
-docs for complete details.
-
-# Running locally
-
-Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication:
-
-    gcloud beta auth application-default login
-
-Run the samples:
-
-    python snippets.py -h
-    python export.py -h
-
diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst
new file mode 100644
index 000000000000..2647c799cff9
--- /dev/null
+++ b/packages/google-cloud-logging/samples/snippets/README.rst
@@ -0,0 +1,163 @@
+.. This file is automatically generated. Do not edit this file directly.
+
+Stackdriver Logging Python Samples
+===============================================================================
+
+This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services.
+
+
+
+
+..
_Stackdriver Logging: https://cloud.google.com/logging/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] logger_name {list,write,delete} ... + + This application demonstrates how to perform basic operations on logs and + log entries with Stackdriver Logging. + + For more information, see the README.md under /logging and the + documentation at https://cloud.google.com/logging/docs. + + positional arguments: + logger_name Logger name + {list,write,delete} + list Lists the most recent entries for a given logger. + write Writes log entries to the given logger. + delete Deletes a logger and all its entries. Note that a + deletion can take several minutes to take effect. + + optional arguments: + -h, --help show this help message and exit + + +Export ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python export.py + + usage: export.py [-h] {list,create,update,delete} ... + + positional arguments: + {list,create,update,delete} + list Lists all sinks. + create Lists all sinks. + update Changes a sink's filter. The filter determines which + logs this sink matches and will be exported to the + destination. 
For example a filter of 'severity>=INFO' + will send all logs that have a severity of INFO or + greater to the destination. See https://cloud.google.c + om/logging/docs/view/advanced_filters for more filter + information. + delete Deletes a sink. + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/snippets/README.rst.in b/packages/google-cloud-logging/samples/snippets/README.rst.in new file mode 100644 index 000000000000..50862fa1da9b --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/README.rst.in @@ -0,0 +1,26 @@ +# This file is used to generate README.rst + +product: + name: Stackdriver Logging + short_name: Stackdriver Logging + url: https://cloud.google.com/logging/docs + description: > + `Stackdriver Logging`_ allows you to store, search, analyze, monitor, + and alert on log data and events from Google Cloud Platform and Amazon + Web Services. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Snippets + file: snippets.py + show_help: true +- name: Export + file: export.py + show_help: true + +cloud_client_library: true From c18e26b137888b2e2dc8d210bb21c0c640279ef3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 15 Nov 2016 14:58:27 -0800 Subject: [PATCH 300/855] Update samples to support latest Google Cloud Python [(#656)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/656) --- .../google-cloud-logging/samples/snippets/export.py | 8 +------- .../samples/snippets/requirements.txt | 2 +- .../google-cloud-logging/samples/snippets/snippets.py | 11 +---------- 3 files changed, 3 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export.py b/packages/google-cloud-logging/samples/snippets/export.py index 20367c274b21..f8c1f0c5f954 100644 --- a/packages/google-cloud-logging/samples/snippets/export.py +++ b/packages/google-cloud-logging/samples/snippets/export.py @@ -23,13 +23,7 @@ def list_sinks(): """Lists all sinks.""" logging_client = logging.Client() - sinks = [] - token = None - while True: - new_sinks, token = logging_client.list_sinks(page_token=token) - sinks.extend(new_sinks) - if token is None: - break + sinks = list(logging_client.list_sinks()) if not sinks: print('No sinks.') diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 303f67c091c9..ce5d85c60e7c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==0.20.0 +google-cloud-logging==0.21.0 diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py index 0280cfbe4063..8a31066fa3bb 
100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -56,16 +56,7 @@ def list_entries(logger_name): print('Listing entries for logger {}:'.format(logger.name)) - entries = [] - page_token = None - - while True: - new_entries, page_token = logger.list_entries(page_token=page_token) - entries.extend(new_entries) - if not page_token: - break - - for entry in entries: + for entry in logger.list_entries(): timestamp = entry.timestamp.isoformat() print('* {}: {}'.format (timestamp, entry.payload)) From 7a48312a3614526dd38d84a81a4aeb6feedae5f7 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 13 Dec 2016 09:54:02 -0800 Subject: [PATCH 301/855] Auto-update dependencies. [(#715)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/715) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index ce5d85c60e7c..4b9f9fcb756e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==0.21.0 +google-cloud-logging==0.22.0 From 34caf5dd64d89698289aeb7f134d88dcf5d032e6 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 4 Apr 2017 16:08:30 -0700 Subject: [PATCH 302/855] Remove cloud config fixture [(#887)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/887) * Remove cloud config fixture * Fix client secrets * Fix bigtable instance --- .../samples/snippets/export_test.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index 8f1c299d7e76..99b78f8e43bf 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import random import string @@ -21,6 +22,7 @@ import export +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] TEST_SINK_NAME_TMPL = 'example_sink_{}' TEST_SINK_FILTER = 'severity>=CRITICAL' @@ -32,14 +34,13 @@ def _random_id(): @pytest.yield_fixture -def example_sink(cloud_config): +def example_sink(): client = logging.Client() sink = client.sink( TEST_SINK_NAME_TMPL.format(_random_id()), TEST_SINK_FILTER, - 'storage.googleapis.com/{bucket}'.format( - bucket=cloud_config.storage_bucket)) + 'storage.googleapis.com/{bucket}'.format(bucket=BUCKET)) sink.create() @@ -59,13 +60,13 @@ def _(): assert example_sink.name in out -def test_create(cloud_config, capsys): +def test_create(capsys): sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) try: export.create_sink( sink_name, - cloud_config.storage_bucket, + BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. 
finally: From 6eb908ca0973c40d438d64d16ee7b87f00fe70f2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 12 Apr 2017 15:14:35 -0700 Subject: [PATCH 303/855] Fix reference to our testing tools --- packages/google-cloud-logging/samples/snippets/export_test.py | 2 +- packages/google-cloud-logging/samples/snippets/snippets_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index 99b78f8e43bf..b53b8978f7b8 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -16,7 +16,7 @@ import random import string -from gcp.testing import eventually_consistent +from gcp_devrel.testing import eventually_consistent from google.cloud import logging import pytest diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 86eac1180a04..f22fbc8f1bfb 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gcp.testing import eventually_consistent +from gcp_devrel.testing import eventually_consistent from google.cloud import logging import pytest From b2defe5fda3964cbae796ef86541a86f4c673822 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 24 Apr 2017 13:12:09 -0700 Subject: [PATCH 304/855] Auto-update dependencies. [(#914)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/914) * Auto-update dependencies. * xfail the error reporting test * Fix lint --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4b9f9fcb756e..4942b24ad744 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==0.22.0 +google-cloud-logging==1.0.0 From 56590ce253540d5b462022dd07408c9e1d8ae1c7 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Apr 2017 09:54:41 -0700 Subject: [PATCH 305/855] Re-generate all readmes --- packages/google-cloud-logging/samples/snippets/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 2647c799cff9..767abc1031e5 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -26,7 +26,7 @@ authentication: .. code-block:: bash - gcloud beta auth application-default login + gcloud auth application-default login #. 
When running on App Engine or Compute Engine, credentials are already From 9ad8fa728288501599e799d09ad07f3e1e01c37d Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Tue, 23 May 2017 17:01:25 -0700 Subject: [PATCH 306/855] Fix README rst links [(#962)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/962) * Fix README rst links * Update all READMEs --- packages/google-cloud-logging/samples/snippets/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 767abc1031e5..a9ff67afc42d 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -152,11 +152,11 @@ This sample uses the `Google Cloud Client Library for Python`_. You can read the documentation for more details on API usage and use GitHub to `browse the source`_ and `report issues`_. -.. Google Cloud Client Library for Python: +.. _Google Cloud Client Library for Python: https://googlecloudplatform.github.io/google-cloud-python/ -.. browse the source: +.. _browse the source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. report issues: +.. _report issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues From 2153b03201de4aacab76892bfd9965fb411d9c61 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 27 Jun 2017 12:41:15 -0700 Subject: [PATCH 307/855] Auto-update dependencies. [(#1004)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1004) * Auto-update dependencies. * Fix natural language samples * Fix pubsub iam samples * Fix language samples * Fix bigquery samples --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4942b24ad744..8b01c04cb14b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.0.0 +google-cloud-logging==1.1.0 From 105db11275fc14b97831806b54b6ce12337a35d0 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 7 Aug 2017 10:04:55 -0700 Subject: [PATCH 308/855] Auto-update dependencies. [(#1055)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1055) * Auto-update dependencies. * Explicitly use latest bigtable client Change-Id: Id71e9e768f020730e4ca9514a0d7ebaa794e7d9e * Revert language update for now Change-Id: I8867f154e9a5aae00d0047c9caf880e5e8f50c53 * Remove pdb. 
smh Change-Id: I5ff905fadc026eebbcd45512d4e76e003e3b2b43 --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8b01c04cb14b..0cf479d75d8c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.1.0 +google-cloud-logging==1.2.0 From 0019f4a2dbd266d4804fc965d1c683ff4c0418b0 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 29 Aug 2017 13:35:20 -0700 Subject: [PATCH 309/855] Fix logging tests Change-Id: I6691c70912b1e1b5993e962a4827a846642feac3 --- packages/google-cloud-logging/samples/snippets/snippets_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index f22fbc8f1bfb..78093781c81b 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -42,5 +42,5 @@ def test_write(): snippets.write_entry(TEST_LOGGER_NAME) -def test_delete(): +def test_delete(example_log): snippets.delete_logger(TEST_LOGGER_NAME) From 322eb79ee13b9c7c230b8dc586125d7e9644291c Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 29 Aug 2017 16:53:02 -0700 Subject: [PATCH 310/855] Auto-update dependencies. [(#1093)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1093) * Auto-update dependencies. * Fix storage notification poll sample Change-Id: I6afbc79d15e050531555e4c8e51066996717a0f3 * Fix spanner samples Change-Id: I40069222c60d57e8f3d3878167591af9130895cb * Drop coverage because it's not useful Change-Id: Iae399a7083d7866c3c7b9162d0de244fbff8b522 * Try again to fix flaky logging test Change-Id: I6225c074701970c17c426677ef1935bb6d7e36b4 --- .../google-cloud-logging/samples/snippets/requirements.txt | 2 +- .../google-cloud-logging/samples/snippets/snippets_test.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0cf479d75d8c..4a49685280c6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.2.0 +google-cloud-logging==1.3.0 diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 78093781c81b..480763cdbfdb 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -43,4 +43,6 @@ def test_write(): def test_delete(example_log): - snippets.delete_logger(TEST_LOGGER_NAME) + @eventually_consistent.call + def _(): + snippets.delete_logger(TEST_LOGGER_NAME) From 2d407e21106b6183e4c7f2e5f2fe0af080af36a2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 18 Sep 2017 11:04:05 -0700 Subject: [PATCH 311/855] Update all generated readme auth instructions [(#1121)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1121) Change-Id: I03b5eaef8b17ac3dc3c0339fd2c7447bd3e11bd2 --- .../samples/snippets/README.rst | 32 +++---------------- 1 file 
changed, 5 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index a9ff67afc42d..349787427e39 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -17,34 +17,12 @@ Setup Authentication ++++++++++++++ -Authentication is typically done through `Application Default Credentials`_, -which means you do not have to change the code to authenticate as long as -your environment has credentials. You have a few options for setting up -authentication: +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. -#. When running locally, use the `Google Cloud SDK`_ - - .. code-block:: bash - - gcloud auth application-default login - - -#. When running on App Engine or Compute Engine, credentials are already - set-up. However, you may need to configure your Compute Engine instance - with `additional scopes`_. - -#. You can create a `Service Account key file`_. This file can be used to - authenticate to Google Cloud Platform services from any environment. To use - the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to - the path to the key file, for example: - - .. code-block:: bash - - export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json - -.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using -.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started Install Dependencies ++++++++++++++++++++ From b38b4c9c06f2072b2e342c009651c78a3e425d6d Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 12 Oct 2017 10:16:11 -0700 Subject: [PATCH 312/855] Added Link to Python Setup Guide [(#1158)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1158) * Update Readme.rst to add Python setup guide As requested in b/64770713. This sample is linked in documentation https://cloud.google.com/bigtable/docs/scaling, and it would make more sense to update the guide here than in the documentation. * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update install_deps.tmpl.rst * Updated readmegen scripts and re-generated related README files * Fixed the lint error --- packages/google-cloud-logging/samples/snippets/README.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 349787427e39..74f8dae990ee 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -27,7 +27,10 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ -#. Install `pip`_ and `virtualenv`_ if you do not already have them. +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. 
_Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. From 4599a53b12e7b7de0e40ccc34ec6bcd926dd9d38 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 24 Oct 2017 12:14:35 -0700 Subject: [PATCH 313/855] Fix a few more lint issues Change-Id: I0d420f3053f391fa225e4b8179e45fd1138f5c65 --- packages/google-cloud-logging/samples/snippets/export_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index b53b8978f7b8..ea090390506e 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -48,7 +48,7 @@ def example_sink(): try: sink.delete() - except: + except Exception: pass @@ -72,7 +72,7 @@ def test_create(capsys): finally: try: logging.Client().sink(sink_name).delete() - except: + except Exception: pass out, _ = capsys.readouterr() From 40f71b6fa4f2230ecfe456d99a5568b5c8eceaca Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 1 Nov 2017 12:30:10 -0700 Subject: [PATCH 314/855] Auto-update dependencies. [(#1186)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1186) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4a49685280c6..78aa79015101 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.3.0 +google-cloud-logging==1.4.0 From 335d8eb09a8681a9426c82244f71fc0b98d3e033 Mon Sep 17 00:00:00 2001 From: Andrew Gorcester Date: Tue, 28 Nov 2017 10:21:53 -0800 Subject: [PATCH 315/855] Add sample for standard library logging handler configuration [(#1233)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1233) * Add sample for standard library logging handler configuration * Add handler.py to readme --- .../samples/snippets/README.rst | 12 +++++ .../samples/snippets/handler.py | 46 +++++++++++++++++++ .../samples/snippets/handler_test.py | 22 +++++++++ 3 files changed, 80 insertions(+) create mode 100644 packages/google-cloud-logging/samples/snippets/handler.py create mode 100644 packages/google-cloud-logging/samples/snippets/handler_test.py diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 74f8dae990ee..4612fdd05cf9 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -63,6 +63,18 @@ To run this sample: $ python quickstart.py +Handler ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python handler.py + + Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py new file mode 100644 index 000000000000..9986eab8593e --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def use_logging_handler(): + # [START logging_handler_setup] + # Imports the Google Cloud client library + import google.cloud.logging + + # Instantiates a client + client = google.cloud.logging.Client() + + # Connects the logger to the root logging handler; by default this captures + # all logs at INFO level and higher + client.setup_logging() + # [END logging_handler_setup] + + # [START logging_handler_usage] + # Imports Python standard library logging + import logging + + # The data to log + text = 'Hello, world!' + + # Emits the data using the standard logging module + logging.warn(text) + # [END logging_handler_usage] + + print('Logged: {}'.format(text)) + + +if __name__ == '__main__': + use_logging_handler() diff --git a/packages/google-cloud-logging/samples/snippets/handler_test.py b/packages/google-cloud-logging/samples/snippets/handler_test.py new file mode 100644 index 000000000000..d48ee2e20d06 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/handler_test.py @@ -0,0 +1,22 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import handler + + +def test_handler(capsys): + handler.use_logging_handler() + out, _ = capsys.readouterr() + assert 'Logged' in out From 705a62e75e2fb2a2455f2b16a43659f951756a74 Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 7 Dec 2017 10:34:29 -0800 Subject: [PATCH 316/855] Added "Open in Cloud Shell" buttons to README files [(#1254)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1254) --- .../samples/snippets/README.rst | 40 +++++++++++-------- .../samples/snippets/README.rst.in | 2 + 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 4612fdd05cf9..00b7fa91a84d 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -3,6 +3,10 @@ Stackdriver Logging Python Samples =============================================================================== +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst + + This directory contains samples for Stackdriver Logging. 
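Once `setup_logging()` has attached the Cloud Logging handler, any stdlib logger that propagates to the root logger is forwarded as well. A minimal sketch (the `log_level` keyword is assumed from the library documentation of this era, and `logging.warning` is used below since `logging.warn` is a deprecated alias):

    import logging

    import google.cloud.logging

    client = google.cloud.logging.Client()
    # log_level (assumed keyword): records below this level are not
    # forwarded by the attached handler.
    client.setup_logging(log_level=logging.INFO)

    # Named loggers propagate to the root logger, so they are forwarded too.
    log = logging.getLogger('my_module')
    log.warning('Disk usage at 90%')
    log.info('Routine heartbeat')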
`Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. @@ -54,30 +58,26 @@ Samples Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/quickstart.py;logging/cloud-client/README.rst -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - -Handler -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - To run this sample: .. code-block:: bash - $ python handler.py + $ python quickstart.py Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/snippets.py;logging/cloud-client/README.rst + + To run this sample: @@ -87,13 +87,13 @@ To run this sample: $ python snippets.py usage: snippets.py [-h] logger_name {list,write,delete} ... - + This application demonstrates how to perform basic operations on logs and log entries with Stackdriver Logging. - + For more information, see the README.md under /logging and the documentation at https://cloud.google.com/logging/docs. - + positional arguments: logger_name Logger name {list,write,delete} @@ -101,14 +101,19 @@ To run this sample: write Writes log entries to the given logger. delete Deletes a logger and all its entries. Note that a deletion can take several minutes to take effect. - + optional arguments: -h, --help show this help message and exit + Export +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/export.py;logging/cloud-client/README.rst + + To run this sample: @@ -118,7 +123,7 @@ To run this sample: $ python export.py usage: export.py [-h] {list,create,update,delete} ... - + positional arguments: {list,create,update,delete} list Lists all sinks. @@ -131,13 +136,14 @@ To run this sample: om/logging/docs/view/advanced_filters for more filter information. delete Deletes a sink. - + optional arguments: -h, --help show this help message and exit + The client library ------------------------------------------------------------------------------- diff --git a/packages/google-cloud-logging/samples/snippets/README.rst.in b/packages/google-cloud-logging/samples/snippets/README.rst.in index 50862fa1da9b..00fa4b6b83c1 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst.in +++ b/packages/google-cloud-logging/samples/snippets/README.rst.in @@ -24,3 +24,5 @@ samples: show_help: true cloud_client_library: true + +folder: logging/cloud-client \ No newline at end of file From 20a383adfe5619cd1f53c25185357ffefc7c0aef Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Feb 2018 09:03:37 -0800 Subject: [PATCH 317/855] Auto-update dependencies. 
[(#1359)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1359) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 78aa79015101..fd8ba2ec480d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.4.0 +google-cloud-logging==1.5.0 From 861d7274dae402eed900c21c671fae78ff06bd5d Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 5 Mar 2018 12:28:55 -0800 Subject: [PATCH 318/855] Auto-update dependencies. [(#1377)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1377) * Auto-update dependencies. * Update requirements.txt --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index fd8ba2ec480d..b4a1672d3797 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.5.0 +google-cloud-logging==1.6.0 From 374dd331802fe2fb1ec0966ce2ef3a8b579fa9d0 Mon Sep 17 00:00:00 2001 From: chenyumic Date: Fri, 6 Apr 2018 22:57:36 -0700 Subject: [PATCH 319/855] Regenerate the README files and fix the Open in Cloud Shell link for some samples [(#1441)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1441) --- packages/google-cloud-logging/samples/snippets/README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 00b7fa91a84d..829cf041d2fa 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -12,7 +12,7 @@ This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ -.. _Stackdriver Logging: https://cloud.google.com/logging/docs +.. _Stackdriver Logging: https://cloud.google.com/logging/docs Setup ------------------------------------------------------------------------------- @@ -59,7 +59,7 @@ Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/quickstart.py;logging/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/quickstart.py,logging/cloud-client/README.rst @@ -75,7 +75,7 @@ Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/snippets.py;logging/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/snippets.py,logging/cloud-client/README.rst @@ -111,7 +111,7 @@ Export +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/export.py;logging/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/export.py,logging/cloud-client/README.rst From b01ed0bdc331feb401a5d08dfe27233ab8500075 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Thu, 26 Apr 2018 10:26:41 -0700 Subject: [PATCH 320/855] Update READMEs to fix numbering and add git clone [(#1464)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1464) --- .../google-cloud-logging/samples/snippets/README.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 829cf041d2fa..1ae5e6d66717 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -31,10 +31,16 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. From 85a2efae6afc0310eb371b4942549a9b77ae1c30 Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Wed, 15 Aug 2018 13:12:16 -0700 Subject: [PATCH 321/855] Update logging doc tags. 
[(#1634)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1634) --- packages/google-cloud-logging/samples/snippets/export.py | 9 ++++++++- .../google-cloud-logging/samples/snippets/snippets.py | 6 ++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/export.py b/packages/google-cloud-logging/samples/snippets/export.py index f8c1f0c5f954..f7606ba6c125 100644 --- a/packages/google-cloud-logging/samples/snippets/export.py +++ b/packages/google-cloud-logging/samples/snippets/export.py @@ -19,6 +19,7 @@ from google.cloud import logging +# [START logging_list_sinks] def list_sinks(): """Lists all sinks.""" logging_client = logging.Client() @@ -30,8 +31,10 @@ def list_sinks(): for sink in sinks: print('{}: {} -> {}'.format(sink.name, sink.filter_, sink.destination)) +# [END logging_list_sinks] +# [START logging_create_sink] def create_sink(sink_name, destination_bucket, filter_): """Creates a sink to export logs to the given Cloud Storage bucket. @@ -61,8 +64,10 @@ def create_sink(sink_name, destination_bucket, filter_): sink.create() print('Created sink {}'.format(sink.name)) +# [END logging_create_sink] +# [START logging_update_sink] def update_sink(sink_name, filter_): """Changes a sink's filter. @@ -80,9 +85,10 @@ def update_sink(sink_name, filter_): sink.filter_ = filter_ print('Updated sink {}'.format(sink.name)) sink.update() - # [END update] +# [END logging_update_sink] +# [START logging_delete_sink] def delete_sink(sink_name): """Deletes a sink.""" logging_client = logging.Client() @@ -91,6 +97,7 @@ def delete_sink(sink_name): sink.delete() print('Deleted sink {}'.format(sink.name)) +# [END logging_delete_sink] if __name__ == '__main__': diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py index 8a31066fa3bb..78f67e8a983b 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -26,6 +26,7 @@ from google.cloud import logging +# [START logging_write_log_entry] def write_entry(logger_name): """Writes log entries to the given logger.""" logging_client = logging.Client() @@ -47,8 +48,10 @@ def write_entry(logger_name): }) print('Wrote logs to {}.'.format(logger.name)) +# [END logging_write_log_entry] +# [START logging_list_log_entries] def list_entries(logger_name): """Lists the most recent entries for a given logger.""" logging_client = logging.Client() @@ -60,8 +63,10 @@ def list_entries(logger_name): timestamp = entry.timestamp.isoformat() print('* {}: {}'.format (timestamp, entry.payload)) +# [END logging_list_log_entries] +# [START logging_delete_log] def delete_logger(logger_name): """Deletes a logger and all its entries. 
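The [START ...]/[END ...] markers added above delimit excerpts that the cloud.google.com docs embed verbatim, so each tagged region has to read as a stand-alone snippet. Pulled out of snippets.py, the tagged operations compose roughly like this under the 1.x client API (logger name illustrative):

    from google.cloud import logging

    client = logging.Client()
    logger = client.logger('example_log')   # illustrative name

    logger.log_text('Hello, world!')                              # text payload
    logger.log_struct({'name': 'King Arthur'}, severity='INFO')   # structured payload

    for entry in logger.list_entries():
        print('* {}: {}'.format(entry.timestamp.isoformat(), entry.payload))
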
@@ -73,6 +78,7 @@ def delete_logger(logger_name): logger.delete() print('Deleted all logging entries for {}'.format(logger.name)) +# [END logging_delete_log] if __name__ == '__main__': From cdb6ecc9f7944c807eddc044c4e3994f8afef267 Mon Sep 17 00:00:00 2001 From: Andrew Gorcester Date: Tue, 20 Nov 2018 12:57:25 -0800 Subject: [PATCH 322/855] Fix deprecation warning [(#1801)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1801) logging.warn -> logging.warning to fix "DeprecationWarning: The 'warn' function is deprecated, use 'warning' instead" --- packages/google-cloud-logging/samples/snippets/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py index 9986eab8593e..7b4e50f217a3 100644 --- a/packages/google-cloud-logging/samples/snippets/handler.py +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -36,7 +36,7 @@ def use_logging_handler(): text = 'Hello, world!' # Emits the data using the standard logging module - logging.warn(text) + logging.warning(text) # [END logging_handler_usage] print('Logged: {}'.format(text)) From ff15fee21dbc7db0031b63f5efce2ca87e8fb7c8 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 20 Nov 2018 15:40:29 -0800 Subject: [PATCH 323/855] Auto-update dependencies. [(#1846)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1846) ACK, merging. --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b4a1672d3797..22b650de9d05 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.6.0 +google-cloud-logging==1.8.0 From c549bbbcba4c0f02c06ad9c1cbe058068d0528cf Mon Sep 17 00:00:00 2001 From: DPEBot Date: Wed, 6 Feb 2019 12:06:35 -0800 Subject: [PATCH 324/855] Auto-update dependencies. [(#1980)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1980) * Auto-update dependencies. * Update requirements.txt * Update requirements.txt --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 22b650de9d05..986c1456934d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.8.0 +google-cloud-logging==1.10.0 From 27dd15e8b4a4ae87a579901908d1201832b87b54 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Tue, 8 Oct 2019 09:53:32 -0700 Subject: [PATCH 325/855] Adds split updates for Firebase ... 
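The warn-to-warning change above is worth enforcing mechanically: logging.warn() is only a deprecated alias, and Python announces that via DeprecationWarning. Promoting that category to an error makes any remaining alias call fail fast in a test session:

    import logging
    import warnings

    # Turn deprecated-API usage into hard failures for the test session.
    warnings.simplefilter('error', DeprecationWarning)

    logging.warning('preferred spelling')   # logging.warn(...) would now raise
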
opencensus [(#2438)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2438) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 986c1456934d..8048908a2c0e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.10.0 +google-cloud-logging==1.12.1 From f4567d33589aba98242a3e22d4e220415533dbd2 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 326/855] Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. * revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8048908a2c0e..03f959145956 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.12.1 +google-cloud-logging==1.14.0 From f08e0562b4d342020f1dc74f5c4799c8478c3970 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 27 Mar 2020 19:40:24 +0100 Subject: [PATCH 327/855] chore(deps): update dependency google-cloud-logging to v1.15.0 [(#3161)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3161) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-logging](https://togithub.com/googleapis/python-logging) | minor | `==1.14.0` -> `==1.15.0` | --- ### Release Notes
googleapis/python-logging ### [`v1.15.0`](https://togithub.com/googleapis/python-logging/blob/master/CHANGELOG.md#​1150httpswwwgithubcomgoogleapispython-loggingcomparev1140v1150-2020-02-26) [Compare Source](https://togithub.com/googleapis/python-logging/compare/v1.14.0...v1.15.0) ##### Features - add support for cmek settings; undeprecate resource name helper methods; bump copyright year to 2020 ([#​22](https://www.github.com/googleapis/python-logging/issues/22)) ([1c687c1](https://www.github.com/googleapis/python-logging/commit/1c687c168cdc1f5ebc74d2380ad87335a42209a2)) ##### Bug Fixes - **logging:** deprecate resource name helper methods (via synth) ([#​9837](https://www.github.com/googleapis/python-logging/issues/9837)) ([335af9e](https://www.github.com/googleapis/python-logging/commit/335af9e909eb7fb4696ba906a82176611653531d)) - **logging:** update test assertion and core version pins ([#​10087](https://www.github.com/googleapis/python-logging/issues/10087)) ([4aedea8](https://www.github.com/googleapis/python-logging/commit/4aedea80e2bccb5ba3c41fae7a0ee46cc07eefa9)) - replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 ([#​10081](https://www.github.com/googleapis/python-logging/issues/10081)) ([c6eb601](https://www.github.com/googleapis/python-logging/commit/c6eb60179d674dfd5137d90d209094c9369b3581))
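After an upgrade like the 1.14.0 -> 1.15.0 bump this update applies, a quick runtime assertion confirms the environment really resolved the pinned release described in the notes above — a small sanity check, not part of the sample suite:

    import pkg_resources

    version = pkg_resources.get_distribution('google-cloud-logging').version
    assert version == '1.15.0', version   # matches the requirements.txt pin
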
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 03f959145956..9bd776d79632 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.14.0 +google-cloud-logging==1.15.0 From 2cd0d9dbd2402ebbf00fb41a1564f0ee9531fe01 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 328/855] Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. 
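With the noxfile rework described above, each sample folder carries its own requirements-test.txt, and a test session reduces to installing both requirements files and invoking pytest with a JUnit report. Roughly the equivalent direct call, assuming the dependencies are already installed:

    import pytest

    # What the shared nox session runs inside a sample folder; sponge_log.xml
    # is the JUnit report the CI harness collects.
    raise SystemExit(pytest.main(['--junitxml=sponge_log.xml']))
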
Co-authored-by: Renovate Bot --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/google-cloud-logging/samples/snippets/requirements-test.txt diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt new file mode 100644 index 000000000000..1cfcb6852802 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -0,0 +1,2 @@ +pytest==5.3.2 +gcp-devrel-py-tools==0.0.15 From 4935c97ef7fb7f2bf0fa9709a58a1aa3d3659298 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Wed, 22 Apr 2020 18:38:58 -0700 Subject: [PATCH 329/855] [logging] chore: remove gcp-devrel-py-tools [(#3477)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3477) --- .../samples/snippets/export_test.py | 9 ++++++--- .../samples/snippets/requirements-test.txt | 2 +- .../samples/snippets/snippets_test.py | 17 ++++++++++------- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index ea090390506e..b787c066af67 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -16,12 +16,13 @@ import random import string -from gcp_devrel.testing import eventually_consistent +import backoff from google.cloud import logging import pytest import export + BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] TEST_SINK_NAME_TMPL = 'example_sink_{}' TEST_SINK_FILTER = 'severity>=CRITICAL' @@ -53,12 +54,14 @@ def example_sink(): def test_list(example_sink, capsys): - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): export.list_sinks() out, _ = capsys.readouterr() assert example_sink.name in out + eventually_consistent_test() + def test_create(capsys): sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 1cfcb6852802..8855f3cf1f88 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ +backoff==1.10.0 pytest==5.3.2 -gcp-devrel-py-tools==0.0.15 diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 480763cdbfdb..075c88a5199d 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
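The switch above from gcp-devrel-py-tools' eventually_consistent helper to the backoff library keeps the same idea: retry an assertion with exponential delays until Logging's eventually consistent reads catch up, then give up. The pattern in isolation, with check_visible and fetch_entries as placeholders:

    import backoff

    @backoff.on_exception(backoff.expo, AssertionError, max_time=60)
    def check_visible():
        # Re-invoked on AssertionError after ~1s, 2s, 4s, ... delays;
        # re-raises once roughly 60 seconds have elapsed.
        assert fetch_entries(), 'entries not visible yet'

    check_visible()
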
-from gcp_devrel.testing import eventually_consistent +import backoff from google.cloud import logging import pytest import snippets + TEST_LOGGER_NAME = 'example_log' @@ -31,18 +32,20 @@ def example_log(): def test_list(example_log, capsys): - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=120) + def eventually_consistent_test(): snippets.list_entries(TEST_LOGGER_NAME) out, _ = capsys.readouterr() assert example_log in out + eventually_consistent_test() + def test_write(): snippets.write_entry(TEST_LOGGER_NAME) -def test_delete(example_log): - @eventually_consistent.call - def _(): - snippets.delete_logger(TEST_LOGGER_NAME) +def test_delete(example_log, capsys): + snippets.delete_logger(TEST_LOGGER_NAME) + out, _ = capsys.readouterr() + assert TEST_LOGGER_NAME in out From 244428090c869c7671c0d2be3fd868f91a7a7a07 Mon Sep 17 00:00:00 2001 From: Darren Carlton Date: Wed, 6 May 2020 17:21:13 -0400 Subject: [PATCH 330/855] Update logging example to retrieve the default handler [(#3691)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3691) * Update handler.py * Update README.rst * Update handler.py Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> --- .../google-cloud-logging/samples/snippets/README.rst | 6 +++--- .../google-cloud-logging/samples/snippets/handler.py | 9 ++++++--- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 1ae5e6d66717..f8cb576a9282 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -7,12 +7,12 @@ Stackdriver Logging Python Samples :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst -This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. +This directory contains samples for `Cloud Logging`_, which you can use to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. -.. _Stackdriver Logging: https://cloud.google.com/logging/docs +.. _Cloud Logging: https://cloud.google.com/logging/docs Setup ------------------------------------------------------------------------------- @@ -165,4 +165,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py index 7b4e50f217a3..d59458425633 100644 --- a/packages/google-cloud-logging/samples/snippets/handler.py +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -17,14 +17,17 @@ def use_logging_handler(): # [START logging_handler_setup] - # Imports the Google Cloud client library + # Imports the Cloud Logging client library import google.cloud.logging # Instantiates a client client = google.cloud.logging.Client() - # Connects the logger to the root logging handler; by default this captures - # all logs at INFO level and higher + # Retrieves a Cloud Logging handler based on the environment + # you're running in and integrates the handler with the + # Python logging module. By default this captures all logs + # at INFO level and higher + client.get_default_handler() client.setup_logging() # [END logging_handler_setup] From ea48a001d956ab42d8cdd7164b6074460202fdfc Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 26 May 2020 10:41:50 -0700 Subject: [PATCH 331/855] testing: various cleanups [(#3877)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3877) * testing: various cleanups * [iap]: only run iap tests on Kokoro * [vision/automl]: use temporary directory for temporary files * [appengine/flexible/scipy]: use temporary directory * [bigtable/snippets/reads]: update pytest snapshot * [texttospeech/cloud-client]: added output.mp3 to .gitignore * [iot/api-client/gcs_file_to_device]: use temporary directory * [iot/api-client/mqtt_example]: use temporary directory * [logging/cloud-client]: use uuid and add backoff * use project directory with Trampoline V1 --- .../samples/snippets/snippets_test.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 075c88a5199d..1d1d01972efa 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -12,14 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
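The cleanup above also makes the test logger name unique per run — concurrent CI builds previously shared example_log and could delete each other's entries mid-test. The naming idea on its own:

    import uuid

    # A fresh hex suffix per session keeps parallel builds from colliding
    # on (and deleting) the same logger.
    TEST_LOGGER_NAME = 'example_log_{}'.format(uuid.uuid4().hex)
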
+import uuid + import backoff +from google.api_core.exceptions import NotFound from google.cloud import logging import pytest import snippets -TEST_LOGGER_NAME = 'example_log' +TEST_LOGGER_NAME = 'example_log_{}'.format(uuid.uuid4().hex) @pytest.fixture @@ -46,6 +49,8 @@ def test_write(): def test_delete(example_log, capsys): - snippets.delete_logger(TEST_LOGGER_NAME) - out, _ = capsys.readouterr() - assert TEST_LOGGER_NAME in out + @backoff.on_exception(backoff.expo, NotFound, max_time=120) + def eventually_consistent_test(): + snippets.delete_logger(TEST_LOGGER_NAME) + out, _ = capsys.readouterr() + assert TEST_LOGGER_NAME in out From 9efa60f359b083674775ea88e03af23d60e11d55 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 00:46:30 +0200 Subject: [PATCH 332/855] chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 8855f3cf1f88..678aa129fb4f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==5.3.2 +pytest==5.4.3 From 52077a915f73db431c4ab8eaf0a3586a8b849fcb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 1 Aug 2020 21:51:00 +0200 Subject: [PATCH 333/855] Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 678aa129fb4f..d0029c6de49e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==5.4.3 +pytest==6.0.1 From d12e6766d251b16e31eb107f83b02abff54eb661 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 10 Aug 2020 22:11:59 +0200 Subject: [PATCH 334/855] chore(deps): update dependency google-cloud-logging to v1.15.1 [(#4458)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4458) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 9bd776d79632..dbb4176a17c9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.15.0 +google-cloud-logging==1.15.1 From 6352ee420afc38edd1c6f76b89e0abf43045b602 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Mon, 12 Oct 2020 16:17:20 +0000 Subject: [PATCH 335/855] chore: update templates --- packages/google-cloud-logging/.coveragerc | 5 +- packages/google-cloud-logging/.flake8 | 2 + .../.github/snippet-bot.yml | 0 packages/google-cloud-logging/.gitignore | 5 +- 
.../.kokoro/populate-secrets.sh | 43 ++++ .../google-cloud-logging/CONTRIBUTING.rst | 19 -- packages/google-cloud-logging/MANIFEST.in | 3 + .../docs/multiprocessing.rst | 7 + .../samples/AUTHORING_GUIDE.md | 1 + .../samples/CONTRIBUTING.md | 1 + .../samples/snippets/README.rst | 29 ++- .../samples/snippets/noxfile.py | 229 ++++++++++++++++++ .../scripts/readme-gen/readme_gen.py | 66 +++++ .../readme-gen/templates/README.tmpl.rst | 87 +++++++ .../readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 ++ .../templates/install_deps.tmpl.rst | 29 +++ .../templates/install_portaudio.tmpl.rst | 35 +++ packages/google-cloud-logging/synth.metadata | 27 +-- packages/google-cloud-logging/synth.py | 10 +- 20 files changed, 582 insertions(+), 39 deletions(-) create mode 100644 packages/google-cloud-logging/.github/snippet-bot.yml create mode 100755 packages/google-cloud-logging/.kokoro/populate-secrets.sh create mode 100644 packages/google-cloud-logging/docs/multiprocessing.rst create mode 100644 packages/google-cloud-logging/samples/AUTHORING_GUIDE.md create mode 100644 packages/google-cloud-logging/samples/CONTRIBUTING.md create mode 100644 packages/google-cloud-logging/samples/snippets/noxfile.py create mode 100644 packages/google-cloud-logging/scripts/readme-gen/readme_gen.py create mode 100644 packages/google-cloud-logging/scripts/readme-gen/templates/README.tmpl.rst create mode 100644 packages/google-cloud-logging/scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 packages/google-cloud-logging/scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 packages/google-cloud-logging/scripts/readme-gen/templates/install_portaudio.tmpl.rst diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index dd39c8546c41..0d8e6297dc9c 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -17,6 +17,8 @@ # Generated by synthtool. DO NOT EDIT! [run] branch = True +omit = + google/cloud/__init__.py [report] fail_under = 100 @@ -32,4 +34,5 @@ omit = */gapic/*.py */proto/*.py */core/*.py - */site-packages/*.py \ No newline at end of file + */site-packages/*.py + google/cloud/__init__.py diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 20fe9bda2ee4..ed9316381c9c 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -21,6 +21,8 @@ exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. diff --git a/packages/google-cloud-logging/.github/snippet-bot.yml b/packages/google-cloud-logging/.github/snippet-bot.yml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore index 3fb06e09ce74..b9daa52f118d 100644 --- a/packages/google-cloud-logging/.gitignore +++ b/packages/google-cloud-logging/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -45,14 +46,16 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. 
pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/packages/google-cloud-logging/.kokoro/populate-secrets.sh b/packages/google-cloud-logging/.kokoro/populate-secrets.sh new file mode 100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 64c917ca84ae..ef2706b7773c 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? 
***************************************** diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index 68855abc3f02..e9e29d12033d 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -20,3 +20,6 @@ recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/multiprocessing.rst b/packages/google-cloud-logging/docs/multiprocessing.rst new file mode 100644 index 000000000000..1cb29d4ca967 --- /dev/null +++ b/packages/google-cloud-logging/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpcio` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md b/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/CONTRIBUTING.md b/packages/google-cloud-logging/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-logging/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index f8cb576a9282..d60cd0a3b997 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Stackdriver Logging Python Samples @@ -7,17 +8,19 @@ Stackdriver Logging Python Samples :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst -This directory contains samples for `Cloud Logging`_, which you can use to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. +This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. + +.. _Stackdriver Logging: https://cloud.google.com/logging/docs -.. _Cloud Logging: https://cloud.google.com/logging/docs Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -28,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -42,7 +48,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. 
Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash @@ -58,9 +64,15 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -77,6 +89,8 @@ To run this sample: $ python quickstart.py + + Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -92,6 +106,7 @@ To run this sample: $ python snippets.py + usage: snippets.py [-h] logger_name {list,write,delete} ... This application demonstrates how to perform basic operations on logs and @@ -113,6 +128,8 @@ To run this sample: + + Export +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -128,6 +145,7 @@ To run this sample: $ python export.py + usage: export.py [-h] {list,create,update,delete} ... positional arguments: @@ -150,6 +168,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -165,4 +187,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues + .. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py new file mode 100644 index 000000000000..01686e4a0379 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -0,0 +1,229 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
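The generated noxfile above is explicitly never edited in place; a sample customizes behavior by dropping a noxfile_config.py alongside it, whose TEST_CONFIG_OVERRIDE is merged over the defaults shown. A minimal override with illustrative values:

    # noxfile_config.py — sits next to the sample; values are illustrative.
    TEST_CONFIG_OVERRIDE = {
        # Skip Python 2.7 for this sample.
        'ignored_versions': ['2.7'],
        # Use the build-specific project rather than GOOGLE_CLOUD_PROJECT.
        'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
        # Extra non-secret environment variables injected into pytest.
        'envs': {'CLOUD_STORAGE_BUCKET': 'my-test-bucket'},
    }
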
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..d309d6e97518 --- /dev/null +++ b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
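readme_gen.py above is a thin YAML-plus-Jinja2 pipeline: load a README.rst.in config, render README.tmpl.rst with it. The same flow in a few lines — substituting yaml.safe_load for the script's bare yaml.load, with an illustrative input path; note that configs using show_help also need the get_help global registered, as the script does:

    import jinja2
    import yaml

    env = jinja2.Environment(
        trim_blocks=True,
        loader=jinja2.FileSystemLoader('scripts/readme-gen/templates'))

    with open('samples/snippets/README.rst.in') as f:   # illustrative path
        config = yaml.safe_load(f)

    print(env.get_template('README.tmpl.rst').render(config))
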
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-logging/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-logging/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. 
Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-logging/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_.
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..a0406dba8c84
--- /dev/null
+++ b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-logging/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and PortAudio is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+      brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+      pip install --global-option='build_ext' \
+          --global-option='-I/usr/local/include' \
+          --global-option='-L/usr/local/lib' \
+          pyaudio
+
+* For Debian / Ubuntu Linux::
+
+      apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+..
_PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index a5616d3e5170..70f91ca29f82 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -1,32 +1,32 @@ { "sources": [ - { - "generator": { - "name": "artman", - "version": "2.0.0", - "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" - } - }, { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-logging", - "sha": "a22a3bfdd4c8a4d6e9cc0c7d7504322ff31ad7ea" + "remote": "git@github.com:googleapis/python-logging.git", + "sha": "98029b5a0d997963a7a30758933e0cc8ee8f5127" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a", - "internalRef": "309426927" + "sha": "fd31b1600fc496d6127665d29f095371d985c637", + "internalRef": "336344634" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "befc24dcdeb8e57ec1259826fd33120b05137e8f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cdddf139b36000b3a7c65fd2a7781e253262359a" + "sha": "befc24dcdeb8e57ec1259826fd33120b05137e8f" } } ], @@ -37,8 +37,7 @@ "apiName": "logging", "apiVersion": "v2", "language": "python", - "generator": "gapic", - "config": "google/logging/artman_logging.yaml" + "generator": "bazel" } } ] diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 45a49f131dc1..9965d9b693e7 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -15,6 +15,7 @@ """This script is used to synthesize generated parts of this library.""" import synthtool as s from synthtool import gcp +from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -56,7 +57,14 @@ 'webob', 'django' ], + samples=True, ) s.move(templated_files, excludes=[".coveragerc"]) -s.shell.run(["nox", "-s", "blacken"], hide_output=False) +# -------------------------------------------------------------------------- +# Samples templates +# -------------------------------------------------------------------------- + +python.py_samples() + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file From d2407a3fa4c443cb1d5a21389d9cef65314e2b66 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Fri, 30 Oct 2020 23:05:13 +0000 Subject: [PATCH 336/855] chore: remove multiprocessing note --- .../docs/multiprocessing.rst | 7 ---- packages/google-cloud-logging/noxfile.py | 33 ------------------- 2 files changed, 40 deletions(-) delete mode 100644 packages/google-cloud-logging/docs/multiprocessing.rst diff --git a/packages/google-cloud-logging/docs/multiprocessing.rst b/packages/google-cloud-logging/docs/multiprocessing.rst deleted file mode 100644 index 1cb29d4ca967..000000000000 --- a/packages/google-cloud-logging/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpcio` library, it is safe to - share instances across threads. 
In multiprocessing scenarios, the best
-   practice is to create client instances *after* the invocation of
-   :func:`os.fork` by :class:`multiprocessing.Pool` or
-   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py
index 11fc0bf286b2..9cc3ab77f6c5 100644
--- a/packages/google-cloud-logging/noxfile.py
+++ b/packages/google-cloud-logging/noxfile.py
@@ -172,39 +172,6 @@ def docs(session):
     )
 
 
-@nox.session(python="3.7")
-def docfx(session):
-    """Build the docfx yaml files for this library."""
-
-    session.install("-e", ".")
-    session.install("sphinx", "alabaster", "recommonmark")
-
-    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
-    session.run(
-        "sphinx-build",
-        "-T",  # show full traceback on exception
-        "-N",  # no colors
-        "-D",
-        (
-            "extensions=sphinx.ext.autodoc,"
-            "sphinx.ext.autosummary,"
-            "docfx_yaml.extension,"
-            "sphinx.ext.intersphinx,"
-            "sphinx.ext.coverage,"
-            "sphinx.ext.napoleon,"
-            "sphinx.ext.todo,"
-            "sphinx.ext.viewcode,"
-            "recommonmark"
-        ),
-        "-b",
-        "html",
-        "-d",
-        os.path.join("docs", "_build", "doctrees", ""),
-        os.path.join("docs", ""),
-        os.path.join("docs", "_build", "html", ""),
-    )
-
-
 @nox.session(python=DEFAULT_PYTHON_VERSION)
 def docfx(session):
     """Build the docfx yaml files for this library."""

From b21e49d255329ae30e727175e39f5d5f39e3b3f3 Mon Sep 17 00:00:00 2001
From: Nicole Zhu <69952136+nicoleczhu@users.noreply.github.com>
Date: Fri, 6 Nov 2020 16:22:31 -0800
Subject: [PATCH 337/855] chore: add blunderbuss (#90)

---
 packages/google-cloud-logging/.github/blunderbuss.yml | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 packages/google-cloud-logging/.github/blunderbuss.yml

diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml
new file mode 100644
index 000000000000..148ebf4e81cb
--- /dev/null
+++ b/packages/google-cloud-logging/.github/blunderbuss.yml
@@ -0,0 +1,4 @@
+assign_issues:
+  - Daniel-Sanche
+assign_prs:
+  - Daniel-Sanche

From c7d42a43a99b6724971bdc17cf818f7c341bdb66 Mon Sep 17 00:00:00 2001
From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com>
Date: Wed, 18 Nov 2020 17:23:36 -0700
Subject: [PATCH 338/855] feat!: use microgenerator (#94)

The bulk of the changes are to the handwritten code
(`google/cloud/logging_v2/*.py`). Changes are listed in `UPGRADING.md`.

* `_gapic` uses the microgenerated surface
* Add support for parent resources that are not `project` (`folder`,
  `billingAccount`, `organization`) where appropriate for log entries and
  sinks.
* Use google-style docstrings * Optional params can only be passed as kwargs --- .../.github/sync-repo-settings.yml | 13 + .../.kokoro/docs/common.cfg | 2 +- .../.kokoro/release/common.cfg | 50 +- .../.kokoro/samples/python3.6/common.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 6 + .../.kokoro/trampoline.sh | 15 +- .../google-cloud-logging/CODE_OF_CONDUCT.md | 123 +- packages/google-cloud-logging/README.rst | 8 +- packages/google-cloud-logging/UPGRADING.md | 337 ++ .../google-cloud-logging/docs/UPGRADING.md | 1 + .../docs/_templates/layout.html | 4 +- packages/google-cloud-logging/docs/client.rst | 2 +- packages/google-cloud-logging/docs/conf.py | 1 + .../google-cloud-logging/docs/entries.rst | 2 +- .../docs/handlers-app-engine.rst | 2 +- .../docs/handlers-container-engine.rst | 2 +- .../google-cloud-logging/docs/handlers.rst | 2 +- packages/google-cloud-logging/docs/index.rst | 11 + packages/google-cloud-logging/docs/logger.rst | 2 +- packages/google-cloud-logging/docs/metric.rst | 2 +- .../google-cloud-logging/docs/resource.rst | 6 + packages/google-cloud-logging/docs/sink.rst | 2 +- .../google-cloud-logging/docs/snippets.py | 4 +- .../docs/stdlib-usage.rst | 10 +- .../docs/transports-base.rst | 2 +- .../docs/transports-sync.rst | 2 +- .../docs/transports-thread.rst | 2 +- packages/google-cloud-logging/docs/v2.rst | 1 + .../google/cloud/logging/__init__.py | 57 +- .../google/cloud/logging/_gapic.py | 574 --- .../google/cloud/logging/_http.py | 540 -- .../google/cloud/logging/client.py | 407 -- .../google/cloud/logging/logger.py | 386 -- .../google/cloud/logging/py.typed | 2 + .../google/cloud/logging/sink.py | 220 - .../google/cloud/logging_v2/__init__.py | 60 +- .../google/cloud/logging_v2/_gapic.py | 562 +++ .../cloud/{logging => logging_v2}/_helpers.py | 77 +- .../google/cloud/logging_v2/_http.py | 525 ++ .../google/cloud/logging_v2/client.py | 384 ++ .../cloud/{logging => logging_v2}/entries.py | 130 +- .../google/cloud/logging_v2/gapic/__init__.py | 0 .../gapic/config_service_v2_client.py | 1442 ------ .../gapic/config_service_v2_client_config.py | 93 - .../google/cloud/logging_v2/gapic/enums.py | 215 - .../gapic/logging_service_v2_client.py | 806 --- .../gapic/logging_service_v2_client_config.py | 62 - .../gapic/metrics_service_v2_client.py | 650 --- .../gapic/metrics_service_v2_client_config.py | 48 - .../logging_v2/gapic/transports/__init__.py | 0 .../config_service_v2_grpc_transport.py | 306 -- .../logging_service_v2_grpc_transport.py | 192 - .../metrics_service_v2_grpc_transport.py | 181 - .../handlers/__init__.py | 8 +- .../handlers/_helpers.py | 18 +- .../handlers/app_engine.py | 48 +- .../handlers/container_engine.py | 24 +- .../handlers/handlers.py | 84 +- .../handlers/middleware/__init__.py | 2 +- .../handlers/middleware/request.py | 11 +- .../handlers/transports/__init__.py | 6 +- .../handlers/transports/background_thread.py | 182 +- .../handlers/transports/base.py | 21 +- .../handlers/transports/sync.py | 28 +- .../google/cloud/logging_v2/logger.py | 382 ++ .../cloud/{logging => logging_v2}/metric.py | 125 +- .../google/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry.proto | 107 +- .../cloud/logging_v2/proto/log_entry_pb2.py | 881 ---- .../logging_v2/proto/log_entry_pb2_grpc.py | 2 - .../cloud/logging_v2/proto/logging.proto | 102 +- .../logging_v2/proto/logging_config.proto | 470 +- .../logging_v2/proto/logging_config_pb2.py | 2368 --------- .../proto/logging_config_pb2_grpc.py | 259 - 
.../logging_v2/proto/logging_metrics.proto | 42 +- .../logging_v2/proto/logging_metrics_pb2.py | 1045 ---- .../proto/logging_metrics_pb2_grpc.py | 111 - .../cloud/logging_v2/proto/logging_pb2.py | 1326 ----- .../logging_v2/proto/logging_pb2_grpc.py | 127 - .../google/cloud/logging_v2/py.typed | 2 + .../cloud/{logging => logging_v2}/resource.py | 26 +- .../cloud/logging_v2/services/__init__.py | 16 + .../services/config_service_v2/__init__.py | 24 + .../config_service_v2/async_client.py | 1531 ++++++ .../services/config_service_v2/client.py | 1692 +++++++ .../services/config_service_v2/pagers.py | 404 ++ .../config_service_v2/transports/__init__.py | 36 + .../config_service_v2/transports/base.py | 405 ++ .../config_service_v2/transports/grpc.py | 675 +++ .../transports/grpc_asyncio.py | 702 +++ .../services/logging_service_v2/__init__.py | 24 + .../logging_service_v2/async_client.py | 702 +++ .../services/logging_service_v2/client.py | 845 ++++ .../services/logging_service_v2/pagers.py | 412 ++ .../logging_service_v2/transports/__init__.py | 36 + .../logging_service_v2/transports/base.py | 248 + .../logging_service_v2/transports/grpc.py | 384 ++ .../transports/grpc_asyncio.py | 394 ++ .../services/metrics_service_v2/__init__.py | 24 + .../metrics_service_v2/async_client.py | 627 +++ .../services/metrics_service_v2/client.py | 780 +++ .../services/metrics_service_v2/pagers.py | 148 + .../metrics_service_v2/transports/__init__.py | 36 + .../metrics_service_v2/transports/base.py | 234 + .../metrics_service_v2/transports/grpc.py | 366 ++ .../transports/grpc_asyncio.py | 377 ++ .../google/cloud/logging_v2/sink.py | 233 + .../google/cloud/logging_v2/types.py | 72 - .../google/cloud/logging_v2/types/__init__.py | 116 + .../cloud/logging_v2/types/log_entry.py | 271 + .../google/cloud/logging_v2/types/logging.py | 394 ++ .../cloud/logging_v2/types/logging_config.py | 960 ++++ .../cloud/logging_v2/types/logging_metrics.py | 327 ++ packages/google-cloud-logging/mypy.ini | 3 + packages/google-cloud-logging/noxfile.py | 4 +- .../samples/snippets/README.rst | 29 +- .../samples/snippets/README.rst.in | 6 +- .../samples/snippets/export.py | 79 +- .../samples/snippets/export_test.py | 24 +- .../samples/snippets/handler.py | 6 +- .../samples/snippets/handler_test.py | 2 +- .../samples/snippets/noxfile.py | 47 +- .../samples/snippets/quickstart.py | 8 +- .../samples/snippets/quickstart_test.py | 2 +- .../samples/snippets/snippets.py | 55 +- .../samples/snippets/snippets_test.py | 4 +- .../scripts/decrypt-secrets.sh | 15 +- .../scripts/fixup_logging_v2_keywords.py | 203 + packages/google-cloud-logging/setup.py | 6 +- packages/google-cloud-logging/synth.metadata | 12 +- packages/google-cloud-logging/synth.py | 51 +- .../v2/test_system_logging_service_v2_v2.py | 6 +- .../tests/system/test_system.py | 45 +- .../tests/unit/gapic/logging_v2/__init__.py | 1 + .../logging_v2/test_config_service_v2.py | 4382 +++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 2166 ++++++++ .../logging_v2/test_metrics_service_v2.py | 2189 ++++++++ .../v2/test_config_service_v2_client_v2.py | 604 --- .../v2/test_logging_service_v2_client_v2.py | 262 - .../v2/test_metrics_service_v2_client_v2.py | 288 -- .../unit/handlers/middleware/test_request.py | 8 +- .../tests/unit/handlers/test__helpers.py | 16 +- .../tests/unit/handlers/test_app_engine.py | 10 +- .../unit/handlers/test_container_engine.py | 2 +- .../tests/unit/handlers/test_handlers.py | 12 +- .../transports/test_background_thread.py | 61 +- 
.../unit/handlers/transports/test_base.py | 4 +- .../unit/handlers/transports/test_sync.py | 10 +- .../tests/unit/test__gapic.py | 496 +- .../tests/unit/test__helpers.py | 18 +- .../tests/unit/test__http.py | 131 +- .../tests/unit/test_client.py | 127 +- .../tests/unit/test_entries.py | 34 +- .../tests/unit/test_logger.py | 112 +- .../tests/unit/test_logging_shim.py | 29 + .../tests/unit/test_metric.py | 53 +- .../tests/unit/test_sink.py | 126 +- 158 files changed, 26566 insertions(+), 15029 deletions(-) create mode 100644 packages/google-cloud-logging/.github/sync-repo-settings.yml create mode 100644 packages/google-cloud-logging/UPGRADING.md create mode 120000 packages/google-cloud-logging/docs/UPGRADING.md create mode 100644 packages/google-cloud-logging/docs/resource.rst delete mode 100644 packages/google-cloud-logging/google/cloud/logging/_gapic.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging/_http.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging/client.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging/logger.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/py.typed delete mode 100644 packages/google-cloud-logging/google/cloud/logging/sink.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/_helpers.py (65%) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/_http.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/client.py rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/entries.py (72%) delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/__init__.py (71%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/_helpers.py (86%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/app_engine.py (74%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/container_engine.py (75%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/handlers.py (58%) rename 
packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/middleware/__init__.py (88%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/middleware/request.py (87%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/transports/__init__.py (81%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/transports/background_thread.py (61%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/transports/base.py (64%) rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/handlers/transports/sync.py (60%) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/logger.py rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/metric.py (50%) delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/py.typed rename packages/google-cloud-logging/google/cloud/{logging => logging_v2}/resource.py (68%) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py create mode 100644 
packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/sink.py delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py create mode 100644 packages/google-cloud-logging/mypy.ini create mode 100644 packages/google-cloud-logging/scripts/fixup_logging_v2_keywords.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py delete mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py delete mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py delete mode 100644 packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py create mode 100644 packages/google-cloud-logging/tests/unit/test_logging_shim.py diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yml b/packages/google-cloud-logging/.github/sync-repo-settings.yml new file mode 100644 index 000000000000..29fffc283012 --- /dev/null +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yml @@ -0,0 +1,13 @@ +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. 
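+# (GitHub evaluates the pattern with fnmatch-style matching, so a glob such as
+# `release-*` would also be accepted here.)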
+# Defaults to `master` +- pattern: master + requiredStatusCheckContexts: + - 'Kokoro' + - 'cla/google' + - 'Samples - Lint' + - 'Samples - Python 3.6' + - 'Samples - Python 3.7' + - 'Samples - Python 3.8' \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg index 4206e8ac43ed..7bb6536d7580 100644 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 52c4b699b7cf..9fedb82bbf67 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-logging/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg index a9d6d48c40a3..1bfa98c11b08 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg index 1f7cc1973d29..0f8e2c95d19d 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. 
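+# (Each Python version's samples run against their own version-suffixed test
+# project; see the matching python3.7 and python3.8 configs below.)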
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg index 9ba81c4b7b80..6c1d661d2f91 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-logging/.kokoro/trampoline.sh b/packages/google-cloud-logging/.kokoro/trampoline.sh index e8c4251f3ed4..f39236e943a8 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-logging/CODE_OF_CONDUCT.md b/packages/google-cloud-logging/CODE_OF_CONDUCT.md index b3d1f6029849..039f43681204 100644 --- a/packages/google-cloud-logging/CODE_OF_CONDUCT.md +++ b/packages/google-cloud-logging/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. 
However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index a81f28e85516..e5017619e38c 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -50,11 +50,11 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 -Deprecated Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support was removed on January 1, 2020. +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7. The last version of the library compatible with Python 2.7 is `google-cloud-logging==1.15.1`. Mac/Linux diff --git a/packages/google-cloud-logging/UPGRADING.md b/packages/google-cloud-logging/UPGRADING.md new file mode 100644 index 000000000000..af7461dda902 --- /dev/null +++ b/packages/google-cloud-logging/UPGRADING.md @@ -0,0 +1,337 @@ +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-logging` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues). + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. 
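+
+If you cannot upgrade yet, one option (a suggestion here, not part of the
+original guide) is to pin the final 1.x release until your code is ready:
+
+```sh
+pip install "google-cloud-logging<2.0.0"
+```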
+
+
+## Primary Changes
+
+This section lists the most relevant changes in `google.cloud.logging`.
+See 'Changes in GAPIC layer' if you were directly using `google.cloud.logging_v2.proto` or `google.cloud.logging_v2.gapic`.
+
+
+### Optional arguments *must* be passed as keyword arguments.
+
+Optional arguments are keyword-only arguments and *must* be passed by name.
+See [PEP 3102](https://www.python.org/dev/peps/pep-3102/).
+
+```diff
+from google.cloud import logging
+
+filter_ = "severity>=CRITICAL"
+destination = "storage.googleapis.com/{bucket}".format(bucket=destination_bucket)
+logging_client = logging.Client()
+-sink = logging_client.sink(sink_name, filter_, destination)
++sink = logging_client.sink(sink_name, filter_=filter_, destination=destination)
+```
+
+### Support for non-project resources
+
+Where appropriate, the library supports additional resource names (see https://google.aip.dev/122).
+
+**Valid Resource Names**:
+
+* `"projects/[PROJECT_ID]"`
+* `"organizations/[ORGANIZATION_ID]"`
+* `"billingAccounts/[BILLING_ACCOUNT_ID]"`
+* `"folders/[FOLDER_ID]"`
+
+
+#### `google.cloud.logging_v2.client.Client`
+
+
+> **WARNING**: Breaking change
+
+`list_entries` accepts an optional `resource_names` parameter. `projects` has been removed.
+
+
+```diff
+from google.cloud import logging_v2
+
+client = logging_v2.Client()
+-client.list_entries(projects=["myProject"])
++client.list_entries(resource_names=["projects/myProject", "folders/myFolder"])
+client.list_entries() # defaults to project bound to client
+```
+
+`list_sinks` accepts an optional `parent` parameter.
+
+```py
+from google.cloud import logging_v2
+
+client = logging_v2.Client()
+client.list_sinks() # lists sinks in current project
+client.list_sinks(parent="folders/myFolder") # specify a different parent resource
+```
+
+#### `google.cloud.logging_v2.logger.Logger`
+
+> **WARNING**: Breaking change
+
+`list_entries` accepts an optional `resource_names` parameter. `projects` has been removed.
+
+```diff
+from google.cloud import logging_v2
+
+client = logging_v2.Client()
+logger = logging_v2.Logger("myLog", client)
+- logger.list_entries(projects=["myProject"])
++ logger.list_entries(resource_names=["projects/myProject", "folders/myFolder"])
+logger.list_entries() # defaults to project bound to client
+```
+
+#### `google.cloud.logging_v2.sinks.Sink`
+
+> **WARNING**: Breaking change
+* Sinks no longer have a `project` property. The attribute is replaced by `parent`.
+
+```diff
+from google.cloud import logging_v2
+
+client = logging_v2.Client(project="myProject")
+sink = logging_v2.Sink("mySink", client=client)
+-project = sink.project # myProject
++parent = sink.parent # projects/myProject
+```
+
+
+### `google.cloud.logging` is an alias for `google.cloud.logging_v2`
+
+> **WARNING**: Breaking change
+
+All library code has been moved to `google.cloud.logging_v2`.
+`google.cloud.logging` serves as a default alias for `google.cloud.logging_v2`.
+
+
+
+
+## Changes in GAPIC layer
+
+This section describes changes in the GAPIC layer (produced by the generator) that previously lived in `google.cloud.logging_v2.proto` / `google.cloud.logging_v2.gapic`.
+
+> **NOTE**: Most users are unlikely to have been using this layer directly.
+
+### Import path
+
+> **WARNING**: Breaking change
+
+The generated clients are no longer exposed at the top level of `google.cloud.logging_v2`; they now live under `google.cloud.logging_v2.services`. This is because we expect most users to use the handwritten surface exposed at `google.cloud.logging_v2`. See the [Cloud Logging How-to Guides](https://cloud.google.com/logging/docs/how-to).
+
+If you would like to continue using the generated surface, adjust your imports:
+
+**Before**
+```py
+from google.cloud import logging_v2
+logging_client = logging_v2.LoggingServiceV2Client()
+```
+
+**After**
+
+```py
+from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
+from google.cloud.logging_v2.types import LogSink
+
+logging_client = LoggingServiceV2Client()
+sink = LogSink()
+```
+
+### Method Calls
+
+> **WARNING**: Breaking change
+
+Methods expect request objects. We provide a script that will convert most common use cases. This script will *only* convert code written for the generated clients previously exposed at `google.cloud.logging_v2`, like `LoggingServiceV2Client`.
+
+* Install the library and `libcst`. `libcst` is required to run the fixup script.
+
+```sh
+python3 -m pip install google-cloud-logging libcst
+```
+
+* The script `fixup_logging_v2_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ fixup_logging_v2_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+from google.cloud import logging_v2
+
+client = logging_v2.LoggingServiceV2Client()
+client.list_log_entries(["projects/myProject"], filter_="severity>=CRITICAL")
+```
+
+
+**After:**
+```py
+from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
+
+client = LoggingServiceV2Client()
+client.list_log_entries({"resource_names": ["projects/myProject"], "filter": "severity>=CRITICAL"})
+```
+
+#### More Details
+
+In `google-cloud-logging<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def list_log_entries(
+        self,
+        resource_names,
+        project_ids=None,
+        filter_=None,
+        order_by=None,
+        page_size=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the [`google.api.method_signature` annotation](https://github.com/googleapis/googleapis/blob/2db5725bf898b544a0cf951e1694d3b0fce5eda3/google/cloud/automl/v1/prediction_service.proto#L86) specified by the API producer.
+
+
+**After:**
+```py
+    def list_log_entries(
+        self,
+        request: logging.ListLogEntriesRequest = None,
+        *,
+        resource_names: Sequence[str] = None,
+        filter: str = None,
+        order_by: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListLogEntriesPager:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.list_log_entries(
+    request={
+        "resource_names": resource_names,
+        "filter": filter_,
+        "order_by": order_by,
+    }
+)
+```
+
+```py
+response = client.list_log_entries(
+    resource_names=resource_names,
+    filter=filter_,
+    order_by=order_by,
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `order_by`. Executing this code will result in an error.
+
+```py
+response = client.list_log_entries(
+    request={
+        "resource_names": resource_names,
+        "filter": filter_,
+    },
+    order_by=order_by
+)
+```
+
+### `filter` parameter
+
+Methods that took parameter `filter_` now expect `filter`.
+
+**Before**
+```py
+from google.cloud import logging_v2
+
+client = logging_v2.LoggingServiceV2Client()
+client.list_log_entries(["projects/myProject"], filter_="severity>=CRITICAL")
+```
+
+
+**After**
+```py
+from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
+
+client = LoggingServiceV2Client()
+client.list_log_entries(resource_names=["projects/myProject"], filter="severity>=CRITICAL")
+```
+
+### Enums
+
+
+> **WARNING**: Breaking change
+
+The submodule `enums` has been removed. Enums can be accessed under `types`.
+
+**Before:**
+```py
+from google.cloud import logging_v2
+
+state = logging_v2.enums.LifecycleState.ACTIVE
+```
+
+
+**After:**
+```py
+from google.cloud import logging_v2
+
+state = logging_v2.types.LifecycleState.ACTIVE
+```
+
+
+### Resource Path Helper Methods
+
+The following resource name helpers have been removed. Please construct the strings manually.
+
+```py
+billing_account = "my-billing-account"
+folder = "my-folder"
+organization = "my-organization"
+project = "my-project"
+log = "my-log"
+
+exclusion = "exclusion"
+sink = "my-sink"
+
+# LoggingServiceV2Client
+billing_log_path = f"billingAccounts/{billing_account}/logs/{log}"
+folder_log_path = f"folders/{folder}/logs/{log}"
+organization_log_path = f"organizations/{organization}/logs/{log}"
+
+# ConfigServiceV2Client
+billing_exclusion_path = f"billingAccounts/{billing_account}/exclusions/{exclusion}"
+billing_sink_path = f"billingAccounts/{billing_account}/sinks/{sink}"
+exclusion_path = f"projects/{project}/exclusions/{exclusion}"
+folder_exclusion_path = f"folders/{folder}/exclusions/{exclusion}"
+folder_sink_path = f"folders/{folder}/sinks/{sink}"
+organization_exclusion_path = f"organizations/{organization}/exclusions/{exclusion}"
+organization_sink_path = f"organizations/{organization}/sinks/{sink}"
+```
+
+The following resource name helpers have been renamed.
+
+**All Clients**
+* `billing_path` -> `common_billing_account_path`
+* `folder_path` -> `common_folder_path`
+* `organization_path` -> `common_organization_path`
+* `project_path` -> `common_project_path`
+
+**`ConfigServiceV2Client`**
+* `sink_path` -> `log_sink_path`
+* `exclusion_path` -> `log_exclusion_path`
\ No newline at end of file
diff --git a/packages/google-cloud-logging/docs/UPGRADING.md b/packages/google-cloud-logging/docs/UPGRADING.md
new file mode 120000
index 000000000000..01097c8c0fb8
--- /dev/null
+++ b/packages/google-cloud-logging/docs/UPGRADING.md
@@ -0,0 +1 @@
+../UPGRADING.md
\ No newline at end of file
diff --git a/packages/google-cloud-logging/docs/_templates/layout.html b/packages/google-cloud-logging/docs/_templates/layout.html
index 228529efe2d2..6316a537f72b 100644
--- a/packages/google-cloud-logging/docs/_templates/layout.html
+++ b/packages/google-cloud-logging/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/packages/google-cloud-logging/docs/client.rst b/packages/google-cloud-logging/docs/client.rst index c9e78db2795e..4fe7c39e1aef 100644 --- a/packages/google-cloud-logging/docs/client.rst +++ b/packages/google-cloud-logging/docs/client.rst @@ -1,6 +1,6 @@ Cloud Logging Client ========================== -.. automodule:: google.cloud.logging.client +.. automodule:: google.cloud.logging_v2.client :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 1815da57acb6..296607b790b7 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -349,6 +349,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/packages/google-cloud-logging/docs/entries.rst b/packages/google-cloud-logging/docs/entries.rst index 223eadc0756e..9d473f3c1fe8 100644 --- a/packages/google-cloud-logging/docs/entries.rst +++ b/packages/google-cloud-logging/docs/entries.rst @@ -1,7 +1,7 @@ Entries ======= -.. automodule:: google.cloud.logging.entries +.. automodule:: google.cloud.logging_v2.entries :members: :show-inheritance: :member-order: groupwise diff --git a/packages/google-cloud-logging/docs/handlers-app-engine.rst b/packages/google-cloud-logging/docs/handlers-app-engine.rst index 71c45e3690be..f25223a20578 100644 --- a/packages/google-cloud-logging/docs/handlers-app-engine.rst +++ b/packages/google-cloud-logging/docs/handlers-app-engine.rst @@ -1,6 +1,6 @@ Google App Engine flexible Log Handler ====================================== -.. automodule:: google.cloud.logging.handlers.app_engine +.. automodule:: google.cloud.logging_v2.handlers.app_engine :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers-container-engine.rst b/packages/google-cloud-logging/docs/handlers-container-engine.rst index 5286ec58b50c..981b41dcb105 100644 --- a/packages/google-cloud-logging/docs/handlers-container-engine.rst +++ b/packages/google-cloud-logging/docs/handlers-container-engine.rst @@ -1,6 +1,6 @@ Google Kubernetes Engine Log Handler ==================================== -.. automodule:: google.cloud.logging.handlers.container_engine +.. automodule:: google.cloud.logging_v2.handlers.container_engine :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers.rst b/packages/google-cloud-logging/docs/handlers.rst index 1a258a88a541..9089170fbe7c 100644 --- a/packages/google-cloud-logging/docs/handlers.rst +++ b/packages/google-cloud-logging/docs/handlers.rst @@ -1,6 +1,6 @@ Python Logging Module Handler ============================== -.. automodule:: google.cloud.logging.handlers.handlers +.. automodule:: google.cloud.logging_v2.handlers.handlers :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 347dc9f813e7..64c2dcd1e37c 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -6,6 +6,17 @@ Documentation :maxdepth: 3 v2 + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. 
toctree:: + :maxdepth: 2 + + UPGRADING Changelog ~~~~~~~~~ diff --git a/packages/google-cloud-logging/docs/logger.rst b/packages/google-cloud-logging/docs/logger.rst index 72533ba33774..8aca18199333 100644 --- a/packages/google-cloud-logging/docs/logger.rst +++ b/packages/google-cloud-logging/docs/logger.rst @@ -1,6 +1,6 @@ Logger ====== -.. automodule:: google.cloud.logging.logger +.. automodule:: google.cloud.logging_v2.logger :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/metric.rst b/packages/google-cloud-logging/docs/metric.rst index ca30e3c89eca..8ef5c3f08439 100644 --- a/packages/google-cloud-logging/docs/metric.rst +++ b/packages/google-cloud-logging/docs/metric.rst @@ -1,6 +1,6 @@ Metrics ======= -.. automodule:: google.cloud.logging.metric +.. automodule:: google.cloud.logging_v2.metric :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/resource.rst b/packages/google-cloud-logging/docs/resource.rst new file mode 100644 index 000000000000..c5de1a540476 --- /dev/null +++ b/packages/google-cloud-logging/docs/resource.rst @@ -0,0 +1,6 @@ +Resource +========= + +.. automodule:: google.cloud.logging_v2.resource + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/sink.rst b/packages/google-cloud-logging/docs/sink.rst index 35e88562bbee..eb40277659ab 100644 --- a/packages/google-cloud-logging/docs/sink.rst +++ b/packages/google-cloud-logging/docs/sink.rst @@ -1,6 +1,6 @@ Sinks ===== -.. automodule:: google.cloud.logging.sink +.. automodule:: google.cloud.logging_v2.sink :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/docs/snippets.py index 7a86213472b6..da9ba9b2d857 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/docs/snippets.py @@ -107,8 +107,8 @@ def client_list_entries_multi_project( """List entries via client across multiple projects.""" # [START client_list_entries_multi_project] - PROJECT_IDS = ["one-project", "another-project"] - for entry in client.list_entries(projects=PROJECT_IDS): # API call(s) + resource_names = ["projects/one-project", "projects/another-project"] + for entry in client.list_entries(resource_names=resource_names): # API call(s) do_something_with(entry) # [END client_list_entries_multi_project] diff --git a/packages/google-cloud-logging/docs/stdlib-usage.rst b/packages/google-cloud-logging/docs/stdlib-usage.rst index cba4080b5f5e..375b41ddf3bf 100644 --- a/packages/google-cloud-logging/docs/stdlib-usage.rst +++ b/packages/google-cloud-logging/docs/stdlib-usage.rst @@ -3,7 +3,7 @@ Integration with Python logging module It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it, -create a :class:`CloudLoggingHandler ` instance from your +create a :class:`CloudLoggingHandler ` instance from your Logging client. .. code-block:: python @@ -35,7 +35,7 @@ change it by providing a name to the handler: It is also possible to attach the handler to the root Python logger, so that for example a plain `logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However, you must avoid infinite recursion from the logging calls the client itself makes. A helper -method :meth:`setup_logging ` is provided to configure +method :meth:`setup_logging ` is provided to configure this automatically: .. 
code-block:: python @@ -61,10 +61,10 @@ Python logging handler transports ================================== The Python logging handler can use different transports. The default is -:class:`google.cloud.logging.handlers.BackgroundThreadTransport`. +:class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport`. - 1. :class:`google.cloud.logging.handlers.BackgroundThreadTransport` this is the default. It writes + 1. :class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport` this is the default. It writes entries on a background :class:`python.threading.Thread`. - 1. :class:`google.cloud.logging.handlers.SyncTransport` this handler does a direct API call on each + 1. :class:`google.cloud.logging_v2.handlers.SyncTransport` this handler does a direct API call on each logging statement to write the entry. diff --git a/packages/google-cloud-logging/docs/transports-base.rst b/packages/google-cloud-logging/docs/transports-base.rst index 5b52c46cadcb..b28fb5ba6bc2 100644 --- a/packages/google-cloud-logging/docs/transports-base.rst +++ b/packages/google-cloud-logging/docs/transports-base.rst @@ -1,6 +1,6 @@ Python Logging Handler Sync Transport ====================================== -.. automodule:: google.cloud.logging.handlers.transports.base +.. automodule:: google.cloud.logging_v2.handlers.transports.base :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/transports-sync.rst b/packages/google-cloud-logging/docs/transports-sync.rst index edb2b72f578d..32e6401cba05 100644 --- a/packages/google-cloud-logging/docs/transports-sync.rst +++ b/packages/google-cloud-logging/docs/transports-sync.rst @@ -1,6 +1,6 @@ Python Logging Handler Sync Transport ====================================== -.. automodule:: google.cloud.logging.handlers.transports.sync +.. automodule:: google.cloud.logging_v2.handlers.transports.sync :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/transports-thread.rst b/packages/google-cloud-logging/docs/transports-thread.rst index 45780b27fe42..2899e6c480b8 100644 --- a/packages/google-cloud-logging/docs/transports-thread.rst +++ b/packages/google-cloud-logging/docs/transports-thread.rst @@ -2,6 +2,6 @@ Python Logging Handler Threaded Transport ========================================= -.. automodule:: google.cloud.logging.handlers.transports.background_thread +.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread :members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/v2.rst b/packages/google-cloud-logging/docs/v2.rst index 567eabd7a4fd..823097bd73bd 100644 --- a/packages/google-cloud-logging/docs/v2.rst +++ b/packages/google-cloud-logging/docs/v2.rst @@ -8,6 +8,7 @@ v2 logger entries metric + resource sink stdlib-usage handlers diff --git a/packages/google-cloud-logging/google/cloud/logging/__init__.py b/packages/google-cloud-logging/google/cloud/logging/__init__.py index 80de6c4b6113..4481cea11286 100644 --- a/packages/google-cloud-logging/google/cloud/logging/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,20 +13,41 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
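For readers following the handler and transport documentation updated above, here is a minimal usage sketch. It assumes the 2.x module paths this patch introduces; the log name ``"my-log"`` is a placeholder.

.. code-block:: python

    import logging

    import google.cloud.logging
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.handlers.transports import SyncTransport

    client = google.cloud.logging.Client()

    # BackgroundThreadTransport is the default; SyncTransport instead makes
    # a direct API call for each record.
    handler = CloudLoggingHandler(client, name="my-log", transport=SyncTransport)
    logging.getLogger().addHandler(handler)
    logging.warning("sent to Cloud Logging with one API call per record")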
+# -"""Google Stackdriver Logging API wrapper.""" - - -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-logging").version - -from google.cloud.logging.client import Client - - -ASCENDING = "timestamp asc" -"""Query string to order by ascending timestamps.""" -DESCENDING = "timestamp desc" -"""Query string to order by decending timestamps.""" - -__all__ = ["__version__", "ASCENDING", "Client", "DESCENDING"] +from google.cloud.logging_v2 import __version__ +from google.cloud.logging_v2 import ASCENDING +from google.cloud.logging_v2 import DESCENDING + +from google.cloud.logging_v2.client import Client +from google.cloud.logging_v2.entries import logger_name_from_path +from google.cloud.logging_v2.entries import LogEntry +from google.cloud.logging_v2.entries import TextEntry +from google.cloud.logging_v2.entries import StructEntry +from google.cloud.logging_v2.entries import ProtobufEntry +from google.cloud.logging_v2 import handlers +from google.cloud.logging_v2.logger import Logger +from google.cloud.logging_v2.logger import Batch +from google.cloud.logging_v2.metric import Metric +from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.sink import Sink +from google.cloud.logging_v2 import types + +__all__ = ( + "__version__", + "ASCENDING", + "Batch", + "Client", + "DESCENDING", + "handlers", + "logger_name_from_path", + "Logger", + "LogEntry", + "Metric", + "ProtobufEntry", + "Resource", + "Sink", + "StructEntry", + "TextEntry", + "types", +) diff --git a/packages/google-cloud-logging/google/cloud/logging/_gapic.py b/packages/google-cloud-logging/google/cloud/logging/_gapic.py deleted file mode 100644 index 32897c088142..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging/_gapic.py +++ /dev/null @@ -1,574 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrapper for adapting the autogenerated gapic client to the hand-written -client.""" - -import functools - -from google.cloud.logging_v2.gapic.config_service_v2_client import ConfigServiceV2Client -from google.cloud.logging_v2.gapic.logging_service_v2_client import ( - LoggingServiceV2Client, -) -from google.cloud.logging_v2.gapic.metrics_service_v2_client import ( - MetricsServiceV2Client, -) -from google.cloud.logging_v2.proto.logging_config_pb2 import LogSink -from google.cloud.logging_v2.proto.logging_metrics_pb2 import LogMetric -from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry -from google.protobuf.json_format import MessageToDict -from google.protobuf.json_format import ParseDict - -from google.cloud.logging._helpers import entry_from_resource -from google.cloud.logging.sink import Sink -from google.cloud.logging.metric import Metric - - -class _LoggingAPI(object): - """Helper mapping logging-related APIs. - - :type gapic_api: - :class:`.logging_service_v2_client.LoggingServiceV2Client` - :param gapic_api: API object used to make RPCs. 
- - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_entries( - self, projects, filter_="", order_by="", page_size=0, page_token=None - ): - """Return a page of log entry resources. - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the API's client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current API. - """ - page_iter = self._gapic_api.list_log_entries( - [], - project_ids=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - ) - page_iter.client = self._client - page_iter.next_page_token = page_token - - # We attach a mutable loggers dictionary so that as Logger - # objects are created by entry_from_resource, they can be - # re-used by other log entries from the same logger. - loggers = {} - page_iter.item_to_value = functools.partial(_item_to_entry, loggers=loggers) - return page_iter - - def write_entries(self, entries, logger_name=None, resource=None, labels=None): - """API call: log an entry resource via a POST request - - :type entries: sequence of mapping - :param entries: the log entry resources to log. - - :type logger_name: str - :param logger_name: name of default logger to which to log the entries; - individual entries may override. - - :type resource: mapping - :param resource: default resource to associate with entries; - individual entries may override. - - :type labels: mapping - :param labels: default labels to associate with entries; - individual entries may override. - """ - partial_success = False - entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - self._gapic_api.write_log_entries( - entry_pbs, - log_name=logger_name, - resource=resource, - labels=labels, - partial_success=partial_success, - ) - - def logger_delete(self, project, logger_name): - """API call: delete all entries in a logger via a DELETE request - - :type project: str - :param project: ID of project containing the log entries to delete - - :type logger_name: str - :param logger_name: name of logger containing the log entries to delete - """ - path = "projects/%s/logs/%s" % (project, logger_name) - self._gapic_api.delete_log(path) - - -class _SinksAPI(object): - """Helper mapping sink-related APIs. - - :type gapic_api: - :class:`.config_service_v2_client.ConfigServiceV2Client` - :param gapic_api: API object used to make RPCs. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. 
- """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_sinks(self, project, page_size=0, page_token=None): - """List sinks for the project associated with this client. - - :type project: str - :param project: ID of the project whose sinks are to be listed. - - :type page_size: int - :param page_size: maximum number of sinks to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. - - :rtype: tuple, (list, str) - :returns: list of mappings, plus a "next page token" string: - if not None, indicates that more sinks can be retrieved - with another call (pass that value as ``page_token``). - """ - path = "projects/%s" % (project,) - page_iter = self._gapic_api.list_sinks(path, page_size=page_size) - page_iter.client = self._client - page_iter.next_page_token = page_token - page_iter.item_to_value = _item_to_sink - return page_iter - - def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: create a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create - - :type project: str - :param project: ID of the project in which to create the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The sink resource returned from the API (converted from a - protobuf to a dictionary). - """ - parent = "projects/%s" % (project,) - sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination) - created_pb = self._gapic_api.create_sink( - parent, sink_pb, unique_writer_identity=unique_writer_identity - ) - return MessageToDict(created_pb) - - def sink_get(self, project, sink_name): - """API call: retrieve a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :rtype: dict - :returns: The sink object returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/sinks/%s" % (project, sink_name) - sink_pb = self._gapic_api.get_sink(path) - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(sink_pb) - - def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: update a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. 
- - :rtype: dict - :returns: The sink resource returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/sinks/%s" % (project, sink_name) - sink_pb = LogSink(name=path, filter=filter_, destination=destination) - sink_pb = self._gapic_api.update_sink( - path, sink_pb, unique_writer_identity=unique_writer_identity - ) - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(sink_pb) - - def sink_delete(self, project, sink_name): - """API call: delete a sink resource. - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - """ - path = "projects/%s/sinks/%s" % (project, sink_name) - self._gapic_api.delete_sink(path) - - -class _MetricsAPI(object): - """Helper mapping sink-related APIs. - - :type gapic_api: - :class:`.metrics_service_v2_client.MetricsServiceV2Client` - - :param gapic_api: API object used to make RPCs. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gapic_api, client): - self._gapic_api = gapic_api - self._client = client - - def list_metrics(self, project, page_size=0, page_token=None): - """List metrics for the project associated with this client. - - :type project: str - :param project: ID of the project whose metrics are to be listed. - - :type page_size: int - :param page_size: maximum number of metrics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - metrics. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.metric.Metric` - accessible to the current API. - """ - path = "projects/%s" % (project,) - page_iter = self._gapic_api.list_log_metrics(path, page_size=page_size) - page_iter.client = self._client - page_iter.next_page_token = page_token - page_iter.item_to_value = _item_to_metric - return page_iter - - def metric_create(self, project, metric_name, filter_, description): - """API call: create a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - - :type project: str - :param project: ID of the project in which to create the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - """ - parent = "projects/%s" % (project,) - metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) - self._gapic_api.create_log_metric(parent, metric_pb) - - def metric_get(self, project, metric_name): - """API call: retrieve a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :rtype: dict - :returns: The metric object returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - metric_pb = self._gapic_api.get_log_metric(path) - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. 
- return MessageToDict(metric_pb) - - def metric_update(self, project, metric_name, filter_, description): - """API call: update a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - - :rtype: dict - :returns: The metric object returned from the API (converted from a - protobuf to a dictionary). - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - metric_pb = LogMetric(name=path, filter=filter_, description=description) - metric_pb = self._gapic_api.update_log_metric(path, metric_pb) - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - return MessageToDict(metric_pb) - - def metric_delete(self, project, metric_name): - """API call: delete a metric resource. - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - """ - path = "projects/%s/metrics/%s" % (project, metric_name) - self._gapic_api.delete_log_metric(path) - - -def _parse_log_entry(entry_pb): - """Special helper to parse ``LogEntry`` protobuf into a dictionary. - - The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This - can be problematic if the type URL in the payload isn't in the - ``google.protobuf`` registry. To help with parsing unregistered types, - this function will remove ``proto_payload`` before parsing. - - :type entry_pb: :class:`.log_entry_pb2.LogEntry` - :param entry_pb: Log entry protobuf. - - :rtype: dict - :returns: The parsed log entry. The ``protoPayload`` key may contain - the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if - it could not be parsed. - """ - try: - return MessageToDict(entry_pb) - except TypeError: - if entry_pb.HasField("proto_payload"): - proto_payload = entry_pb.proto_payload - entry_pb.ClearField("proto_payload") - entry_mapping = MessageToDict(entry_pb) - entry_mapping["protoPayload"] = proto_payload - return entry_mapping - else: - raise - - -def _log_entry_mapping_to_pb(mapping): - """Helper for :meth:`write_entries`, et aliae - - Performs "impedance matching" between the protobuf attrs and - the keys expected in the JSON API. - """ - entry_pb = LogEntry() - # NOTE: We assume ``mapping`` was created in ``Batch.commit`` - # or ``Logger._make_entry_resource``. In either case, if - # the ``protoPayload`` key is present, we assume that the - # type URL is registered with ``google.protobuf`` and will - # not cause any issues in the JSON->protobuf conversion - # of the corresponding ``proto_payload`` in the log entry - # (it is an ``Any`` field). - ParseDict(mapping, entry_pb) - return entry_pb - - -def _item_to_entry(iterator, entry_pb, loggers): - """Convert a log entry protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api_core.page_iterator.Iterator`. It is intended to be - patched with a mutable ``loggers`` argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_LoggingAPI.list_entries`. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type entry_pb: :class:`.log_entry_pb2.LogEntry` - :param entry_pb: Log entry protobuf returned from the API. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. - - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The next log entry in the page. - """ - resource = _parse_log_entry(entry_pb) - return entry_from_resource(resource, iterator.client, loggers) - - -def _item_to_sink(iterator, log_sink_pb): - """Convert a sink protobuf to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type log_sink_pb: - :class:`.logging_config_pb2.LogSink` - :param log_sink_pb: Sink protobuf returned from the API. - - :rtype: :class:`~google.cloud.logging.sink.Sink` - :returns: The next sink in the page. - """ - # NOTE: LogSink message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - resource = MessageToDict(log_sink_pb) - return Sink.from_api_repr(resource, iterator.client) - - -def _item_to_metric(iterator, log_metric_pb): - """Convert a metric protobuf to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type log_metric_pb: - :class:`.logging_metrics_pb2.LogMetric` - :param log_metric_pb: Metric protobuf returned from the API. - - :rtype: :class:`~google.cloud.logging.metric.Metric` - :returns: The next metric in the page. - """ - # NOTE: LogMetric message type does not have an ``Any`` field - # so `MessageToDict`` can safely be used. - resource = MessageToDict(log_metric_pb) - return Metric.from_api_repr(resource, iterator.client) - - -def make_logging_api(client): - """Create an instance of the Logging API adapter. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_LoggingAPI` - :returns: A metrics API instance with the proper credentials. - """ - generated = LoggingServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _LoggingAPI(generated, client) - - -def make_metrics_api(client): - """Create an instance of the Metrics API adapter. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_MetricsAPI` - :returns: A metrics API instance with the proper credentials. - """ - generated = MetricsServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _MetricsAPI(generated, client) - - -def make_sinks_api(client): - """Create an instance of the Sinks API adapter. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that holds configuration details. - - :rtype: :class:`_SinksAPI` - :returns: A metrics API instance with the proper credentials. 
- """ - generated = ConfigServiceV2Client( - credentials=client._credentials, client_info=client._client_info - ) - return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_http.py b/packages/google-cloud-logging/google/cloud/logging/_http.py deleted file mode 100644 index deb6b394f49d..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging/_http.py +++ /dev/null @@ -1,540 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Interact with Stackdriver Logging via JSON-over-HTTP.""" - -import functools - -from google.api_core import page_iterator -from google.cloud import _http - -from google.cloud.logging import __version__ -from google.cloud.logging._helpers import entry_from_resource -from google.cloud.logging.sink import Sink -from google.cloud.logging.metric import Metric - - -class Connection(_http.JSONConnection): - """A connection to Google Stackdriver Logging via the JSON REST API. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client that owns the current connection. - - :type client_info: :class:`~google.api_core.client_info.ClientInfo` - :param client_info: (Optional) instance used to generate user agent. - - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - :param client_options (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. - """ - - DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" - - def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): - super(Connection, self).__init__(client, client_info) - self.API_BASE_URL = api_endpoint - self._client_info.gapic_version = __version__ - self._client_info.client_library_version = __version__ - - API_VERSION = "v2" - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = "{api_base_url}/{api_version}{path}" - """A template for the URL of a particular API call.""" - - -class _LoggingAPI(object): - """Helper mapping logging-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_entries( - self, projects, filter_=None, order_by=None, page_size=None, page_token=None - ): - """Return a page of log entry resources. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. 
See - https://cloud.google.com/logging/docs/view/advanced_filters - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: maximum number of entries to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current API. - """ - extra_params = {"projectIds": projects} - - if filter_ is not None: - extra_params["filter"] = filter_ - - if order_by is not None: - extra_params["orderBy"] = order_by - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/entries:list" - # We attach a mutable loggers dictionary so that as Logger - # objects are created by entry_from_resource, they can be - # re-used by other log entries from the same logger. - loggers = {} - item_to_value = functools.partial(_item_to_entry, loggers=loggers) - iterator = page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key="entries", - page_token=page_token, - extra_params=extra_params, - ) - # This method uses POST to make a read-only request. - iterator._HTTP_METHOD = "POST" - return iterator - - def write_entries(self, entries, logger_name=None, resource=None, labels=None): - """API call: log an entry resource via a POST request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write - - :type entries: sequence of mapping - :param entries: the log entry resources to log. - - :type logger_name: str - :param logger_name: name of default logger to which to log the entries; - individual entries may override. - - :type resource: mapping - :param resource: default resource to associate with entries; - individual entries may override. - - :type labels: mapping - :param labels: default labels to associate with entries; - individual entries may override. - """ - data = {"entries": list(entries)} - - if logger_name is not None: - data["logName"] = logger_name - - if resource is not None: - data["resource"] = resource - - if labels is not None: - data["labels"] = labels - - self.api_request(method="POST", path="/entries:write", data=data) - - def logger_delete(self, project, logger_name): - """API call: delete all entries in a logger via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete - - :type project: str - :param project: ID of project containing the log entries to delete - - :type logger_name: str - :param logger_name: name of logger containing the log entries to delete - """ - path = "/projects/%s/logs/%s" % (project, logger_name) - self.api_request(method="DELETE", path=path) - - -class _SinksAPI(object): - """Helper mapping sink-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_sinks(self, project, page_size=None, page_token=None): - """List sinks for the project associated with this client. 
- - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list - - :type project: str - :param project: ID of the project whose sinks are to be listed. - - :type page_size: int - :param page_size: maximum number of sinks to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.sink.Sink` - accessible to the current API. - """ - extra_params = {} - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/projects/%s/sinks" % (project,) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_sink, - items_key="sinks", - page_token=page_token, - extra_params=extra_params, - ) - - def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: create a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create - - :type project: str - :param project: ID of the project in which to create the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The returned (created) resource. - """ - target = "/projects/%s/sinks" % (project,) - data = {"name": sink_name, "filter": filter_, "destination": destination} - query_params = {"uniqueWriterIdentity": unique_writer_identity} - return self.api_request( - method="POST", path=target, data=data, query_params=query_params - ) - - def sink_get(self, project, sink_name): - """API call: retrieve a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :rtype: dict - :returns: The JSON sink object returned from the API. - """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - return self.api_request(method="GET", path=target) - - def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False - ): - """API call: update a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the sink. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - - :rtype: dict - :returns: The returned (updated) resource. 
- """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - data = {"name": sink_name, "filter": filter_, "destination": destination} - query_params = {"uniqueWriterIdentity": unique_writer_identity} - return self.api_request( - method="PUT", path=target, query_params=query_params, data=data - ) - - def sink_delete(self, project, sink_name): - """API call: delete a sink resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete - - :type project: str - :param project: ID of the project containing the sink. - - :type sink_name: str - :param sink_name: the name of the sink - """ - target = "/projects/%s/sinks/%s" % (project, sink_name) - self.api_request(method="DELETE", path=target) - - -class _MetricsAPI(object): - """Helper mapping sink-related APIs. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_metrics(self, project, page_size=None, page_token=None): - """List metrics for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list - - :type project: str - :param project: ID of the project whose metrics are to be listed. - - :type page_size: int - :param page_size: maximum number of metrics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - metrics. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.metric.Metric` - accessible to the current API. - """ - extra_params = {} - - if page_size is not None: - extra_params["pageSize"] = page_size - - path = "/projects/%s/metrics" % (project,) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_metric, - items_key="metrics", - page_token=page_token, - extra_params=extra_params, - ) - - def metric_create(self, project, metric_name, filter_, description=None): - """API call: create a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - - :type project: str - :param project: ID of the project in which to create the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - """ - target = "/projects/%s/metrics" % (project,) - data = {"name": metric_name, "filter": filter_, "description": description} - self.api_request(method="POST", path=target, data=data) - - def metric_get(self, project, metric_name): - """API call: retrieve a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :rtype: dict - :returns: The JSON metric object returned from the API. 
- """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - return self.api_request(method="GET", path=target) - - def metric_update(self, project, metric_name, filter_, description): - """API call: update a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries exported by the metric. - - :type description: str - :param description: description of the metric. - - :rtype: dict - :returns: The returned (updated) resource. - """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - data = {"name": metric_name, "filter": filter_, "description": description} - return self.api_request(method="PUT", path=target, data=data) - - def metric_delete(self, project, metric_name): - """API call: delete a metric resource. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete - - :type project: str - :param project: ID of the project containing the metric. - - :type metric_name: str - :param metric_name: the name of the metric. - """ - target = "/projects/%s/metrics/%s" % (project, metric_name) - self.api_request(method="DELETE", path=target) - - -def _item_to_entry(iterator, resource, loggers): - """Convert a log entry resource to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api_core.page_iterator.Iterator`. It is intended to be - patched with a mutable ``loggers`` argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_LoggingAPI.list_entries`. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Log entry JSON resource returned from the API. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. - - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The next log entry in the page. - """ - return entry_from_resource(resource, iterator.client, loggers) - - -def _item_to_sink(iterator, resource): - """Convert a sink resource to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Sink JSON resource returned from the API. - - :rtype: :class:`~google.cloud.logging.sink.Sink` - :returns: The next sink in the page. - """ - return Sink.from_api_repr(resource, iterator.client) - - -def _item_to_metric(iterator, resource): - """Convert a metric resource to the native object. - - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: Metric JSON resource returned from the API. - - :rtype: :class:`~google.cloud.logging.metric.Metric` - :returns: The next metric in the page. 
- """ - return Metric.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-logging/google/cloud/logging/client.py b/packages/google-cloud-logging/google/cloud/logging/client.py deleted file mode 100644 index 64d9625060a9..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging/client.py +++ /dev/null @@ -1,407 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Client for interacting with the Google Stackdriver Logging API.""" - -import logging -import os - -try: - from google.cloud.logging import _gapic -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - _gapic = None -else: - _HAVE_GRPC = True - -import google.api_core.client_options -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.logging._helpers import _add_defaults_to_filter -from google.cloud.logging._helpers import retrieve_metadata_server -from google.cloud.logging._http import Connection -from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI -from google.cloud.logging._http import _MetricsAPI as JSONMetricsAPI -from google.cloud.logging._http import _SinksAPI as JSONSinksAPI -from google.cloud.logging.handlers import CloudLoggingHandler -from google.cloud.logging.handlers import AppEngineHandler -from google.cloud.logging.handlers import ContainerEngineHandler -from google.cloud.logging.handlers import setup_logging -from google.cloud.logging.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS - -from google.cloud.logging.logger import Logger -from google.cloud.logging.metric import Metric -from google.cloud.logging.sink import Sink - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - -_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME" -"""Environment variable set in App Engine when vm:true is set.""" - -_APPENGINE_INSTANCE_ID = "GAE_INSTANCE" -"""Environment variable set in App Engine standard and flexible environment.""" - -_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" -"""Attribute in metadata server when in GKE environment.""" - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. - If not passed, falls back to the default inferred - from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. 
- This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable - This parameter should be considered private, and could - change in the future. - - :type client_info: - :class:`google.api_core.client_info.ClientInfo` or - :class:`google.api_core.gapic_v1.client_info.ClientInfo` - :param client_info: - The client info used to send a user-agent string along with API - requests. If ``None``, then default info will be used. Generally, - you only need to set this if you're developing your own library - or partner tool. - :type client_options: :class:`~google.api_core.client_options.ClientOptions` - or :class:`dict` - :param client_options: (Optional) Client options used to set user options - on the client. API Endpoint should be set through client_options. - """ - - _logging_api = None - _sinks_api = None - _metrics_api = None - - SCOPE = ( - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/cloud-platform", - ) - """The scopes required for authenticating as a Logging consumer.""" - - def __init__( - self, - project=None, - credentials=None, - _http=None, - _use_grpc=None, - client_info=None, - client_options=None, - ): - super(Client, self).__init__( - project=project, - credentials=credentials, - _http=_http, - client_options=client_options, - ) - - kw_args = {"client_info": client_info} - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - kw_args["api_endpoint"] = api_endpoint - - self._connection = Connection(self, **kw_args) - - self._client_info = client_info - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def logging_api(self): - """Helper for logging-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - """ - if self._logging_api is None: - if self._use_grpc: - self._logging_api = _gapic.make_logging_api(self) - else: - self._logging_api = JSONLoggingAPI(self) - return self._logging_api - - @property - def sinks_api(self): - """Helper for log sink-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks - """ - if self._sinks_api is None: - if self._use_grpc: - self._sinks_api = _gapic.make_sinks_api(self) - else: - self._sinks_api = JSONSinksAPI(self) - return self._sinks_api - - @property - def metrics_api(self): - """Helper for log metric-related API calls. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - """ - if self._metrics_api is None: - if self._use_grpc: - self._metrics_api = _gapic.make_metrics_api(self) - else: - self._metrics_api = JSONMetricsAPI(self) - return self._metrics_api - - def logger(self, name): - """Creates a logger bound to the current client. - - :type name: str - :param name: the name of the logger to be constructed. - - :rtype: :class:`google.cloud.logging.logger.Logger` - :returns: Logger created with the current client. 
- """ - return Logger(name, client=self) - - def list_entries( - self, - projects=None, - filter_=None, - order_by=None, - page_size=None, - page_token=None, - ): - """Return a page of log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - By default, a 24 hour filter is applied. - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` - accessible to the current client. - """ - if projects is None: - projects = [self.project] - - filter_ = _add_defaults_to_filter(filter_) - - return self.logging_api.list_entries( - projects=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - page_token=page_token, - ) - - def sink(self, name, filter_=None, destination=None): - """Creates a sink bound to the current client. - - :type name: str - :param name: the name of the sink to be constructed. - - :type filter_: str - :param filter_: (optional) the advanced logs filter expression - defining the entries exported by the sink. If not - passed, the instance should already exist, to be - refreshed via :meth:`Sink.reload`. - - :type destination: str - :param destination: destination URI for the entries exported by - the sink. If not passed, the instance should - already exist, to be refreshed via - :meth:`Sink.reload`. - - :rtype: :class:`google.cloud.logging.sink.Sink` - :returns: Sink created with the current client. - """ - return Sink(name, filter_, destination, client=self) - - def list_sinks(self, page_size=None, page_token=None): - """List sinks for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list - - :type page_size: int - :param page_size: - Optional. The maximum number of sinks in each page of results from - this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing the - token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.logging.sink.Sink` - accessible to the current client. 
- """ - return self.sinks_api.list_sinks(self.project, page_size, page_token) - - def metric(self, name, filter_=None, description=""): - """Creates a metric bound to the current client. - - :type name: str - :param name: the name of the metric to be constructed. - - :type filter_: str - :param filter_: the advanced logs filter expression defining the - entries tracked by the metric. If not - passed, the instance should already exist, to be - refreshed via :meth:`Metric.reload`. - - :type description: str - :param description: the description of the metric to be constructed. - If not passed, the instance should already exist, - to be refreshed via :meth:`Metric.reload`. - - :rtype: :class:`google.cloud.logging.metric.Metric` - :returns: Metric created with the current client. - """ - return Metric(name, filter_, client=self, description=description) - - def list_metrics(self, page_size=None, page_token=None): - """List metrics for the project associated with this client. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list - - :type page_size: int - :param page_size: - Optional. The maximum number of metrics in each page of results - from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of metrics, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing the - token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.logging.metric.Metric` - accessible to the current client. - """ - return self.metrics_api.list_metrics(self.project, page_size, page_token) - - def get_default_handler(self, **kw): - """Return the default logging handler based on the local environment. - - :type kw: dict - :param kw: keyword args passed to handler constructor - - :rtype: :class:`logging.Handler` - :returns: The default log handler based on the environment - """ - gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) - - if ( - _APPENGINE_FLEXIBLE_ENV_VM in os.environ - or _APPENGINE_INSTANCE_ID in os.environ - ): - return AppEngineHandler(self, **kw) - elif gke_cluster_name is not None: - return ContainerEngineHandler(**kw) - else: - return CloudLoggingHandler(self, **kw) - - def setup_logging( - self, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw - ): - """Attach default Stackdriver logging handler to the root logger. - - This method uses the default log handler, obtained by - :meth:`~get_default_handler`, and attaches it to the root Python - logger, so that a call such as ``logging.warn``, as well as all child - loggers, will report to Stackdriver logging. - - :type log_level: int - :param log_level: (Optional) Python logging log level. Defaults to - :const:`logging.INFO`. - - :type excluded_loggers: tuple - :param excluded_loggers: (Optional) The loggers to not attach the - handler to. This will always include the - loggers in the path of the logging client - itself. 
- - :type kw: dict - :param kw: keyword args passed to handler constructor - """ - handler = self.get_default_handler(**kw) - setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/logger.py b/packages/google-cloud-logging/google/cloud/logging/logger.py deleted file mode 100644 index e6dae8b0eaa0..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging/logger.py +++ /dev/null @@ -1,386 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Loggers.""" - -from google.cloud.logging._helpers import _add_defaults_to_filter -from google.cloud.logging.entries import LogEntry -from google.cloud.logging.entries import ProtobufEntry -from google.cloud.logging.entries import StructEntry -from google.cloud.logging.entries import TextEntry -from google.cloud.logging.resource import Resource - - -_GLOBAL_RESOURCE = Resource(type="global", labels={}) - - -_OUTBOUND_ENTRY_FIELDS = ( # (name, default) - ("type_", None), - ("log_name", None), - ("payload", None), - ("labels", None), - ("insert_id", None), - ("severity", None), - ("http_request", None), - ("timestamp", None), - ("resource", _GLOBAL_RESOURCE), - ("trace", None), - ("span_id", None), - ("trace_sampled", None), - ("source_location", None), -) - - -class Logger(object): - """Loggers represent named targets for log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs - - :type name: str - :param name: the name of the logger - - :type client: :class:`google.cloud.logging.client.Client` - :param client: A client which holds credentials and project configuration - for the logger (which requires a project). - - :type labels: dict - :param labels: (optional) mapping of default labels for entries written - via this logger. - """ - - def __init__(self, name, client, labels=None): - self.name = name - self._client = client - self.labels = labels - - @property - def client(self): - """Clent bound to the logger.""" - return self._client - - @property - def project(self): - """Project bound to the logger.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in logging APIs""" - return "projects/%s/logs/%s" % (self.project, self.name) - - @property - def path(self): - """URI path for use in logging APIs""" - return "/%s" % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - - :rtype: :class:`google.cloud.logging.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def batch(self, client=None): - """Return a batch to use as a context manager. 
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current topic.
-
-        :rtype: :class:`Batch`
-        :returns: A batch to use as a context manager.
-        """
-        client = self._require_client(client)
-        return Batch(self, client)
-
-    def _do_log(self, client, _entry_class, payload=None, **kw):
-        """Helper for :meth:`log_empty`, :meth:`log_text`, etc."""
-        client = self._require_client(client)
-
-        # Apply defaults
-        kw["log_name"] = kw.pop("log_name", self.full_name)
-        kw["labels"] = kw.pop("labels", self.labels)
-        kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE)
-
-        if payload is not None:
-            entry = _entry_class(payload=payload, **kw)
-        else:
-            entry = _entry_class(**kw)
-
-        api_repr = entry.to_api_repr()
-        client.logging_api.write_entries([api_repr])
-
-    def log_empty(self, client=None, **kw):
-        """API call: log an empty message via a POST request
-
-        See
-        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current logger.
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self._do_log(client, LogEntry, **kw)
-
-    def log_text(self, text, client=None, **kw):
-        """API call: log a text message via a POST request
-
-        See
-        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write
-
-        :type text: str
-        :param text: the log message.
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current logger.
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self._do_log(client, TextEntry, text, **kw)
-
-    def log_struct(self, info, client=None, **kw):
-        """API call: log a structured message via a POST request
-
-        See
-        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write
-
-        :type info: dict
-        :param info: the log entry information
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current logger.
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self._do_log(client, StructEntry, info, **kw)
-
-    def log_proto(self, message, client=None, **kw):
-        """API call: log a protobuf message via a POST request
-
-        See
-        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list
-
-        :type message: :class:`~google.protobuf.message.Message`
-        :param message: The protobuf message to be logged.
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current logger.
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
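Every ``log_*`` method in the removed file funnels through ``_do_log``, which fills in the logger's ``log_name``, its default ``labels``, and the global resource before making a single ``write_entries`` call. A sketch of what that defaulting means in practice, with an illustrative logger name and labels (both hypothetical):

    logger = client.logger("my-log")   # "my-log" is an illustrative name
    logger.log_text(
        "hello",
        severity="INFO",               # extra keyword args become LogEntry fields
        labels={"env": "dev"},         # overrides the logger-level default labels
    )
    # Equivalent to one write_entries() call whose entry carries
    # log_name="projects/<project>/logs/my-log" and resource=_GLOBAL_RESOURCE.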
- """ - self._do_log(client, ProtobufEntry, message, **kw) - - def delete(self, client=None): - """API call: delete all entries in a logger via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current logger. - """ - client = self._require_client(client) - client.logging_api.logger_delete(self.project, self.name) - - def list_entries( - self, - projects=None, - filter_=None, - order_by=None, - page_size=None, - page_token=None, - ): - """Return a page of log entries. - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - - :type projects: list of strings - :param projects: project IDs to include. If not passed, - defaults to the project bound to the client. - - :type filter_: str - :param filter_: - a filter expression. See - https://cloud.google.com/logging/docs/view/advanced_filters - By default, a 24 hour filter is applied. - - :type order_by: str - :param order_by: One of :data:`~google.cloud.logging.ASCENDING` - or :data:`~google.cloud.logging.DESCENDING`. - - :type page_size: int - :param page_size: - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - - :type page_token: str - :param page_token: - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of log entries accessible to the current logger. - See :class:`~google.cloud.logging.entries.LogEntry`. - """ - log_filter = "logName=%s" % (self.full_name,) - if filter_ is not None: - filter_ = "%s AND %s" % (filter_, log_filter) - else: - filter_ = log_filter - filter_ = _add_defaults_to_filter(filter_) - return self.client.list_entries( - projects=projects, - filter_=filter_, - order_by=order_by, - page_size=page_size, - page_token=page_token, - ) - - -class Batch(object): - """Context manager: collect entries to log via a single API call. - - Helper returned by :meth:`Logger.batch` - - :type logger: :class:`google.cloud.logging.logger.Logger` - :param logger: the logger to which entries will be logged. - - :type client: :class:`google.cloud.logging.client.Client` - :param client: The client to use. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the batch, defaults - to None, which requires that every entry should have a - resource specified. Since the methods used to write - entries default the entry's resource to the global - resource type, this parameter is only required - if explicitly set to None. If no entries' resource are - set to None, this parameter will be ignored on the server. - """ - - def __init__(self, logger, client, resource=None): - self.logger = logger - self.entries = [] - self.client = client - self.resource = resource - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def log_empty(self, **kw): - """Add a entry without payload to be logged during :meth:`commit`. 
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self.entries.append(LogEntry(**kw))
-
-    def log_text(self, text, **kw):
-        """Add a text entry to be logged during :meth:`commit`.
-
-        :type text: str
-        :param text: the text entry
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self.entries.append(TextEntry(payload=text, **kw))
-
-    def log_struct(self, info, **kw):
-        """Add a struct entry to be logged during :meth:`commit`.
-
-        :type info: dict
-        :param info: the struct entry
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self.entries.append(StructEntry(payload=info, **kw))
-
-    def log_proto(self, message, **kw):
-        """Add a protobuf entry to be logged during :meth:`commit`.
-
-        :type message: protobuf message
-        :param message: the protobuf entry
-
-        :type kw: dict
-        :param kw: (optional) additional keyword arguments for the entry.
-                   See :class:`~google.cloud.logging.entries.LogEntry`.
-        """
-        self.entries.append(ProtobufEntry(payload=message, **kw))
-
-    def commit(self, client=None):
-        """Send saved log entries as a single API call.
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current batch.
-        """
-        if client is None:
-            client = self.client
-
-        kwargs = {"logger_name": self.logger.full_name}
-
-        if self.resource is not None:
-            kwargs["resource"] = self.resource._to_dict()
-
-        if self.logger.labels is not None:
-            kwargs["labels"] = self.logger.labels
-
-        entries = [entry.to_api_repr() for entry in self.entries]
-
-        client.logging_api.write_entries(entries, **kwargs)
-        del self.entries[:]
diff --git a/packages/google-cloud-logging/google/cloud/logging/py.typed b/packages/google-cloud-logging/google/cloud/logging/py.typed
new file mode 100644
index 000000000000..6c7420d0d9cb
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-logging package uses inline types.
diff --git a/packages/google-cloud-logging/google/cloud/logging/sink.py b/packages/google-cloud-logging/google/cloud/logging/sink.py
deleted file mode 100644
index 2a7d46fdbb81..000000000000
--- a/packages/google-cloud-logging/google/cloud/logging/sink.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Define Stackdriver Logging API Sinks."""
-
-from google.cloud.exceptions import NotFound
-
-
-class Sink(object):
-    """Sinks represent filtered exports for log entries.
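The ``Batch`` class removed above buffers entries in memory and flushes them in one ``write_entries`` RPC; ``__exit__`` commits only when the ``with`` block ends without an exception. An illustrative use, assuming a ``logger`` constructed as in the earlier sketch:

    with logger.batch() as batch:       # Batch.__exit__ calls commit() on success
        batch.log_text("loading")
        batch.log_struct({"step": 1})   # nothing is sent until commit()
    # Both entries were written here in a single API call.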
-
-    See
-    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
-
-    :type name: str
-    :param name: the name of the sink
-
-    :type filter_: str
-    :param filter_: (optional) the advanced logs filter expression defining
-                    the entries exported by the sink.
-
-    :type destination: str
-    :param destination: destination URI for the entries exported by the sink.
-                        If not passed, the instance should already exist, to
-                        be refreshed via :meth:`reload`.
-
-    :type client: :class:`google.cloud.logging.client.Client`
-    :param client: A client which holds credentials and project configuration
-                   for the sink (which requires a project).
-    """
-
-    def __init__(self, name, filter_=None, destination=None, client=None):
-        self.name = name
-        self.filter_ = filter_
-        self.destination = destination
-        self._client = client
-        self._writer_identity = None
-
-    @property
-    def client(self):
-        """Client bound to the sink."""
-        return self._client
-
-    @property
-    def project(self):
-        """Project bound to the sink."""
-        return self._client.project
-
-    @property
-    def full_name(self):
-        """Fully-qualified name used in sink APIs"""
-        return "projects/%s/sinks/%s" % (self.project, self.name)
-
-    @property
-    def path(self):
-        """URL path for the sink's APIs"""
-        return "/%s" % (self.full_name)
-
-    @property
-    def writer_identity(self):
-        """Identity used for exports via the sink"""
-        return self._writer_identity
-
-    def _update_from_api_repr(self, resource):
-        """Helper for API methods returning sink resources."""
-        self.destination = resource["destination"]
-        self.filter_ = resource.get("filter")
-        self._writer_identity = resource.get("writerIdentity")
-
-    @classmethod
-    def from_api_repr(cls, resource, client):
-        """Factory: construct a sink given its API representation
-
-        :type resource: dict
-        :param resource: sink resource representation returned from the API
-
-        :type client: :class:`google.cloud.logging.client.Client`
-        :param client: Client which holds credentials and project
-                       configuration for the sink.
-
-        :rtype: :class:`google.cloud.logging.sink.Sink`
-        :returns: Sink parsed from ``resource``.
-        :raises: :class:`ValueError` if ``client`` is not ``None`` and the
-                 project from the resource does not agree with the project
-                 from the client.
-        """
-        sink_name = resource["name"]
-        instance = cls(sink_name, client=client)
-        instance._update_from_api_repr(resource)
-        return instance
-
-    def _require_client(self, client):
-        """Check client or verify over-ride.
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current sink.
-
-        :rtype: :class:`google.cloud.logging.client.Client`
-        :returns: The client passed in or the currently bound client.
-        """
-        if client is None:
-            client = self._client
-        return client
-
-    def create(self, client=None, unique_writer_identity=False):
-        """API call: create the sink via a PUT request
-
-        See
-        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
-
-        :type client: :class:`~google.cloud.logging.client.Client` or
-                      ``NoneType``
-        :param client: the client to use. If not passed, falls back to the
-                       ``client`` stored on the current sink.
-
-        :type unique_writer_identity: bool
-        :param unique_writer_identity: (Optional) determines the kind of
-                                       IAM identity returned as
-                                       writer_identity in the new sink.
- """ - client = self._require_client(client) - resource = client.sinks_api.sink_create( - self.project, - self.name, - self.filter_, - self.destination, - unique_writer_identity=unique_writer_identity, - ) - self._update_from_api_repr(resource) - - def exists(self, client=None): - """API call: test for the existence of the sink via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :rtype: bool - :returns: Boolean indicating existence of the sink. - """ - client = self._require_client(client) - - try: - client.sinks_api.sink_get(self.project, self.name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local sink configuration via a GET request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - """ - client = self._require_client(client) - resource = client.sinks_api.sink_get(self.project, self.name) - self._update_from_api_repr(resource) - - def update(self, client=None, unique_writer_identity=False): - """API call: update sink configuration via a PUT request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. - - :type unique_writer_identity: bool - :param unique_writer_identity: (Optional) determines the kind of - IAM identity returned as - writer_identity in the new sink. - """ - client = self._require_client(client) - resource = client.sinks_api.sink_update( - self.project, - self.name, - self.filter_, - self.destination, - unique_writer_identity=unique_writer_identity, - ) - self._update_from_api_repr(resource) - - def delete(self, client=None): - """API call: delete a sink via a DELETE request - - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete - - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current sink. 
- """ - client = self._require_client(client) - client.sinks_api.sink_delete(self.project, self.name) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py index 964c99572fd6..98954d550294 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -14,32 +14,50 @@ from __future__ import absolute_import +import pkg_resources + +try: + __version__ = pkg_resources.get_distribution("google-cloud-logging").version +except pkg_resources.DistributionNotFound: + __version__ = None + + +from google.cloud.logging_v2.client import Client +from google.cloud.logging_v2.entries import logger_name_from_path +from google.cloud.logging_v2.entries import LogEntry +from google.cloud.logging_v2.entries import TextEntry +from google.cloud.logging_v2.entries import StructEntry +from google.cloud.logging_v2.entries import ProtobufEntry +from google.cloud.logging_v2 import handlers +from google.cloud.logging_v2.logger import Logger +from google.cloud.logging_v2.logger import Batch +from google.cloud.logging_v2.metric import Metric +from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.sink import Sink from google.cloud.logging_v2 import types -from google.cloud.logging_v2.gapic import config_service_v2_client -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import logging_service_v2_client -from google.cloud.logging_v2.gapic import metrics_service_v2_client -class LoggingServiceV2Client(logging_service_v2_client.LoggingServiceV2Client): - __doc__ = logging_service_v2_client.LoggingServiceV2Client.__doc__ - enums = enums - - -class ConfigServiceV2Client(config_service_v2_client.ConfigServiceV2Client): - __doc__ = config_service_v2_client.ConfigServiceV2Client.__doc__ - enums = enums - - -class MetricsServiceV2Client(metrics_service_v2_client.MetricsServiceV2Client): - __doc__ = metrics_service_v2_client.MetricsServiceV2Client.__doc__ - enums = enums +ASCENDING = "timestamp asc" +"""Query string to order by ascending timestamps.""" +DESCENDING = "timestamp desc" +"""Query string to order by decending timestamps.""" __all__ = ( - "enums", + "__version__", + "ASCENDING", + "Batch", + "Client", + "DESCENDING", + "handlers", + "logger_name_from_path", + "Logger", + "LogEntry", + "Metric", + "ProtobufEntry", + "Resource", + "Sink", + "StructEntry", + "TextEntry", "types", - "LoggingServiceV2Client", - "ConfigServiceV2Client", - "MetricsServiceV2Client", ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py new file mode 100644 index 000000000000..7a6d70650dff --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -0,0 +1,562 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Wrapper for adapting the autogenerated gapic client to the hand-written +client.""" + +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client +from google.cloud.logging_v2.types import CreateSinkRequest +from google.cloud.logging_v2.types import UpdateSinkRequest +from google.cloud.logging_v2.types import ListSinksRequest +from google.cloud.logging_v2.types import ListLogMetricsRequest +from google.cloud.logging_v2.types import ListLogEntriesRequest +from google.cloud.logging_v2.types import WriteLogEntriesRequest +from google.cloud.logging_v2.types import LogSink +from google.cloud.logging_v2.types import LogMetric +from google.cloud.logging_v2.types import LogEntry as LogEntryPB + +from google.protobuf.json_format import MessageToDict +from google.protobuf.json_format import ParseDict + +from google.cloud.logging_v2._helpers import entry_from_resource +from google.cloud.logging_v2.sink import Sink +from google.cloud.logging_v2.metric import Metric + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs.""" + + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api + self._client = client + + def list_entries( + self, + resource_names, + *, + filter_=None, + order_by=None, + page_size=None, + page_token=None, + ): + """Return a page of log entry resources. + + Args: + resource_names (Sequence[str]): Names of one or more parent resources + from which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + filter_ (str): a filter expression. See + https://cloud.google.com/logging/docs/view/advanced_filters + order_by (str) One of :data:`~logging_v2.ASCENDING` + or :data:`~logging_v2.DESCENDING`. + page_size (int): maximum number of entries to return, If not passed, + defaults to a value set by the API. + page_token (str): opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + Returns: + Iterator[~logging_v2.LogEntry] + """ + # full resource names are expected by the API + resource_names = resource_names + request = ListLogEntriesRequest( + resource_names=resource_names, + filter=filter_, + order_by=order_by, + page_size=page_size, + page_token=page_token, + ) + + response = self._gapic_api.list_log_entries(request=request) + page_iter = iter(response) + + # We attach a mutable loggers dictionary so that as Logger + # objects are created by entry_from_resource, they can be + # re-used by other log entries from the same logger. + loggers = {} + + def log_entries_pager(page_iter): + for page in page_iter: + log_entry_dict = _parse_log_entry(LogEntryPB.pb(page)) + yield entry_from_resource(log_entry_dict, self._client, loggers=loggers) + + return log_entries_pager(page_iter) + + def write_entries( + self, + entries, + *, + logger_name=None, + resource=None, + labels=None, + partial_success=False, + dry_run=False, + ): + """Log an entry resource via a POST request + + Args: + entries (Sequence[Mapping[str, ...]]): sequence of mappings representing + the log entry resources to log. + logger_name (Optional[str]): name of default logger to which to log the entries; + individual entries may override. 
+            resource(Optional[Mapping[str, ...]]): default resource to associate with entries;
+                individual entries may override.
+            labels (Optional[Mapping[str, ...]]): default labels to associate with entries;
+                individual entries may override.
+            partial_success (Optional[bool]): Whether valid entries should be written even if
+                some other entries fail due to INVALID_ARGUMENT or
+                PERMISSION_DENIED errors. If any entry is not written, then
+                the response status is the error associated with one of the
+                failed entries and the response includes error details keyed
+                by the entries' zero-based index in the ``entries.write``
+                method.
+            dry_run (Optional[bool]):
+                If true, the request should expect normal response,
+                but the entries won't be persisted nor exported.
+                Useful for checking whether the logging API endpoints are working
+                properly before sending valuable data.
+        """
+        log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries]
+
+        request = WriteLogEntriesRequest(
+            log_name=logger_name,
+            resource=resource,
+            labels=labels,
+            entries=log_entry_pbs,
+            partial_success=partial_success,
+        )
+        self._gapic_api.write_log_entries(request=request)
+
+    def logger_delete(self, logger_name):
+        """Delete all entries in a logger.
+
+        Args:
+            logger_name (str): The resource name of the log to delete:
+
+                ::
+
+                    "projects/[PROJECT_ID]/logs/[LOG_ID]"
+                    "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
+                    "folders/[FOLDER_ID]/logs/[LOG_ID]"
+
+                ``[LOG_ID]`` must be URL-encoded. For example,
+                ``"projects/my-project-id/logs/syslog"``,
+                ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``.
+        """
+        self._gapic_api.delete_log(log_name=logger_name)
+
+
+class _SinksAPI(object):
+    """Helper mapping sink-related APIs."""
+
+    def __init__(self, gapic_api, client):
+        self._gapic_api = gapic_api
+        self._client = client
+
+    def list_sinks(self, parent, *, page_size=0, page_token=None):
+        """List sinks for the parent resource.
+
+        Args:
+            parent (str): The parent resource whose sinks are to be listed:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+            page_size (Optional[int]): Maximum number of sinks to return. If not passed,
+                defaults to a value set by the API.
+            page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
+                passed, the API will return the first page of
+                sinks.
+
+        Returns:
+            Iterator[~logging_v2.Sink]
+        """
+        request = ListSinksRequest(
+            parent=parent, page_size=page_size, page_token=page_token
+        )
+        response = self._gapic_api.list_sinks(request)
+        page_iter = iter(response)
+
+        def sinks_pager(page_iter):
+            for page in page_iter:
+                # Convert the GAPIC sink type into the handwritten `Sink` type
+                yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client)
+
+        return sinks_pager(page_iter)
+
+    def sink_create(
+        self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
+    ):
+        """Create a sink resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
+
+        Args:
+            parent(str): The resource in which to create the sink,
+                including the parent resource and the sink identifier:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+            sink_name (str): The name of the sink.
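A sketch of driving the adapter's ``write_entries`` directly, assuming a constructed ``_LoggingAPI`` named ``api`` and a project ID ``my-project`` (both hypothetical). The mappings use JSON-style keys, which is what ``_log_entry_mapping_to_pb`` later parses with ``ParseDict``:

    entry = {
        "logName": "projects/my-project/logs/app",     # illustrative log name
        "resource": {"type": "global", "labels": {}},
        "textPayload": "hello from the gapic adapter",
    }
    api.write_entries([entry], partial_success=True)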
+            filter_ (str): The advanced logs filter expression defining the
+                entries exported by the sink.
+            destination (str): Destination URI for the entries exported by
+                the sink.
+            unique_writer_identity (Optional[bool]): determines the kind of
+                IAM identity returned as writer_identity in the new sink.
+
+        Returns:
+            dict: The sink resource returned from the API (converted from a
+                protobuf to a dictionary).
+        """
+        sink_pb = LogSink(name=sink_name, filter=filter_, destination=destination)
+        request = CreateSinkRequest(
+            parent=parent, sink=sink_pb, unique_writer_identity=unique_writer_identity
+        )
+        created_pb = self._gapic_api.create_sink(request=request)
+        return MessageToDict(
+            LogSink.pb(created_pb),
+            preserving_proto_field_name=False,
+            including_default_value_fields=False,
+        )
+
+    def sink_get(self, sink_name):
+        """Retrieve a sink resource.
+
+        Args:
+            sink_name (str): The resource name of the sink,
+                including the parent resource and the sink identifier:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+
+        Returns:
+            dict: The sink object returned from the API (converted from a
+                protobuf to a dictionary).
+        """
+        sink_pb = self._gapic_api.get_sink(sink_name=sink_name)
+        # NOTE: LogSink message type does not have an ``Any`` field
+        # so ``MessageToDict`` can safely be used.
+        return MessageToDict(
+            LogSink.pb(sink_pb),
+            preserving_proto_field_name=False,
+            including_default_value_fields=False,
+        )
+
+    def sink_update(
+        self, sink_name, filter_, destination, *, unique_writer_identity=False,
+    ):
+        """Update a sink resource.
+
+        Args:
+            sink_name (str): Required. The resource name of the sink,
+                including the parent resource and the sink identifier:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+            filter_ (str): The advanced logs filter expression defining the
+                entries exported by the sink.
+            destination (str): destination URI for the entries exported by
+                the sink.
+            unique_writer_identity (Optional[bool]): determines the kind of
+                IAM identity returned as writer_identity in the new sink.
+
+
+        Returns:
+            dict: The sink resource returned from the API (converted from a
+                protobuf to a dictionary).
+        """
+        name = sink_name.split("/")[-1]  # parse name out of full resource name
+        sink_pb = LogSink(name=name, filter=filter_, destination=destination,)
+
+        request = UpdateSinkRequest(
+            sink_name=sink_name,
+            sink=sink_pb,
+            unique_writer_identity=unique_writer_identity,
+        )
+        sink_pb = self._gapic_api.update_sink(request=request)
+        # NOTE: LogSink message type does not have an ``Any`` field
+        # so ``MessageToDict`` can safely be used.
+        return MessageToDict(
+            LogSink.pb(sink_pb),
+            preserving_proto_field_name=False,
+            including_default_value_fields=False,
+        )
+
+    def sink_delete(self, sink_name):
+        """Delete a sink resource.
+
+        Args:
+            sink_name (str): Required. The full resource name of the sink to delete,
+                including the parent resource and the sink identifier:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+
+                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
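The ``MessageToDict`` calls above are the seam between the proto-plus GAPIC types and the dict-based hand-written layer. A standalone sketch of the same round trip, with an illustrative sink:

    from google.protobuf.json_format import MessageToDict
    from google.cloud.logging_v2.types import LogSink

    sink_pb = LogSink(name="my-sink", destination="storage.googleapis.com/my-bucket")
    # proto-plus wrapper -> raw protobuf -> plain dict, as the adapter does above
    resource = MessageToDict(LogSink.pb(sink_pb), preserving_proto_field_name=False)
    assert resource["name"] == "my-sink"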
+ """ + self._gapic_api.delete_sink(sink_name=sink_name) + + +class _MetricsAPI(object): + """Helper mapping sink-related APIs. """ + + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api + self._client = client + + def list_metrics(self, project, *, page_size=0, page_token=None): + """List metrics for the project associated with this client. + + Args: + project (str): ID of the project whose metrics are to be listed. + page_size (int): Maximum number of metrics to return, If not passed, + defaults to a value set by the API. + page_token (str): Opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + sinks. + + Returns: + Iterable[logging_v2.Metric]: Iterable of metrics. + """ + path = f"projects/{project}" + request = ListLogMetricsRequest( + parent=path, page_size=page_size, page_token=page_token, + ) + response = self._gapic_api.list_log_metrics(request=request) + page_iter = iter(response) + + def metrics_pager(page_iter): + for page in page_iter: + # Convert GAPIC metrics type into handwritten `Metric` type + yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client) + + return metrics_pager(page_iter) + + def metric_create(self, project, metric_name, filter_, description): + """Create a metric resource. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create + + Args: + project (str): ID of the project in which to create the metric. + metric_name (str): The name of the metric + filter_ (str): The advanced logs filter expression defining the + entries exported by the metric. + description (str): description of the metric. + """ + parent = f"projects/{project}" + metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) + self._gapic_api.create_log_metric(parent=parent, metric=metric_pb) + + def metric_get(self, project, metric_name): + """Retrieve a metric resource. + + Args: + project (str): ID of the project containing the metric. + metric_name (str): The name of the metric + + Returns: + dict: The metric object returned from the API (converted from a + protobuf to a dictionary). + """ + path = f"projects/{project}/metrics/{metric_name}" + metric_pb = self._gapic_api.get_log_metric(metric_name=path) + # NOTE: LogMetric message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. + return MessageToDict( + LogMetric.pb(metric_pb), + preserving_proto_field_name=False, + including_default_value_fields=False, + ) + + def metric_update( + self, project, metric_name, filter_, description, + ): + """Update a metric resource. + + Args: + project (str): ID of the project containing the metric. + metric_name (str): the name of the metric + filter_ (str): the advanced logs filter expression defining the + entries exported by the metric. + description (str): description of the metric. + + Returns: + The metric object returned from the API (converted from a + protobuf to a dictionary). + """ + path = f"projects/{project}/metrics/{metric_name}" + metric_pb = LogMetric(name=path, filter=filter_, description=description,) + metric_pb = self._gapic_api.update_log_metric( + metric_name=path, metric=metric_pb + ) + # NOTE: LogMetric message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. + return MessageToDict( + LogMetric.pb(metric_pb), + preserving_proto_field_name=False, + including_default_value_fields=False, + ) + + def metric_delete(self, project, metric_name): + """Delete a metric resource. 
+
+        Args:
+            project (str): ID of the project containing the metric.
+            metric_name (str): The name of the metric
+        """
+        path = f"projects/{project}/metrics/{metric_name}"
+        self._gapic_api.delete_log_metric(metric_name=path)
+
+
+def _parse_log_entry(entry_pb):
+    """Special helper to parse ``LogEntry`` protobuf into a dictionary.
+
+    The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This
+    can be problematic if the type URL in the payload isn't in the
+    ``google.protobuf`` registry. To help with parsing unregistered types,
+    this function will remove ``proto_payload`` before parsing.
+
+    Args:
+        entry_pb (LogEntry): Log entry protobuf.
+
+    Returns:
+        dict: The parsed log entry. The ``protoPayload`` key may contain
+            the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if
+            it could not be parsed.
+    """
+    try:
+        return MessageToDict(
+            entry_pb,
+            preserving_proto_field_name=False,
+            including_default_value_fields=False,
+        )
+    except TypeError:
+        if entry_pb.HasField("proto_payload"):
+            proto_payload = entry_pb.proto_payload
+            entry_pb.ClearField("proto_payload")
+            entry_mapping = MessageToDict(
+                entry_pb,
+                preserving_proto_field_name=False,
+                including_default_value_fields=False,
+            )
+            entry_mapping["protoPayload"] = proto_payload
+            return entry_mapping
+        else:
+            raise
+
+
+def _log_entry_mapping_to_pb(mapping):
+    """Helper for :meth:`write_entries`, et aliae
+
+    Performs "impedance matching" between the protobuf attrs and
+    the keys expected in the JSON API.
+    """
+    entry_pb = LogEntryPB.pb(LogEntryPB())
+    # NOTE: We assume ``mapping`` was created in ``Batch.commit``
+    # or ``Logger._make_entry_resource``. In either case, if
+    # the ``protoPayload`` key is present, we assume that the
+    # type URL is registered with ``google.protobuf`` and will
+    # not cause any issues in the JSON->protobuf conversion
+    # of the corresponding ``proto_payload`` in the log entry
+    # (it is an ``Any`` field).
+    ParseDict(mapping, entry_pb)
+    return LogEntryPB(entry_pb)
+
+
+def make_logging_api(client):
+    """Create an instance of the Logging API adapter.
+
+    Args:
+        client (~logging_v2.client.Client): The client
+            that holds configuration details.
+
+    Returns:
+        _LoggingAPI: A logging API instance with the proper credentials.
+    """
+    generated = LoggingServiceV2Client(
+        credentials=client._credentials,
+        client_info=client._client_info,
+        client_options=client._client_options,
+    )
+    return _LoggingAPI(generated, client)
+
+
+def make_metrics_api(client):
+    """Create an instance of the Metrics API adapter.
+
+    Args:
+        client (~logging_v2.client.Client): The client
+            that holds configuration details.
+
+    Returns:
+        _MetricsAPI: A metrics API instance with the proper credentials.
+    """
+    generated = MetricsServiceV2Client(
+        credentials=client._credentials,
+        client_info=client._client_info,
+        client_options=client._client_options,
+    )
+    return _MetricsAPI(generated, client)
+
+
+def make_sinks_api(client):
+    """Create an instance of the Sinks API adapter.
+
+    Args:
+        client (~logging_v2.client.Client): The client
+            that holds configuration details.
+
+    Returns:
+        _SinksAPI: A sinks API instance with the proper credentials.
+ """ + generated = ConfigServiceV2Client( + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, + ) + return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py similarity index 65% rename from packages/google-cloud-logging/google/cloud/logging/_helpers.py rename to packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py index 37e890eadc3a..51cc6486836b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py @@ -22,13 +22,13 @@ import requests -from google.cloud.logging.entries import LogEntry -from google.cloud.logging.entries import ProtobufEntry -from google.cloud.logging.entries import StructEntry -from google.cloud.logging.entries import TextEntry +from google.cloud.logging_v2.entries import LogEntry +from google.cloud.logging_v2.entries import ProtobufEntry +from google.cloud.logging_v2.entries import StructEntry +from google.cloud.logging_v2.entries import TextEntry try: - from google.cloud.logging_v2.gapic.enums import LogSeverity + from google.cloud.logging_v2.types import LogSeverity except ImportError: # pragma: NO COVER class LogSeverity(object): @@ -64,31 +64,29 @@ class LogSeverity(object): def entry_from_resource(resource, client, loggers): """Detect correct entry type from resource and instantiate. - :type resource: dict - :param resource: One entry resource from API response. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: Client that owns the log entry. - - :type loggers: dict - :param loggers: - A mapping of logger fullnames -> loggers. If the logger - that owns the entry is not in ``loggers``, the entry - will have a newly-created logger. - - :rtype: :class:`~google.cloud.logging.entries._BaseEntry` - :returns: The entry instance, constructed via the resource + Args: + resource (dict): One entry resource from API response. + client (~logging_v2.client.Client): + Client that owns the log entry. + loggers (dict): + A mapping of logger fullnames -> loggers. If the logger + that owns the entry is not in ``loggers``, the entry + will have a newly-created logger. + + Returns: + google.cloud.logging_v2.entries._BaseEntry: + The entry instance, constructed via the resource """ if "textPayload" in resource: - return TextEntry.from_api_repr(resource, client, loggers) + return TextEntry.from_api_repr(resource, client, loggers=loggers) if "jsonPayload" in resource: - return StructEntry.from_api_repr(resource, client, loggers) + return StructEntry.from_api_repr(resource, client, loggers=loggers) if "protoPayload" in resource: - return ProtobufEntry.from_api_repr(resource, client, loggers) + return ProtobufEntry.from_api_repr(resource, client, loggers=loggers) - return LogEntry.from_api_repr(resource, client, loggers) + return LogEntry.from_api_repr(resource, client, loggers=loggers) def retrieve_metadata_server(metadata_key): @@ -96,13 +94,14 @@ def retrieve_metadata_server(metadata_key): See: https://cloud.google.com/compute/docs/storing-retrieving-metadata - :type metadata_key: str - :param metadata_key: Key of the metadata which will form the url. You can - also supply query parameters after the metadata key. - e.g. "tags?alt=json" + Args: + metadata_key (str): + Key of the metadata which will form the url. You can + also supply query parameters after the metadata key. 
+ e.g. "tags?alt=json" - :rtype: str - :returns: The value of the metadata key returned by the metadata server. + Returns: + str: The value of the metadata key returned by the metadata server. """ url = METADATA_URL + metadata_key @@ -123,11 +122,11 @@ def retrieve_metadata_server(metadata_key): def _normalize_severity(stdlib_level): """Normalize a Python stdlib severity to LogSeverity enum. - :type stdlib_level: int - :param stdlib_level: 'levelno' from a :class:`logging.LogRecord` + Args: + stdlib_level (int): 'levelno' from a :class:`logging.LogRecord` - :rtype: int - :returns: Corresponding Stackdriver severity. + Returns: + int: Corresponding Stackdriver severity. """ return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level) @@ -135,18 +134,18 @@ def _normalize_severity(stdlib_level): def _add_defaults_to_filter(filter_): """Modify the input filter expression to add sensible defaults. - :type filter_: str - :param filter_: The original filter expression + Args: + filter_ (str): The original filter expression - :rtype: str - :returns: sensible default filter string + Returns: + str: sensible default filter string """ # By default, requests should only return logs in the last 24 hours yesterday = datetime.now(timezone.utc) - timedelta(days=1) - time_filter = 'timestamp>="%s"' % yesterday.strftime(_TIME_FORMAT) + time_filter = f'timestamp>="{yesterday.strftime(_TIME_FORMAT)}"' if filter_ is None: filter_ = time_filter elif "timestamp" not in filter_.lower(): - filter_ = "%s AND %s" % (filter_, time_filter) + filter_ = f"{filter_} AND {time_filter}" return filter_ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py new file mode 100644 index 000000000000..68bde346a119 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -0,0 +1,525 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Interact with Cloud Logging via JSON-over-HTTP.""" + +import functools + +from google.api_core import page_iterator +from google.cloud import _http + +from google.cloud.logging_v2 import __version__ +from google.cloud.logging_v2._helpers import entry_from_resource +from google.cloud.logging_v2.sink import Sink +from google.cloud.logging_v2.metric import Metric + + +class Connection(_http.JSONConnection): + + DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" + + def __init__(self, client, *, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): + """A connection to Google Cloud Logging via the JSON REST API. + + Args: + client (google.cloud.logging_v2.cliet.Client): + The client that owns the current connection. + client_info (Optional[google.api_core.client_info.ClientInfo]): + Instance used to generate user agent. + client_options (Optional[google.api_core.client_options.ClientOptions]): + Client options used to set user options + on the client. API Endpoint should be set through client_options. 
+ """ + super(Connection, self).__init__(client, client_info) + self.API_BASE_URL = api_endpoint + self._client_info.gapic_version = __version__ + self._client_info.client_library_version = __version__ + + API_VERSION = "v2" + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = "{api_base_url}/{api_version}{path}" + """A template for the URL of a particular API call.""" + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs + + :type client: :class:`~google.cloud.logging.client.Client` + :param client: The client used to make API requests. + """ + + def __init__(self, client): + self._client = client + self.api_request = client._connection.api_request + + def list_entries( + self, + resource_names, + *, + filter_=None, + order_by=None, + page_size=None, + page_token=None, + ): + """Return a page of log entry resources. + + Args: + resource_names (Sequence[str]): Names of one or more parent resources + from which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + filter_ (str): a filter expression. See + https://cloud.google.com/logging/docs/view/advanced_filters + order_by (str) One of :data:`~logging_v2.ASCENDING` + or :data:`~logging_v2.DESCENDING`. + page_size (int): maximum number of entries to return, If not passed, + defaults to a value set by the API. + page_token (str): opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + Returns: + Iterator[~logging_v2.LogEntry] + """ + extra_params = {"resourceNames": resource_names} + + if filter_ is not None: + extra_params["filter"] = filter_ + + if order_by is not None: + extra_params["orderBy"] = order_by + + if page_size is not None: + extra_params["pageSize"] = page_size + + path = "/entries:list" + # We attach a mutable loggers dictionary so that as Logger + # objects are created by entry_from_resource, they can be + # re-used by other log entries from the same logger. + loggers = {} + item_to_value = functools.partial(_item_to_entry, loggers=loggers) + iterator = page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=item_to_value, + items_key="entries", + page_token=page_token, + extra_params=extra_params, + ) + # This method uses POST to make a read-only request. + iterator._HTTP_METHOD = "POST" + return iterator + + def write_entries( + self, + entries, + *, + logger_name=None, + resource=None, + labels=None, + partial_success=False, + dry_run=False, + ): + """Log an entry resource via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + entries (Sequence[Mapping[str, ...]]): sequence of mappings representing + the log entry resources to log. + logger_name (Optional[str]): name of default logger to which to log the entries; + individual entries may override. + resource(Optional[Mapping[str, ...]]): default resource to associate with entries; + individual entries may override. + labels (Optional[Mapping[str, ...]]): default labels to associate with entries; + individual entries may override. 
+            partial_success (Optional[bool]): Whether valid entries should be written even if
+                some other entries fail due to INVALID_ARGUMENT or
+                PERMISSION_DENIED errors. If any entry is not written, then
+                the response status is the error associated with one of the
+                failed entries and the response includes error details keyed
+                by the entries' zero-based index in the ``entries.write``
+                method.
+            dry_run (Optional[bool]):
+                If true, the request should expect normal response,
+                but the entries won't be persisted nor exported.
+                Useful for checking whether the logging API endpoints are working
+                properly before sending valuable data.
+        """
+        data = {
+            "entries": list(entries),
+            "partialSuccess": partial_success,
+            "dry_run": dry_run,
+        }
+
+        if logger_name is not None:
+            data["logName"] = logger_name
+
+        if resource is not None:
+            data["resource"] = resource
+
+        if labels is not None:
+            data["labels"] = labels
+
+        self.api_request(method="POST", path="/entries:write", data=data)
+
+    def logger_delete(self, logger_name):
+        """Delete all entries in a logger.
+
+        Args:
+            logger_name (str): The resource name of the log to delete:
+
+                ::
+
+                    "projects/[PROJECT_ID]/logs/[LOG_ID]"
+                    "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
+                    "folders/[FOLDER_ID]/logs/[LOG_ID]"
+
+                ``[LOG_ID]`` must be URL-encoded. For example,
+                ``"projects/my-project-id/logs/syslog"``,
+                ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``.
+        """
+        path = f"/{logger_name}"
+        self.api_request(method="DELETE", path=path)
+
+
+class _SinksAPI(object):
+    """Helper mapping sink-related APIs.
+
+    See
+    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+    """
+
+    def __init__(self, client):
+        self._client = client
+        self.api_request = client._connection.api_request
+
+    def list_sinks(self, parent, *, page_size=None, page_token=None):
+        """List sinks for the parent resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list
+
+        Args:
+            parent (str): The parent resource whose sinks are to be listed:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+            page_size (Optional[int]): Maximum number of sinks to return. If not passed,
+                defaults to a value set by the API.
+            page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
+                passed, the API will return the first page of
+                sinks.
+
+        Returns:
+            Iterator[~logging_v2.Sink]
+        """
+        extra_params = {}
+
+        if page_size is not None:
+            extra_params["pageSize"] = page_size
+
+        path = f"/{parent}/sinks"
+        return page_iterator.HTTPIterator(
+            client=self._client,
+            api_request=self._client._connection.api_request,
+            path=path,
+            item_to_value=_item_to_sink,
+            items_key="sinks",
+            page_token=page_token,
+            extra_params=extra_params,
+        )
+
+    def sink_create(
+        self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
+    ):
+        """Create a sink resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
+
+        Args:
+            parent(str): The resource in which to create the sink:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+            sink_name (str): The name of the sink.
+            filter_ (str): The advanced logs filter expression defining the
+                entries exported by the sink.
+            destination (str): Destination URI for the entries exported by
+                the sink.
+            unique_writer_identity (Optional[bool]): determines the kind of
+                IAM identity returned as writer_identity in the new sink.
+
+        Returns:
+            dict: The sink resource returned from the API.
+        """
+        target = f"/{parent}/sinks"
+        data = {"name": sink_name, "filter": filter_, "destination": destination}
+        query_params = {"uniqueWriterIdentity": unique_writer_identity}
+        return self.api_request(
+            method="POST", path=target, data=data, query_params=query_params
+        )
+
+    def sink_get(self, sink_name):
+        """Retrieve a sink resource.
+
+        Args:
+            sink_name (str): The resource name of the sink:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+
+        Returns:
+            dict: The JSON sink object returned from the API.
+        """
+        target = f"/{sink_name}"
+        return self.api_request(method="GET", path=target)
+
+    def sink_update(
+        self, sink_name, filter_, destination, *, unique_writer_identity=False
+    ):
+        """Update a sink resource.
+
+        Args:
+            sink_name (str): Required. The resource name of the sink:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+            filter_ (str): The advanced logs filter expression defining the
+                entries exported by the sink.
+            destination (str): destination URI for the entries exported by
+                the sink.
+            unique_writer_identity (Optional[bool]): determines the kind of
+                IAM identity returned as writer_identity in the new sink.
+
+
+        Returns:
+            dict: The returned (updated) resource.
+        """
+        target = f"/{sink_name}"
+        name = sink_name.split("/")[-1]  # parse name out of full resource name
+        data = {"name": name, "filter": filter_, "destination": destination}
+        query_params = {"uniqueWriterIdentity": unique_writer_identity}
+        return self.api_request(
+            method="PUT", path=target, query_params=query_params, data=data
+        )
+
+    def sink_delete(self, sink_name):
+        """Delete a sink resource.
+
+        Args:
+            sink_name (str): Required. The full resource name of the sink to delete,
+                including the parent resource and the sink identifier:
+
+                ::
+
+                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
+                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
+                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
+
+                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
+        """
+        target = f"/{sink_name}"
+        self.api_request(method="DELETE", path=target)
+
+
+class _MetricsAPI(object):
+    """Helper mapping metric-related APIs."""
+
+    def __init__(self, client):
+        self._client = client
+        self.api_request = client._connection.api_request
+
+    def list_metrics(self, project, *, page_size=None, page_token=None):
+        """List metrics for the project associated with this client.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list
+
+        Args:
+            page_size (Optional[int]): The maximum number of metrics in each
+                page of results from this request. Non-positive values are ignored. Defaults to a
+                sensible value set by the API.
+            page_token (Optional[str]): If present, return the next batch of metrics, using the
+                value, which must correspond to the ``nextPageToken`` value
+                returned in the previous response. Deprecated: use the ``pages``
+                property of the returned iterator instead of manually passing the
+                token.
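The ``pages`` idiom the docstring above recommends over raw page tokens, assuming a constructed ``_MetricsAPI`` named ``api`` and a hypothetical project ID:

    iterator = api.list_metrics("my-project")   # returns an HTTPIterator
    for page in iterator.pages:                 # one API request per page
        for metric in page:
            print(metric.name)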
+
+        Returns:
+            Iterator[google.cloud.logging_v2.metric.Metric]
+        """
+        extra_params = {}
+
+        if page_size is not None:
+            extra_params["pageSize"] = page_size
+
+        path = f"/projects/{project}/metrics"
+        return page_iterator.HTTPIterator(
+            client=self._client,
+            api_request=self._client._connection.api_request,
+            path=path,
+            item_to_value=_item_to_metric,
+            items_key="metrics",
+            page_token=page_token,
+            extra_params=extra_params,
+        )
+
+    def metric_create(self, project, metric_name, filter_, description):
+        """Create a metric resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create
+
+        Args:
+            project (str): ID of the project in which to create the metric.
+            metric_name (str): The name of the metric
+            filter_ (str): The advanced logs filter expression defining the
+                entries exported by the metric.
+            description (str): description of the metric.
+        """
+        target = f"/projects/{project}/metrics"
+        data = {"name": metric_name, "filter": filter_, "description": description}
+        self.api_request(method="POST", path=target, data=data)
+
+    def metric_get(self, project, metric_name):
+        """Retrieve a metric resource.
+
+        Args:
+            project (str): ID of the project containing the metric.
+            metric_name (str): The name of the metric
+
+        Returns:
+            dict: The JSON metric object returned from the API.
+        """
+        target = f"/projects/{project}/metrics/{metric_name}"
+        return self.api_request(method="GET", path=target)
+
+    def metric_update(self, project, metric_name, filter_, description):
+        """Update a metric resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update
+
+        Args:
+            project (str): ID of the project containing the metric.
+            metric_name (str): the name of the metric
+            filter_ (str): the advanced logs filter expression defining the
+                entries exported by the metric.
+            description (str): description of the metric.
+
+        Returns:
+            dict: The returned (updated) resource.
+        """
+        target = f"/projects/{project}/metrics/{metric_name}"
+        data = {"name": metric_name, "filter": filter_, "description": description}
+        return self.api_request(method="PUT", path=target, data=data)
+
+    def metric_delete(self, project, metric_name):
+        """Delete a metric resource.
+
+        Args:
+            project (str): ID of the project containing the metric.
+            metric_name (str): The name of the metric
+        """
+        target = f"/projects/{project}/metrics/{metric_name}"
+        self.api_request(method="DELETE", path=target)
+
+
+def _item_to_entry(iterator, resource, loggers):
+    """Convert a log entry resource to the native object.
+
+    .. note::
+
+        This method does not have the correct signature to be used as
+        the ``item_to_value`` argument to
+        :class:`~google.api_core.page_iterator.Iterator`. It is intended to be
+        patched with a mutable ``loggers`` argument that can be updated
+        on subsequent calls. For an example, see how the method is
+        used above in :meth:`_LoggingAPI.list_entries`.
+
+    Args:
+        iterator (google.api_core.page_iterator.Iterator): The iterator that
+            is currently in use.
+        resource (dict): Log entry JSON resource returned from the API.
+        loggers (Mapping[str, logging_v2.logger.Logger]):
+            A mapping of logger fullnames -> loggers. If the logger
+            that owns the entry is not in ``loggers``, the entry
+            will have a newly-created logger.
+
+    Returns:
+        ~logging_v2.entries._BaseEntry: The next log entry in the page.
+ """ + return entry_from_resource(resource, iterator.client, loggers) + + +def _item_to_sink(iterator, resource): + """Convert a sink resource to the native object. + + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that + is currently in use. + resource (dict): Sink JSON resource returned from the API. + + Returns: + ~logging_v2.sink.Sink: The next sink in the page. + """ + return Sink.from_api_repr(resource, iterator.client) + + +def _item_to_metric(iterator, resource): + """Convert a metric resource to the native object. + + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that + is currently in use. + resource (dict): Sink JSON resource returned from the API. + + Returns: + ~logging_v2.metric.Metric: + The next metric in the page. + """ + return Metric.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py new file mode 100644 index 000000000000..ee65d288a093 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -0,0 +1,384 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud Logging API.""" + +import logging +import os + +try: + from google.cloud.logging_v2 import _gapic +except ImportError: # pragma: NO COVER + _HAVE_GRPC = False + _gapic = None +else: + _HAVE_GRPC = True + +import google.api_core.client_options +from google.cloud.client import ClientWithProject +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.logging_v2._helpers import _add_defaults_to_filter +from google.cloud.logging_v2._helpers import retrieve_metadata_server +from google.cloud.logging_v2._http import Connection +from google.cloud.logging_v2._http import _LoggingAPI as JSONLoggingAPI +from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI +from google.cloud.logging_v2._http import _SinksAPI as JSONSinksAPI +from google.cloud.logging_v2.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers import AppEngineHandler +from google.cloud.logging_v2.handlers import ContainerEngineHandler +from google.cloud.logging_v2.handlers import setup_logging +from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS + +from google.cloud.logging_v2.logger import Logger +from google.cloud.logging_v2.metric import Metric +from google.cloud.logging_v2.sink import Sink + + +_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC + +_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME" +"""Environment variable set in App Engine when vm:true is set.""" + +_APPENGINE_INSTANCE_ID = "GAE_INSTANCE" +"""Environment variable set in App Engine standard and flexible environment.""" + +_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" +"""Attribute in metadata server when in GKE environment.""" + + +class Client(ClientWithProject): + """Client 
to bundle configuration needed for API requests."""
+
+    _logging_api = None
+    _sinks_api = None
+    _metrics_api = None
+
+    SCOPE = (
+        "https://www.googleapis.com/auth/logging.read",
+        "https://www.googleapis.com/auth/logging.write",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/cloud-platform",
+    )
+    """The scopes required for authenticating as a Logging consumer."""
+
+    def __init__(
+        self,
+        *,
+        project=None,
+        credentials=None,
+        _http=None,
+        _use_grpc=None,
+        client_info=None,
+        client_options=None,
+    ):
+        """
+        Args:
+            project (Optional[str]): The project which the client acts on behalf of.
+                If not passed, falls back to the default inferred
+                from the environment.
+            credentials (Optional[google.auth.credentials.Credentials]):
+                The OAuth2 Credentials to use for this
+                client. If not passed (and if no ``_http`` object is
+                passed), falls back to the default inferred from the
+                environment.
+            _http (Optional[requests.Session]): HTTP object to make requests.
+                Can be any object that defines ``request()`` with the same interface as
+                :meth:`requests.Session.request`. If not passed, an
+                ``_http`` object is created that is bound to the
+                ``credentials`` for the current object.
+                This parameter should be considered private, and could
+                change in the future.
+            _use_grpc (Optional[bool]): Explicitly specifies whether
+                to use the gRPC transport or HTTP. If unset,
+                falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC``
+                environment variable.
+                This parameter should be considered private, and could
+                change in the future.
+            client_info (Optional[Union[google.api_core.client_info.ClientInfo, google.api_core.gapic_v1.client_info.ClientInfo]]):
+                The client info used to send a user-agent string along with API
+                requests. If ``None``, then default info will be used. Generally,
+                you only need to set this if you're developing your own library
+                or partner tool.
+            client_options (Optional[Union[dict, google.api_core.client_options.ClientOptions]]):
+                Client options used to set user options
+                on the client. The API endpoint should be set through client_options.
+        """
+        super(Client, self).__init__(
+            project=project,
+            credentials=credentials,
+            _http=_http,
+            client_options=client_options,
+        )
+
+        kw_args = {"client_info": client_info}
+        if client_options:
+            if isinstance(client_options, dict):
+                client_options = google.api_core.client_options.from_dict(
+                    client_options
+                )
+            if client_options.api_endpoint:
+                api_endpoint = client_options.api_endpoint
+                kw_args["api_endpoint"] = api_endpoint
+
+        self._connection = Connection(self, **kw_args)
+
+        self._client_info = client_info
+        self._client_options = client_options
+        if _use_grpc is None:
+            self._use_grpc = _USE_GRPC
+        else:
+            self._use_grpc = _use_grpc
+
+    @property
+    def logging_api(self):
+        """Helper for logging-related API calls.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs
+        """
+        if self._logging_api is None:
+            if self._use_grpc:
+                self._logging_api = _gapic.make_logging_api(self)
+            else:
+                self._logging_api = JSONLoggingAPI(self)
+        return self._logging_api
+
+    @property
+    def sinks_api(self):
+        """Helper for log sink-related API calls.
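+
+        Example (illustrative sketch only; the project ID is hypothetical
+        and default credentials are assumed):
+
+        >>> client = Client(project="my-project")
+        >>> for sink in client.sinks_api.list_sinks(parent="projects/my-project"):
+        ...     print(sink.name)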
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+        """
+        if self._sinks_api is None:
+            if self._use_grpc:
+                self._sinks_api = _gapic.make_sinks_api(self)
+            else:
+                self._sinks_api = JSONSinksAPI(self)
+        return self._sinks_api
+
+    @property
+    def metrics_api(self):
+        """Helper for log metric-related API calls.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics
+        """
+        if self._metrics_api is None:
+            if self._use_grpc:
+                self._metrics_api = _gapic.make_metrics_api(self)
+            else:
+                self._metrics_api = JSONMetricsAPI(self)
+        return self._metrics_api
+
+    def logger(self, name):
+        """Creates a logger bound to the current client.
+
+        Args:
+            name (str): The name of the logger to be constructed.
+
+        Returns:
+            ~logging_v2.logger.Logger: Logger created with the current client.
+        """
+        return Logger(name, client=self)
+
+    def list_entries(
+        self,
+        *,
+        resource_names=None,
+        filter_=None,
+        order_by=None,
+        page_size=None,
+        page_token=None,
+    ):
+        """Return a page of log entry resources.
+
+        Args:
+            resource_names (Sequence[str]): Names of one or more parent resources
+                from which to retrieve log entries:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]"
+
+                If not passed, defaults to the project bound to the API's client.
+
+            filter_ (str): A filter expression. See
+                https://cloud.google.com/logging/docs/view/advanced_filters
+            order_by (str): One of :data:`~logging_v2.ASCENDING`
+                or :data:`~logging_v2.DESCENDING`.
+            page_size (int): Maximum number of entries to return. If not passed,
+                defaults to a value set by the API.
+            page_token (str): Opaque marker for the next "page" of entries. If not
+                passed, the API will return the first page of
+                entries.
+
+        Returns:
+            Iterator[~logging_v2.LogEntry]
+        """
+        if resource_names is None:
+            resource_names = [f"projects/{self.project}"]
+        filter_ = _add_defaults_to_filter(filter_)
+
+        return self.logging_api.list_entries(
+            resource_names=resource_names,
+            filter_=filter_,
+            order_by=order_by,
+            page_size=page_size,
+            page_token=page_token,
+        )
+
+    def sink(self, name, *, filter_=None, destination=None):
+        """Creates a sink bound to the current client.
+
+        Args:
+            name (str): The name of the sink to be constructed.
+            filter_ (Optional[str]): The advanced logs filter expression
+                defining the entries exported by the sink. If not
+                passed, the instance should already exist, to be
+                refreshed via :meth:`Sink.reload`.
+            destination (Optional[str]): Destination URI for the entries exported by
+                the sink. If not passed, the instance should
+                already exist, to be refreshed via
+                :meth:`Sink.reload`.
+
+        Returns:
+            ~logging_v2.sink.Sink: Sink created with the current client.
+        """
+        return Sink(name, filter_=filter_, destination=destination, client=self)
+
+    def list_sinks(self, *, parent=None, page_size=None, page_token=None):
+        """List sinks for a parent resource.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list
+
+        Args:
+            parent (Optional[str]): The parent resource whose sinks are to be listed:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+
+                If not passed, defaults to the project bound to the API's client.
+            page_size (Optional[int]): The maximum number of sinks in each
+                page of results from this request. Non-positive values are ignored.
Defaults to a
+                sensible value set by the API.
+            page_token (Optional[str]): If present, return the next batch of sinks, using the
+                value, which must correspond to the ``nextPageToken`` value
+                returned in the previous response. Deprecated: use the ``pages``
+                property of the returned iterator instead of manually passing the
+                token.
+
+        Returns:
+            Iterator[~logging_v2.sink.Sink]
+        """
+        if parent is None:
+            parent = f"projects/{self.project}"
+        return self.sinks_api.list_sinks(
+            parent=parent, page_size=page_size, page_token=page_token
+        )
+
+    def metric(self, name, *, filter_=None, description=""):
+        """Creates a metric bound to the current client.
+
+        Args:
+            name (str): The name of the metric to be constructed.
+            filter_ (Optional[str]): The advanced logs filter expression defining the
+                entries tracked by the metric. If not
+                passed, the instance should already exist, to be
+                refreshed via :meth:`Metric.reload`.
+            description (Optional[str]): The description of the metric to be constructed.
+                If not passed, the instance should already exist,
+                to be refreshed via :meth:`Metric.reload`.
+
+        Returns:
+            ~logging_v2.metric.Metric: Metric created with the current client.
+        """
+        return Metric(name, filter_=filter_, client=self, description=description)
+
+    def list_metrics(self, *, page_size=None, page_token=None):
+        """List metrics for the project associated with this client.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list
+
+        Args:
+            page_size (Optional[int]): The maximum number of metrics in each
+                page of results from this request. Non-positive values are ignored. Defaults to a
+                sensible value set by the API.
+            page_token (Optional[str]): If present, return the next batch of
+                metrics, using the value, which must correspond to the ``nextPageToken`` value
+                returned in the previous response. Deprecated: use the ``pages``
+                property of the returned iterator instead of manually passing the
+                token.
+
+        Returns:
+            Iterator[~logging_v2.metric.Metric]
+        """
+        return self.metrics_api.list_metrics(
+            self.project, page_size=page_size, page_token=page_token
+        )
+
+    def get_default_handler(self, **kw):
+        """Return the default logging handler based on the local environment.
+
+        Args:
+            kw (dict): Keyword arguments passed to the handler constructor.
+
+        Returns:
+            logging.Handler: The default log handler based on the environment
+        """
+        gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
+
+        if (
+            _APPENGINE_FLEXIBLE_ENV_VM in os.environ
+            or _APPENGINE_INSTANCE_ID in os.environ
+        ):
+            return AppEngineHandler(self, **kw)
+        elif gke_cluster_name is not None:
+            return ContainerEngineHandler(**kw)
+        else:
+            return CloudLoggingHandler(self, **kw)
+
+    def setup_logging(
+        self, *, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw
+    ):
+        """Attach default Cloud Logging handler to the root logger.
+
+        This method uses the default log handler, obtained by
+        :meth:`~get_default_handler`, and attaches it to the root Python
+        logger, so that a call such as ``logging.warning``, as well as all child
+        loggers, will report to Cloud Logging.
+
+        Args:
+            log_level (Optional[int]): Python logging log level. Defaults to
+                :const:`logging.INFO`.
+            excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the
+                handler to. This will always include the
+                loggers in the path of the logging client
+                itself.
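+
+        Example (minimal sketch; assumes application default credentials
+        are available in the environment):
+
+        >>> import logging
+        >>> client = Client()
+        >>> client.setup_logging(log_level=logging.WARNING)
+        >>> logging.warning("now reported to Cloud Logging")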
+ Returns: + dict: keyword args passed to handler constructor + """ + handler = self.get_default_handler(**kw) + setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) diff --git a/packages/google-cloud-logging/google/cloud/logging/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py similarity index 72% rename from packages/google-cloud-logging/google/cloud/logging/entries.py rename to packages/google-cloud-logging/google/cloud/logging_v2/entries.py index 3847102dc504..87e042018f5c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Log entries within the Google Stackdriver Logging API.""" +"""Log entries within the Google Cloud Logging API.""" import collections import json @@ -22,7 +22,7 @@ from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse -from google.cloud.logging.resource import Resource +from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _name_from_project_path from google.cloud._helpers import _rfc3339_nanos_to_datetime from google.cloud._helpers import _datetime_to_rfc3339 @@ -45,14 +45,15 @@ def logger_name_from_path(path): """Validate a logger URI path and get the logger name. - :type path: str - :param path: URI path for a logger API request. + Args: + path (str): URI path for a logger API request - :rtype: str - :returns: Logger name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. + Returns: + str: Logger name parsed from ``path``. + + Raises: + ValueError: If the ``path`` is ill-formed of if the project + from ``path`` does not agree with the ``project`` passed in. """ return _name_from_project_path(path, None, _LOGGER_TEMPLATE) @@ -91,50 +92,28 @@ def _int_or_none(value): _LOG_ENTRY_PARAM_DOCSTRING = """\ - :type log_name: str - :param log_name: the name of the logger used to post the entry. - - :type labels: dict - :param labels: (optional) mapping of labels for the entry - - :type insert_id: text - :param insert_id: (optional) the ID used to identify an entry uniquely. - - :type severity: str - :param severity: (optional) severity of event being logged. - - :type http_request: dict - :param http_request: (optional) info about HTTP request associated with - the entry. - - :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp for the entry - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry - - :type trace: str - :param trace: (optional) traceid to apply to the entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. - - :type trace_sampled: bool - :param trace_sampled: (optional) the sampling decision of the trace - associated with the log entry. - - :type source_location: dict - :param source_location: (optional) location in source code from which - the entry was emitted. - - :type operation: dict - :param operation: (optional) additional information about a potentially - long-running operation associated with the log entry. 
-
-    :type logger: :class:`google.cloud.logging.logger.Logger`
-    :param logger: the logger used to write the entry.
-
+    Args:
+        log_name (str): The name of the logger used to post the entry.
+        labels (Optional[dict]): Mapping of labels for the entry.
+        insert_id (Optional[str]): The ID used to identify an entry
+            uniquely.
+        severity (Optional[str]): The severity of the event being logged.
+        http_request (Optional[dict]): Info about HTTP request associated
+            with the entry.
+        timestamp (Optional[datetime.datetime]): Timestamp for the entry.
+        resource (Optional[google.cloud.logging_v2.resource.Resource]):
+            Monitored resource of the entry.
+        trace (Optional[str]): Trace ID to apply to the entry.
+        span_id (Optional[str]): Span ID within the trace for the log
+            entry. Specify the trace parameter if ``span_id`` is set.
+        trace_sampled (Optional[bool]): The sampling decision of the trace
+            associated with the log entry.
+        source_location (Optional[dict]): Location in source code from which
+            the entry was emitted.
+        operation (Optional[dict]): Additional information about a potentially
+            long-running operation associated with the log entry.
+        logger (logging_v2.logger.Logger): The logger used
+            to write the entry.
 """
 
 _LOG_ENTRY_SEE_ALSO_DOCSTRING = """\
@@ -162,24 +141,20 @@ def _extract_payload(cls, resource):
         return None
 
     @classmethod
-    def from_api_repr(cls, resource, client, loggers=None):
-        """Factory: construct an entry given its API representation
-
-        :type resource: dict
-        :param resource: text entry resource representation returned from
-            the API
-
-        :type client: :class:`google.cloud.logging.client.Client`
-        :param client: Client which holds credentials and project
-            configuration.
-
-        :type loggers: dict
-        :param loggers:
-            (Optional) A mapping of logger fullnames -> loggers.  If not
-            passed, the entry will have a newly-created logger.
-
-        :rtype: :class:`google.cloud.logging.entries.LogEntry`
-        :returns: Log entry parsed from ``resource``.
+    def from_api_repr(cls, resource, client, *, loggers=None):
+        """Construct an entry given its API representation.
+
+        Args:
+            resource (dict): The entry resource representation returned from
+                the API.
+            client (~logging_v2.client.Client):
+                Client which holds credentials and project configuration.
+            loggers (Optional[dict]):
+                A mapping of logger fullnames -> loggers.  If not
+                passed, the entry will have a newly-created logger.
+
+        Returns:
+            google.cloud.logging_v2.entries.LogEntry: Log entry parsed from ``resource``.
         """
         if loggers is None:
             loggers = {}
@@ -272,8 +247,7 @@ class TextEntry(LogEntry):
     + _LOG_ENTRY_PARAM_DOCSTRING
     + """
 
-    :type payload: str | unicode
-    :param payload: payload for the log entry.
+    payload (str): Payload for the log entry.
 """
     + _LOG_ENTRY_SEE_ALSO_DOCSTRING
 )
@@ -299,8 +273,7 @@ class StructEntry(LogEntry):
     + _LOG_ENTRY_PARAM_DOCSTRING
     + """
 
-    :type payload: dict
-    :param payload: payload for the log entry.
+    payload (dict): Payload for the log entry.
 """
     + _LOG_ENTRY_SEE_ALSO_DOCSTRING
 )
@@ -326,8 +299,7 @@ class ProtobufEntry(LogEntry):
     + _LOG_ENTRY_PARAM_DOCSTRING
     + """
 
-    :type payload: protobuf message
-    :param payload: payload for the log entry.
+    payload (google.protobuf.Message): Payload for the log entry.
 """
     + _LOG_ENTRY_SEE_ALSO_DOCSTRING
 )
@@ -358,8 +330,8 @@ def parse_message(self, message):
 
         Mutates the passed-in ``message`` in place.
- :type message: Protobuf message - :param message: the message to be logged + Args: + message (google.protobuf.Message): the message to be logged """ # NOTE: This assumes that ``payload`` is already a deserialized # ``Any`` field and ``message`` has come from an imported diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py deleted file mode 100644 index 37dafa34ac0e..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ /dev/null @@ -1,1442 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 ConfigServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.logging_v2.gapic import config_service_v2_client_config -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class ConfigServiceV2Client(object): - """Service for configuring sinks used to route log entries.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.logging.v2.ConfigServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2Client: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def billing_exclusion_path(cls, billing_account, exclusion): - """Return a fully-qualified billing_exclusion string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/exclusions/{exclusion}", - billing_account=billing_account, - exclusion=exclusion, - ) - - @classmethod - def billing_sink_path(cls, billing_account, sink): - """Return a fully-qualified billing_sink string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/sinks/{sink}", - billing_account=billing_account, - sink=sink, - ) - - @classmethod - def exclusion_path(cls, project, exclusion): - """Return a fully-qualified exclusion string.""" - return google.api_core.path_template.expand( - "projects/{project}/exclusions/{exclusion}", - project=project, - exclusion=exclusion, - ) - - @classmethod - def folder_path(cls, folder): - """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def folder_exclusion_path(cls, folder, exclusion): - """Return a fully-qualified folder_exclusion string.""" - return google.api_core.path_template.expand( - "folders/{folder}/exclusions/{exclusion}", - folder=folder, - exclusion=exclusion, - ) - - @classmethod - def folder_sink_path(cls, folder, sink): - """Return a fully-qualified folder_sink string.""" - return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, - ) - - @classmethod - def organization_path(cls, organization): - """Return a fully-qualified organization string.""" - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def organization_exclusion_path(cls, organization, exclusion): - """Return a fully-qualified organization_exclusion string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/exclusions/{exclusion}", - organization=organization, - exclusion=exclusion, - ) - - @classmethod - def organization_sink_path(cls, organization, sink): - """Return a fully-qualified organization_sink string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/sinks/{sink}", - organization=organization, - sink=sink, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def sink_path(cls, project, sink): - """Return a fully-qualified sink string.""" - return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", project=project, sink=sink, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.ConfigServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.ConfigServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. 
- The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = config_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_sinks( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists sinks. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_sinks(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_sinks(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource whose sinks are to be listed: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_sinks" not in self._inner_api_calls: - self._inner_api_calls[ - "list_sinks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_sinks, - default_retry=self._method_configs["ListSinks"].retry, - default_timeout=self._method_configs["ListSinks"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_sinks"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="sinks", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_sink( - self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a sink. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> response = client.get_sink(sink_name) - - Args: - sink_name (str): Required. The resource name of the sink: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "get_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_sink, - default_retry=self._method_configs["GetSink"].retry, - default_timeout=self._method_configs["GetSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_sink( - self, - parent, - sink, - unique_writer_identity=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the - sink's ``writer_identity`` is not permitted to write to the destination. - A sink can export log entries only from the resource owning the sink. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `sink`: - >>> sink = {} - >>> - >>> response = client.create_sink(parent, sink) - - Args: - parent (str): Required. The resource in which to create the sink: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier - that is not already in use. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is omitted or set to - false, and if the sink's parent is a project, then the value returned as - ``writer_identity`` is the same group or service account used by Logging - before the addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. - - If this field is set to true, or if the sink is owned by a non-project - resource such as an organization, then the value of ``writer_identity`` - will be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in ``LogSink``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "create_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_sink, - default_retry=self._method_configs["CreateSink"].retry, - default_timeout=self._method_configs["CreateSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_sink( - self, - sink_name, - sink, - unique_writer_identity=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, and - ``filter``. - - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> # TODO: Initialize `sink`: - >>> sink = {} - >>> - >>> response = client.update_sink(sink_name, sink) - - Args: - sink_name (str): Required. The full resource name of the sink to update, including the - parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that - appears as part of ``sink_name``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. See ``sinks.create`` for a description of this field. When - updating a sink, the effect of this field on the value of - ``writer_identity`` in the updated sink depends on both the old and new - values of this field: - - - If the old and new values of this field are both false or both true, - then there is no change to the sink's ``writer_identity``. - - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service account. - - It is an error if the old value is true and the new value is set to - false or defaulted to false. - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need an - update. A sink field will be overwritten if, and only if, it is in the - update mask. ``name`` and output only fields cannot be updated. 
- - An empty updateMask is temporarily treated as using the following mask - for backwards compatibility purposes: destination,filter,includeChildren - At some point in the future, behavior will be removed and specifying an - empty updateMask will be an error. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=filter``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogSink` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "update_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_sink, - default_retry=self._method_configs["UpdateSink"].retry, - default_timeout=self._method_configs["UpdateSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.UpdateSinkRequest( - sink_name=sink_name, - sink=sink, - unique_writer_identity=unique_writer_identity, - update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_sink( - self, - sink_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a sink. If the sink has a unique ``writer_identity``, then that - service account is also deleted. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') - >>> - >>> client.delete_sink(sink_name) - - Args: - sink_name (str): Required. The full resource name of the sink to delete, including the - parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_sink" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_sink" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_sink, - default_retry=self._method_configs["DeleteSink"].retry, - default_timeout=self._method_configs["DeleteSink"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("sink_name", sink_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_sink"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_exclusions( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists all the exclusions in a parent resource. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_exclusions(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_exclusions(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource whose exclusions are to be listed. - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_exclusions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_exclusions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_exclusions, - default_retry=self._method_configs["ListExclusions"].retry, - default_timeout=self._method_configs["ListExclusions"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_exclusions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="exclusions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_exclusion( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the description of an exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> response = client.get_exclusion(name) - - Args: - name (str): Required. The resource name of an existing exclusion: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "get_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_exclusion, - default_retry=self._method_configs["GetExclusion"].retry, - default_timeout=self._method_configs["GetExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.GetExclusionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_exclusion( - self, - parent, - exclusion, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `exclusion`: - >>> exclusion = {} - >>> - >>> response = client.create_exclusion(parent, exclusion) - - Args: - parent (str): Required. The parent resource in which to create the exclusion: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion - name that is not already used in the parent resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogExclusion` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "create_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_exclusion, - default_retry=self._method_configs["CreateExclusion"].retry, - default_timeout=self._method_configs["CreateExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_exclusion( - self, - name, - exclusion, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Changes one or more properties of an existing exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> # TODO: Initialize `exclusion`: - >>> exclusion = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_exclusion(name, exclusion, update_mask) - - Args: - name (str): Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields - specified in ``update_mask`` are relevant. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogExclusion` - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A non-empty list of fields to change in the existing - exclusion. New values for the fields are taken from the corresponding - fields in the ``LogExclusion`` included in this request. Fields not - mentioned in ``update_mask`` are not changed and are ignored in the - request. - - For example, to change the filter and description of an exclusion, - specify an ``update_mask`` of ``"filter,description"``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "update_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_exclusion, - default_retry=self._method_configs["UpdateExclusion"].retry, - default_timeout=self._method_configs["UpdateExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_exclusion( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an exclusion. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') - >>> - >>> client.delete_exclusion(name) - - Args: - name (str): Required. The resource name of an existing exclusion to delete: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
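[Reviewer note] To make the ``update_mask`` semantics of ``update_exclusion`` above concrete, a hedged sketch that changes only an exclusion's ``filter``. The ids are placeholders, and the mask is the dict form of ``FieldMask`` (``paths`` is its repeated string field)::

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    name = client.exclusion_path('my-project-id', 'exclude-debug')

    # Only fields named in update_mask.paths are changed; all other
    # fields on the existing exclusion are left untouched.
    exclusion = {'filter': 'severity < ERROR'}
    update_mask = {'paths': ['filter']}

    updated = client.update_exclusion(name, exclusion, update_mask)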
- if "delete_exclusion" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_exclusion" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_exclusion, - default_retry=self._method_configs["DeleteExclusion"].retry, - default_timeout=self._method_configs["DeleteExclusion"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.DeleteExclusionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_exclusion"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_cmek_settings( - self, - name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> response = client.get_cmek_settings() - - Args: - name (str): Required. The resource for which to retrieve CMEK settings. - - :: - - "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" - - Example: ``"organizations/12345/cmekSettings"``. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.CmekSettings` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_cmek_settings" not in self._inner_api_calls: - self._inner_api_calls[ - "get_cmek_settings" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_cmek_settings, - default_retry=self._method_configs["GetCmekSettings"].retry, - default_timeout=self._method_configs["GetCmekSettings"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.GetCmekSettingsRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_cmek_settings"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_cmek_settings( - self, - name=None, - cmek_settings=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - - ``UpdateCmekSettings`` will fail if 1) ``kms_key_name`` is invalid, or - 2) the associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the - key, or - - 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.ConfigServiceV2Client() - >>> - >>> response = client.update_cmek_settings() - - Args: - name (str): Required. The resource name for the CMEK settings to update. - - :: - - "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" - - Example: ``"organizations/12345/cmekSettings"``. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - cmek_settings (Union[dict, ~google.cloud.logging_v2.types.CmekSettings]): Required. The CMEK settings to update. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.CmekSettings` - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask identifying which fields from ``cmek_settings`` - should be updated. A field will be overwritten if and only if it is in - the update mask. Output only fields cannot be updated. - - See ``FieldMask`` for more information. - - Example: ``"updateMask=kmsKeyName"`` - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
- - Returns: - A :class:`~google.cloud.logging_v2.types.CmekSettings` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_cmek_settings" not in self._inner_api_calls: - self._inner_api_calls[ - "update_cmek_settings" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_cmek_settings, - default_retry=self._method_configs["UpdateCmekSettings"].retry, - default_timeout=self._method_configs["UpdateCmekSettings"].timeout, - client_info=self._client_info, - ) - - request = logging_config_pb2.UpdateCmekSettingsRequest( - name=name, cmek_settings=cmek_settings, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_cmek_settings"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py deleted file mode 100644 index 00c7146e2627..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py +++ /dev/null @@ -1,93 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.ConfigServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - "write_sink": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "ListSinks": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetSink": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateSink": { - "timeout_millis": 120000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateSink": { - "timeout_millis": 120000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteSink": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListExclusions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateExclusion": { - "timeout_millis": 
60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteExclusion": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetCmekSettings": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "UpdateCmekSettings": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py deleted file mode 100644 index ee1a098a5779..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/enums.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class LaunchStage(enum.IntEnum): - """ - The launch stage as defined by `Google Cloud Platform Launch - Stages `__. - - Attributes: - LAUNCH_STAGE_UNSPECIFIED (int): Do not use this default value. - EARLY_ACCESS (int): Early Access features are limited to a closed group of testers. To use - these features, you must sign up in advance and sign a Trusted Tester - agreement (which includes confidentiality provisions). These features may - be unstable, changed in backward-incompatible ways, and are not - guaranteed to be released. - ALPHA (int): Alpha is a limited availability test for releases before they are cleared - for widespread use. By Alpha, all significant design issues are resolved - and we are in the process of verifying functionality. Alpha customers - need to apply for access, agree to applicable terms, and have their - projects whitelisted. Alpha releases don’t have to be feature complete, - no SLAs are provided, and there are no technical support obligations, but - they will be far enough along that customers can actually use them in - test environments or for limited-use tests -- just like they would in - normal production cases. - BETA (int): Beta is the point at which we are ready to open a release for any - customer to use. There are no SLA or technical support obligations in a - Beta release. Products will be complete from a feature perspective, but - may have some open outstanding issues. Beta releases are suitable for - limited production use cases. - GA (int): GA features are open to all developers and are considered stable and - fully qualified for production use. - DEPRECATED (int): Deprecated features are scheduled to be shut down and removed. For more - information, see the “Deprecation Policy” section of our `Terms of - Service `__ and the `Google Cloud - Platform Subject to the Deprecation - Policy `__ documentation. 
- """ - - LAUNCH_STAGE_UNSPECIFIED = 0 - EARLY_ACCESS = 1 - ALPHA = 2 - BETA = 3 - GA = 4 - DEPRECATED = 5 - - -class LogSeverity(enum.IntEnum): - """ - The severity of the event described in a log entry, expressed as one of - the standard severity levels listed below. For your reference, the - levels are assigned the listed numeric values. The effect of using - numeric values other than those listed is undefined. - - You can filter for log entries by severity. For example, the following - filter expression will match log entries with severities ``INFO``, - ``NOTICE``, and ``WARNING``: - - :: - - severity > DEBUG AND severity <= WARNING - - If you are writing log entries, you should map other severity encodings - to one of these standard levels. For example, you might map all of - Java's FINE, FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can - preserve the original severity level in the log entry payload if you - wish. - - Attributes: - DEFAULT (int): (0) The log entry has no assigned severity level. - DEBUG (int): (100) Debug or trace information. - INFO (int): (200) Routine information, such as ongoing status or performance. - NOTICE (int): (300) Normal but significant events, such as start up, shut down, or - a configuration change. - WARNING (int): (400) Warning events might cause problems. - ERROR (int): (500) Error events are likely to cause problems. - CRITICAL (int): (600) Critical events cause more severe problems or outages. - ALERT (int): (700) A person must take an action immediately. - EMERGENCY (int): (800) One or more systems are unusable. - """ - - DEFAULT = 0 - DEBUG = 100 - INFO = 200 - NOTICE = 300 - WARNING = 400 - ERROR = 500 - CRITICAL = 600 - ALERT = 700 - EMERGENCY = 800 - - -class NullValue(enum.IntEnum): - """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. - - The JSON representation for ``NullValue`` is JSON ``null``. - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class LabelDescriptor(object): - class ValueType(enum.IntEnum): - """ - Value types that can be used as label values. - - Attributes: - STRING (int): A variable-length string. This is the default. - BOOL (int): Boolean; true or false. - INT64 (int): A 64-bit signed integer. - """ - - STRING = 0 - BOOL = 1 - INT64 = 2 - - -class LogMetric(object): - class ApiVersion(enum.IntEnum): - """ - Logging API version. - - Attributes: - V2 (int): Logging API v2. - V1 (int): Logging API v1. - """ - - V2 = 0 - V1 = 1 - - -class LogSink(object): - class VersionFormat(enum.IntEnum): - """ - Available log entry formats. Log entries can be written to - Logging in either format and can be exported in either format. - Version 2 is the preferred format. - - Attributes: - VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. - V2 (int): ``LogEntry`` version 2 format. - V1 (int): ``LogEntry`` version 1 format. - """ - - VERSION_FORMAT_UNSPECIFIED = 0 - V2 = 1 - V1 = 2 - - -class MetricDescriptor(object): - class MetricKind(enum.IntEnum): - """ - The kind of measurement. It describes how the data is reported. - - Attributes: - METRIC_KIND_UNSPECIFIED (int): Do not use this default value. - GAUGE (int): An instantaneous measurement of a value. - DELTA (int): The change in a value during a time interval. - CUMULATIVE (int): A value accumulated over a time interval. 
Cumulative - measurements in a time series should have the same start time - and increasing end times, until an event resets the cumulative - value to zero and sets a new start time for the following - points. - """ - - METRIC_KIND_UNSPECIFIED = 0 - GAUGE = 1 - DELTA = 2 - CUMULATIVE = 3 - - class ValueType(enum.IntEnum): - """ - The value type of a metric. - - Attributes: - VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. - BOOL (int): The value is a boolean. This value type can be used only if the metric - kind is ``GAUGE``. - INT64 (int): The value is a signed 64-bit integer. - DOUBLE (int): The value is a double precision floating point number. - STRING (int): The value is a text string. This value type can be used only if the - metric kind is ``GAUGE``. - DISTRIBUTION (int): The value is a ``Distribution``. - MONEY (int): The value is money. - """ - - VALUE_TYPE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - DOUBLE = 3 - STRING = 4 - DISTRIBUTION = 5 - MONEY = 6 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py deleted file mode 100644 index c43506d1bb74..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ /dev/null @@ -1,806 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 LoggingServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import logging_service_v2_client_config -from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class LoggingServiceV2Client(object): - """Service for ingesting and querying logs.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
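[Reviewer note] The ``retry_params`` blocks in the deleted ``*_client_config.py`` modules above drive a standard exponential backoff. A small illustrative computation of the sleep schedule implied by the ``default`` block (constants copied from that block; the loop length is arbitrary)::

    initial_ms, multiplier, cap_ms = 100, 1.3, 60000
    delay = initial_ms
    for attempt in range(1, 8):
        # each retry waits up to min(initial * multiplier**n, cap) milliseconds
        print('retry %d sleeps up to %.0f ms' % (attempt, min(delay, cap_ms)))
        delay *= multiplier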
- _INTERFACE_NAME = "google.logging.v2.LoggingServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def billing_log_path(cls, billing_account, log): - """Return a fully-qualified billing_log string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}/logs/{log}", - billing_account=billing_account, - log=log, - ) - - @classmethod - def folder_path(cls, folder): - """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def folder_log_path(cls, folder, log): - """Return a fully-qualified folder_log string.""" - return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log, - ) - - @classmethod - def log_path(cls, project, log): - """Return a fully-qualified log string.""" - return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log, - ) - - @classmethod - def organization_path(cls, organization): - """Return a fully-qualified organization string.""" - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def organization_log_path(cls, organization, log): - """Return a fully-qualified organization_log string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/logs/{log}", - organization=organization, - log=log, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.LoggingServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.LoggingServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = logging_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def delete_log( - self, - log_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes all the log entries in a log. The log reappears if it receives new - entries. Log entries written shortly before the delete operation might not - be deleted. Entries received after the delete operation with a timestamp - before the operation will be deleted. 
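[Reviewer note] The constructor above resolves the API endpoint from ``client_options`` when one is supplied (dict options are converted via ``google.api_core.client_options.from_dict``). A hedged sketch; the override value is a placeholder, not a real alternate endpoint::

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client(
        client_options={'api_endpoint': 'logging.example.internal:443'}
    )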
- - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> log_name = client.log_path('[PROJECT]', '[LOG]') - >>> - >>> client.delete_log(log_name) - - Args: - log_name (str): Required. The resource name of the log to delete: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example, - ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - For more information about log names, see ``LogEntry``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_log" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_log" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log, - default_retry=self._method_configs["DeleteLog"].retry, - default_timeout=self._method_configs["DeleteLog"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.DeleteLogRequest(log_name=log_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("log_name", log_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_log"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def write_log_entries( - self, - entries, - log_name=None, - resource=None, - labels=None, - partial_success=None, - dry_run=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # TODO: Initialize `entries`: - >>> entries = [] - >>> - >>> response = client.write_log_entries(entries) - - Args: - entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log entries - in this list does not matter. Values supplied in this method's - ``log_name``, ``resource``, and ``labels`` fields are copied into those - log entries in this list that do not include values for their - corresponding fields. 
For more information, see the ``LogEntry`` type. - - If the ``timestamp`` or ``insert_id`` fields are missing in log entries, - then this method supplies the current time or a unique identifier, - respectively. The supplied values are chosen so that, among the log - entries that did not supply their own values, the entries earlier in the - list will sort before the entries later in the list. See the - ``entries.list`` method. - - Log entries with timestamps that are more than the `logs retention - period `__ in the past or - more than 24 hours in the future will not be available when calling - ``entries.list``. However, those log entries can still be `exported with - LogSinks `__. - - To improve throughput and to avoid exceeding the `quota - limit `__ for calls to - ``entries.write``, you should try to include several log entries in this - list, rather than calling this method for each individual log entry. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogEntry` - log_name (str): Optional. A default log resource name that is assigned to all log - entries in ``entries`` that do not specify a value for ``log_name``: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example: - - :: - - "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - - The permission logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new log - entries, whether the resource is specified in logName or in an - individual log entry. - resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all - log entries in ``entries`` that do not specify a value for ``resource``. - Example: - - :: - - { "type": "gce_instance", - "labels": { - "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - - See ``LogEntry``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.MonitoredResource` - labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all - log entries in ``entries``. If a log entry already has a label with the - same key as a label in this parameter, then the log entry's label is not - changed. See ``LogEntry``. - partial_success (bool): Optional. Whether valid entries should be written even if some other - entries fail due to INVALID\_ARGUMENT or PERMISSION\_DENIED errors. If - any entry is not written, then the response status is the error - associated with one of the failed entries and the response includes - error details keyed by the entries' zero-based index in the - ``entries.write`` method. - dry_run (bool): Optional. If true, the request should expect normal response, but the - entries won't be persisted nor exported. Useful for checking whether the - logging API endpoints are working properly before sending valuable data. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.WriteLogEntriesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "write_log_entries" not in self._inner_api_calls: - self._inner_api_calls[ - "write_log_entries" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.write_log_entries, - default_retry=self._method_configs["WriteLogEntries"].retry, - default_timeout=self._method_configs["WriteLogEntries"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.WriteLogEntriesRequest( - entries=entries, - log_name=log_name, - resource=resource, - labels=labels, - partial_success=partial_success, - dry_run=dry_run, - ) - return self._inner_api_calls["write_log_entries"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_log_entries( - self, - resource_names, - project_ids=None, - filter_=None, - order_by=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. For ways - to export log entries, see `Exporting - Logs `__. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # TODO: Initialize `resource_names`: - >>> resource_names = [] - >>> - >>> # Iterate over all results - >>> for element in client.list_log_entries(resource_names): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_log_entries(resource_names).pages: - ... for element in page: - ... # process element - ... pass - - Args: - resource_names (list[str]): Required. Names of one or more parent resources from which to retrieve - log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Projects listed in the ``project_ids`` field are added to this list. - project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project - identifiers or project numbers from which to retrieve log entries. - Example: ``"my-project-1A"``. - filter_ (str): Optional. A filter that chooses which log entries to return. See - `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An empty filter - matches all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in ``resource_names`` - will cause the filter to return no results. The maximum length of the - filter is 20000 characters. - order_by (str): Optional. How the results should be sorted. Presently, the only - permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. 
The first option returns entries in order of - increasing values of ``LogEntry.timestamp`` (oldest first), and the - second option returns entries in order of decreasing timestamps (newest - first). Entries with equal timestamps are returned in order of their - ``insert_id`` values. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_log_entries" not in self._inner_api_calls: - self._inner_api_calls[ - "list_log_entries" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_entries, - default_retry=self._method_configs["ListLogEntries"].retry, - default_timeout=self._method_configs["ListLogEntries"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListLogEntriesRequest( - resource_names=resource_names, - project_ids=project_ids, - filter=filter_, - order_by=order_by, - page_size=page_size, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_log_entries"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="entries", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_monitored_resource_descriptors( - self, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the descriptors for monitored resource types used by Logging. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> # Iterate over all results - >>> for element in client.list_monitored_resource_descriptors(): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_monitored_resource_descriptors().pages: - ... for element in page: - ... # process element - ... pass - - Args: - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_monitored_resource_descriptors" not in self._inner_api_calls: - self._inner_api_calls[ - "list_monitored_resource_descriptors" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_monitored_resource_descriptors, - default_retry=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].retry, - default_timeout=self._method_configs[ - "ListMonitoredResourceDescriptors" - ].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size, - ) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_monitored_resource_descriptors"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="resource_descriptors", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_logs( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.LoggingServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_logs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_logs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The resource name that owns the logs: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_logs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_logs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_logs, - default_retry=self._method_configs["ListLogs"].retry, - default_timeout=self._method_configs["ListLogs"].timeout, - client_info=self._client_info, - ) - - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_logs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="log_names", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py deleted file mode 100644 index b3da612f6caf..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py +++ /dev/null @@ -1,62 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.LoggingServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - "list": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "DeleteLog": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "WriteLogEntries": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - "bundling": { - "element_count_threshold": 1000, - "request_byte_threshold": 1048576, - "delay_threshold_millis": 50, - }, - }, - "ListLogEntries": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListMonitoredResourceDescriptors": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - 
"ListLogs": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py deleted file mode 100644 index 0c80a5d43fe2..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ /dev/null @@ -1,650 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.logging.v2 MetricsServiceV2 API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.gapic import enums -from google.cloud.logging_v2.gapic import metrics_service_v2_client_config -from google.cloud.logging_v2.gapic.transports import metrics_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version - - -class MetricsServiceV2Client(object): - """Service for configuring logs-based metrics.""" - - SERVICE_ADDRESS = "logging.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.logging.v2.MetricsServiceV2" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2Client: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def billing_path(cls, billing_account): - """Return a fully-qualified billing string.""" - return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account, - ) - - @classmethod - def folder_path(cls, folder): - """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder,) - - @classmethod - def metric_path(cls, project, metric): - """Return a fully-qualified metric string.""" - return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric, - ) - - @classmethod - def organization_path(cls, organization): - """Return a fully-qualified organization string.""" - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.MetricsServiceV2GrpcTransport, - Callable[[~.Credentials, type], ~.MetricsServiceV2GrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = metrics_service_v2_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_log_metrics( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists logs-based metrics. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_log_metrics(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_log_metrics(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The name of the project containing the metrics: - - :: - - "projects/[PROJECT_ID]" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_log_metrics" not in self._inner_api_calls: - self._inner_api_calls[ - "list_log_metrics" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_log_metrics, - default_retry=self._method_configs["ListLogMetrics"].retry, - default_timeout=self._method_configs["ListLogMetrics"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_log_metrics"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="metrics", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_log_metric( - self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> response = client.get_log_metric(metric_name) - - Args: - metric_name (str): Required. The resource name of the desired metric: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "get_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_log_metric, - default_retry=self._method_configs["GetLogMetric"].retry, - default_timeout=self._method_configs["GetLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_log_metric( - self, - parent, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `metric`: - >>> metric = {} - >>> - >>> response = client.create_log_metric(parent, metric) - - Args: - parent (str): Required. The resource name of the project in which to create the - metric: - - :: - - "projects/[PROJECT_ID]" - - The new metric must be provided in the request. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): Required. The new logs-based metric, which must not have an identifier that - already exists. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogMetric` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "create_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_log_metric, - default_retry=self._method_configs["CreateLogMetric"].retry, - default_timeout=self._method_configs["CreateLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_log_metric( - self, - metric_name, - metric, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates or updates a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> # TODO: Initialize `metric`: - >>> metric = {} - >>> - >>> response = client.update_log_metric(metric_name, metric) - - Args: - metric_name (str): Required. The resource name of the metric to update: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - The updated metric must be provided in the request and it's ``name`` - field must be the same as ``[METRIC_ID]`` If the metric does not exist - in ``[PROJECT_ID]``, then a new metric is created. - metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): Required. The updated metric. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.logging_v2.types.LogMetric` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.logging_v2.types.LogMetric` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "update_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_log_metric, - default_retry=self._method_configs["UpdateLogMetric"].retry, - default_timeout=self._method_configs["UpdateLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_log_metric( - self, - metric_name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a logs-based metric. - - Example: - >>> from google.cloud import logging_v2 - >>> - >>> client = logging_v2.MetricsServiceV2Client() - >>> - >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') - >>> - >>> client.delete_log_metric(metric_name) - - Args: - metric_name (str): Required. The resource name of the metric to delete: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_log_metric" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_log_metric" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_log_metric, - default_retry=self._method_configs["DeleteLogMetric"].retry, - default_timeout=self._method_configs["DeleteLogMetric"].timeout, - client_info=self._client_info, - ) - - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("metric_name", metric_name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_log_metric"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py deleted file mode 100644 index 133abec23dcf..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py +++ /dev/null @@ -1,48 +0,0 @@ -config = { - "interfaces": { - "google.logging.v2.MetricsServiceV2": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "ListLogMetrics": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteLogMetric": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py deleted file mode 100644 index f3132ede0451..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_config_pb2_grpc - - -class ConfigServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 ConfigServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( - channel - ), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. 
- """ - return self._channel - - @property - def list_sinks(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_sinks`. - - Lists sinks. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].ListSinks - - @property - def get_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_sink`. - - Gets a sink. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].GetSink - - @property - def create_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_sink`. - - Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the - sink's ``writer_identity`` is not permitted to write to the destination. - A sink can export log entries only from the resource owning the sink. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].CreateSink - - @property - def update_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_sink`. - - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, and - ``filter``. - - The updated sink might also have a new ``writer_identity``; see the - ``unique_writer_identity`` field. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].UpdateSink - - @property - def delete_sink(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_sink`. - - Deletes a sink. If the sink has a unique ``writer_identity``, then that - service account is also deleted. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].DeleteSink - - @property - def list_exclusions(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.list_exclusions`. - - Lists all the exclusions in a parent resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].ListExclusions - - @property - def get_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_exclusion`. - - Gets the description of an exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].GetExclusion - - @property - def create_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.create_exclusion`. - - Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["config_service_v2_stub"].CreateExclusion - - @property - def update_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_exclusion`. - - Changes one or more properties of an existing exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].UpdateExclusion - - @property - def delete_exclusion(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.delete_exclusion`. - - Deletes an exclusion. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].DeleteExclusion - - @property - def get_cmek_settings(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.get_cmek_settings`. - - Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].GetCmekSettings - - @property - def update_cmek_settings(self): - """Return the gRPC stub for :meth:`ConfigServiceV2Client.update_cmek_settings`. - - Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - - ``UpdateCmekSettings`` will fail if 1) ``kms_key_name`` is invalid, or - 2) the associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the - key, or - - 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["config_service_v2_stub"].UpdateCmekSettings diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py deleted file mode 100644 index 4cf843caff47..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_pb2_grpc - - -class LoggingServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 LoggingServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def delete_log(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.delete_log`. - - Deletes all the log entries in a log. The log reappears if it receives new - entries. Log entries written shortly before the delete operation might not - be deleted. Entries received after the delete operation with a timestamp - before the operation will be deleted. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].DeleteLog - - @property - def write_log_entries(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.write_log_entries`. - - Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].WriteLogEntries - - @property - def list_log_entries(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_log_entries`. - - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. For ways - to export log entries, see `Exporting - Logs `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListLogEntries - - @property - def list_monitored_resource_descriptors(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_monitored_resource_descriptors`. - - Lists the descriptors for monitored resource types used by Logging. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListMonitoredResourceDescriptors - - @property - def list_logs(self): - """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_logs`. - - Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["logging_service_v2_stub"].ListLogs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py deleted file mode 100644 index 605bc118e28d..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
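The LoggingServiceV2GrpcTransport deleted above follows the same pattern; a sketch of the explicit-channel variant (again restricted to names in the removed code; an explicit channel is mutually exclusive with passing credentials):

.. code-block:: python

    # Illustrative only, against the pre-removal API.
    from google.cloud.logging_v2.gapic.transports import (
        logging_service_v2_grpc_transport,
    )

    transport_cls = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport

    # create_channel() attaches the service OAuth scopes; a transport built
    # from an explicit channel must not also receive credentials.
    channel = transport_cls.create_channel(address="logging.googleapis.com:443")
    transport = transport_cls(channel=channel)

    write_log_entries = transport.write_log_entries  # raw WriteLogEntries callable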
- - -import google.api_core.grpc_helpers - -from google.cloud.logging_v2.proto import logging_metrics_pb2_grpc - - -class MetricsServiceV2GrpcTransport(object): - """gRPC transport class providing stubs for - google.logging.v2 MetricsServiceV2 API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ) - - def __init__( - self, channel=None, credentials=None, address="logging.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( - channel - ), - } - - @classmethod - def create_channel( - cls, address="logging.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_log_metrics(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.list_log_metrics`. - - Lists logs-based metrics. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["metrics_service_v2_stub"].ListLogMetrics - - @property - def get_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.get_log_metric`. - - Gets a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].GetLogMetric - - @property - def create_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.create_log_metric`. - - Creates a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].CreateLogMetric - - @property - def update_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.update_log_metric`. - - Creates or updates a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].UpdateLogMetric - - @property - def delete_log_metric(self): - """Return the gRPC stub for :meth:`MetricsServiceV2Client.delete_log_metric`. - - Deletes a logs-based metric. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["metrics_service_v2_stub"].DeleteLogMetric diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py similarity index 71% rename from packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py index 67b96c95e907..29ed8f0d165c 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py @@ -14,10 +14,10 @@ """Python :mod:`logging` handlers for Google Cloud Logging.""" -from google.cloud.logging.handlers.app_engine import AppEngineHandler -from google.cloud.logging.handlers.container_engine import ContainerEngineHandler -from google.cloud.logging.handlers.handlers import CloudLoggingHandler -from google.cloud.logging.handlers.handlers import setup_logging +from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler +from google.cloud.logging_v2.handlers.container_engine import ContainerEngineHandler +from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.handlers import setup_logging __all__ = [ "AppEngineHandler", diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py similarity index 86% rename from packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 4bd319a53886..3150e46c351f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -22,7 +22,7 @@ except ImportError: # pragma: NO COVER flask = None -from google.cloud.logging.handlers.middleware.request import _get_django_request +from 
google.cloud.logging_v2.handlers.middleware.request import _get_django_request _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" _FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" @@ -31,8 +31,8 @@ def format_stackdriver_json(record, message): """Helper to format a LogRecord in Stackdriver fluentd format. - :rtype: str - :returns: JSON str to be written to the log file. + Returns: + str: JSON str to be written to the log file. """ subsecond, second = math.modf(record.created) @@ -49,8 +49,8 @@ def format_stackdriver_json(record, message): def get_trace_id_from_flask(): """Get trace_id from flask request headers. - :rtype: str - :returns: TraceID in HTTP request headers. + Returns: + str: TraceID in HTTP request headers. """ if flask is None or not flask.request: return None @@ -68,8 +68,8 @@ def get_trace_id_from_flask(): def get_trace_id_from_django(): """Get trace_id from django request headers. - :rtype: str - :returns: TraceID in HTTP request headers. + Returns: + str: TraceID in HTTP request headers. """ request = _get_django_request() @@ -88,8 +88,8 @@ def get_trace_id_from_django(): def get_trace_id(): """Helper to get trace_id from web application request header. - :rtype: str - :returns: TraceID in HTTP request headers. + Returns: + str: TraceID in HTTP request headers. """ checkers = ( get_trace_id_from_django, diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py similarity index 74% rename from packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index d0179fb6dcfc..fed9bd205add 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -14,16 +14,16 @@ """Logging handler for App Engine Flexible -Sends logs to the Stackdriver Logging API with the appropriate resource +Sends logs to the Cloud Logging API with the appropriate resource and labels for App Engine logs. """ import logging import os -from google.cloud.logging.handlers._helpers import get_trace_id -from google.cloud.logging.handlers.transports import BackgroundThreadTransport -from google.cloud.logging.resource import Resource +from google.cloud.logging_v2.handlers._helpers import get_trace_id +from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport +from google.cloud.logging_v2.resource import Resource _DEFAULT_GAE_LOGGER_NAME = "app" @@ -36,28 +36,28 @@ class AppEngineHandler(logging.StreamHandler): - """A logging handler that sends App Engine-formatted logs to Stackdriver. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The authenticated Google Cloud Logging client for this - handler to use. - - :type transport: :class:`type` - :param transport: The transport class. It should be a subclass - of :class:`.Transport`. If unspecified, - :class:`.BackgroundThreadTransport` will be used. - - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. - """ + """A logging handler that sends App Engine-formatted logs to Cloud Logging.""" def __init__( self, client, + *, name=_DEFAULT_GAE_LOGGER_NAME, transport=BackgroundThreadTransport, stream=None, ): + """ + Args: + client (~logging_v2.client.Client): The authenticated + Google Cloud Logging client for this handler to use.
+ name (Optional[str]): Name for the logger. + transport (Optional[~logging_v2.transports.Transport]): + The transport class. It should be a subclass + of :class:`.Transport`. If unspecified, + :class:`.BackgroundThreadTransport` will be used. + stream (Optional[IO]): Stream to be used by the handler. + + """ super(AppEngineHandler, self).__init__(stream) self.name = name self.client = client @@ -72,8 +72,8 @@ def __init__( def get_gae_resource(self): """Return the GAE resource using the environment variables. - :rtype: :class:`~google.cloud.logging.resource.Resource` - :returns: Monitored resource for GAE. + Returns: + google.cloud.logging_v2.resource.Resource: Monitored resource for GAE. """ gae_resource = Resource( type="gae_app", @@ -91,8 +91,8 @@ def get_gae_labels(self): If the trace ID can be detected, it will be included as a label. Currently, no other labels are included. - :rtype: dict - :returns: Labels for GAE app. + Returns: + dict: Labels for GAE app. """ gae_labels = {} @@ -109,8 +109,8 @@ def emit(self, record): See https://docs.python.org/2/library/logging.html#handler-objects - :type record: :class:`logging.LogRecord` - :param record: The record to be logged. + Args: + record (logging.LogRecord): The record to be logged. """ message = super(AppEngineHandler, self).format(record) gae_labels = self.get_gae_labels() diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py similarity index 75% rename from packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py index 9fe460889232..a4bd0f84890b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/container_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py @@ -21,7 +21,7 @@ import logging.handlers -from google.cloud.logging.handlers._helpers import format_stackdriver_json +from google.cloud.logging_v2.handlers._helpers import format_stackdriver_json class ContainerEngineHandler(logging.StreamHandler): @@ -29,26 +29,26 @@ class ContainerEngineHandler(logging.StreamHandler): This handler is written to format messages for the Google Container Engine (GKE) fluentd plugin, so that metadata such as log level are properly set. - - :type name: str - :param name: (optional) the name of the custom log in Stackdriver Logging. - - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. """ - def __init__(self, name=None, stream=None): + def __init__(self, *, name=None, stream=None): + """ + Args: + name (Optional[str]): The name of the custom log in Cloud Logging. + stream (Optional[IO]): Stream to be used by the handler. + + """ super(ContainerEngineHandler, self).__init__(stream=stream) self.name = name def format(self, record): """Format the message into JSON expected by fluentd. - :type record: :class:`~logging.LogRecord` - :param record: the log record + Args: + record (logging.LogRecord): The log record. - :rtype: str - :returns: A JSON string formatted for GKE fluentd. + Returns: + str: A JSON string formatted for GKE fluentd. 
""" message = super(ContainerEngineHandler, self).format(record) return format_stackdriver_json(record, message) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py similarity index 58% rename from packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 111cec8d27cf..2d79c7f8ad64 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Python :mod:`logging` handlers for Stackdriver Logging.""" +"""Python :mod:`logging` handlers for Cloud Logging.""" import logging -from google.cloud.logging.handlers.transports import BackgroundThreadTransport -from google.cloud.logging.logger import _GLOBAL_RESOURCE +from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE DEFAULT_LOGGER_NAME = "python" @@ -25,7 +25,7 @@ class CloudLoggingHandler(logging.StreamHandler): - """Handler that directly makes Stackdriver logging API calls. + """Handler that directly makes Cloud Logging API calls. This is a Python standard ``logging`` handler using that can be used to route Python standard logging messages directly to the Stackdriver @@ -35,39 +35,13 @@ class CloudLoggingHandler(logging.StreamHandler): This handler supports both an asynchronous and synchronous transport. - :type client: :class:`google.cloud.logging.client.Client` - :param client: the authenticated Google Cloud Logging client for this - handler to use - - :type name: str - :param name: the name of the custom log in Stackdriver Logging. Defaults - to 'python'. The name of the Python logger will be represented - in the ``python_logger`` field. - - :type transport: :class:`type` - :param transport: Class for creating new transport objects. It should - extend from the base :class:`.Transport` type and - implement :meth`.Transport.send`. Defaults to - :class:`.BackgroundThreadTransport`. The other - option is :class:`.SyncTransport`. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry, defaults - to the global resource type. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type stream: file-like object - :param stream: (optional) stream to be used by the handler. - Example: .. code-block:: python import logging import google.cloud.logging - from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import CloudLoggingHandler client = google.cloud.logging.Client() handler = CloudLoggingHandler(client) @@ -82,12 +56,33 @@ class CloudLoggingHandler(logging.StreamHandler): def __init__( self, client, + *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, resource=_GLOBAL_RESOURCE, labels=None, stream=None, ): + """ + Args: + client (~logging_v2.client.Client): + The authenticated Google Cloud Logging client for this + handler to use. + name (str): the name of the custom log in Cloud Logging. + Defaults to 'python'. The name of the Python logger will be represented + in the ``python_logger`` field. 
+ transport (~logging_v2.transports.Transport): + Class for creating new transport objects. It should + extend from the base :class:`.Transport` type and + implement :meth:`.Transport.send`. Defaults to + :class:`.BackgroundThreadTransport`. The other + option is :class:`.SyncTransport`. + resource (~logging_v2.resource.Resource): + Resource for this Handler. Defaults to ``GLOBAL_RESOURCE``. + labels (Optional[dict]): Mapping of labels for the entry. + stream (Optional[IO]): Stream to be used by the handler. + """ super(CloudLoggingHandler, self).__init__(stream) self.name = name self.client = client @@ -102,40 +97,28 @@ def emit(self, record): See https://docs.python.org/2/library/logging.html#handler-objects - :type record: :class:`logging.LogRecord` - :param record: The record to be logged. + Args: + record (logging.LogRecord): The record to be logged. """ message = super(CloudLoggingHandler, self).format(record) self.transport.send(record, message, resource=self.resource, labels=self.labels) def setup_logging( - handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO + handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO ): """Attach a logging handler to the Python root logger Excludes loggers that this library itself uses to avoid infinite recursion. - :type handler: :class:`logging.handler` - :param handler: the handler to attach to the global handler - - :type excluded_loggers: tuple - :param excluded_loggers: (Optional) The loggers to not attach the handler - to. This will always include the loggers in the - path of the logging client itself. - - :type log_level: int - :param log_level: (Optional) Python logging log level. Defaults to - :const:`logging.INFO`. - Example: .. code-block:: python import logging import google.cloud.logging - from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import CloudLoggingHandler client = google.cloud.logging.Client() handler = CloudLoggingHandler(client) @@ -144,6 +127,13 @@ def setup_logging( logging.error('bad news') # API call + Args: + handler (logging.Handler): The handler to attach to the root logger. + excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler + to. This will always include the loggers in the + path of the logging client itself. + log_level (Optional[int]): Python logging log level. Defaults to + :const:`logging.INFO`. """ all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) logger = logging.getLogger() diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/__init__.py similarity index 88% rename from packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/__init__.py index d8ba3016f724..bd32e4a90101 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/__init__.py @@ -12,6 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License.
-from google.cloud.logging.handlers.middleware.request import RequestMiddleware +from google.cloud.logging_v2.handlers.middleware.request import RequestMiddleware __all__ = ["RequestMiddleware"] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py similarity index 87% rename from packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py index 0229e4c8e1cd..da361b9679b4 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py @@ -27,8 +27,8 @@ def _get_django_request(): """Get Django request from thread local. - :rtype: str - :returns: Django request. + Returns: + django.http.request.HttpRequest: Django request, or None if unset """ return getattr(_thread_locals, "request", None) @@ -42,13 +42,14 @@ def _get_django_request(): class RequestMiddleware(MiddlewareMixin): """Saves the request in thread local""" - def __init__(self, get_response=None): + def __init__(self, *, get_response=None): self.get_response = get_response def process_request(self, request): """Called on each request, before Django decides which view to execute. - :type request: :class:`~django.http.request.HttpRequest` - :param request: Django http request. + Args: + request (django.http.request.HttpRequest): + Django http request. """ _thread_locals.request = request diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/__init__.py similarity index 81% rename from packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/__init__.py index 3c6cc214e5e3..d1b961533e8a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/__init__.py @@ -20,9 +20,9 @@ the background. """ -from google.cloud.logging.handlers.transports.base import Transport -from google.cloud.logging.handlers.transports.sync import SyncTransport -from google.cloud.logging.handlers.transports.background_thread import ( +from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.handlers.transports.sync import SyncTransport +from google.cloud.logging_v2.handlers.transports.background_thread import ( BackgroundThreadTransport, ) diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py similarity index 61% rename from packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 812b733cff92..873fa452d294 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -14,7 +14,7 @@ """Transport for Python logging handler -Uses a background worker to log to Stackdriver Logging asynchronously.
+Uses a background worker to log to Cloud Logging asynchronously. """ from __future__ import print_function @@ -28,8 +28,8 @@ from six.moves import queue -from google.cloud.logging import _helpers -from google.cloud.logging.handlers.transports.base import Transport +from google.cloud.logging_v2 import _helpers +from google.cloud.logging_v2.handlers.transports.base import Transport _DEFAULT_GRACE_PERIOD = 5.0 # Seconds _DEFAULT_MAX_BATCH_SIZE = 10 @@ -39,26 +39,22 @@ _LOGGER = logging.getLogger(__name__) -def _get_many(queue_, max_items=None, max_latency=0): +def _get_many(queue_, *, max_items=None, max_latency=0): """Get multiple items from a Queue. Gets at least one (blocking) and at most ``max_items`` items (non-blocking) from a given Queue. Does not mark the items as done. - :type queue_: :class:`~queue.Queue` - :param queue_: The Queue to get items from. + Args: + queue_ (queue.Queue): The Queue to get items from. + max_items (Optional[int]): The maximum number of items to get. + If ``None``, then all available items in the queue are returned. + max_latency (Optional[float]): The maximum number of seconds to wait + for more than one item from a queue. This number includes + the time required to retrieve the first item. - :type max_items: int - :param max_items: The maximum number of items to get. If ``None``, then all - available items in the queue are returned. - - :type max_latency: float - :param max_latency: The maximum number of seconds to wait for more than one - item from a queue. This number includes the time required to retrieve - the first item. - - :rtype: list - :returns: items retrieved from the queue. + Returns: + list: items retrieved from the queue """ start = time.time() # Always return at least one item. @@ -74,34 +70,30 @@ def _get_many(queue_, max_items=None, max_latency=0): class _Worker(object): - """A background thread that writes batches of log entries. - - :type cloud_logger: :class:`~google.cloud.logging.logger.Logger` - :param cloud_logger: The logger to send entries to. - - :type grace_period: float - :param grace_period: The amount of time to wait for pending logs to - be submitted when the process is shutting down. - - :type max_batch_size: int - :param max_batch_size: The maximum number of items to send at a time - in the background thread. - - :type max_latency: float - :param max_latency: The amount of time to wait for new logs before - sending a new batch. It is strongly recommended to keep this smaller - than the grace_period. This means this is effectively the longest - amount of time the background thread will hold onto log entries - before sending them to the server. - """ + """A background thread that writes batches of log entries.""" def __init__( self, cloud_logger, + *, grace_period=_DEFAULT_GRACE_PERIOD, max_batch_size=_DEFAULT_MAX_BATCH_SIZE, max_latency=_DEFAULT_MAX_LATENCY, ): + """ + Args: + cloud_logger (logging_v2.logger.Logger): + The logger to send entries to. + grace_period (Optional[float]): The amount of time to wait for pending logs to + be submitted when the process is shutting down. + max_batch_size (Optional[int]): The maximum number of items to send at a time + in the background thread. + max_latency (Optional[float]): The amount of time to wait for new logs before + sending a new batch. It is strongly recommended to keep this smaller + than the grace_period. This means this is effectively the longest + amount of time the background thread will hold onto log entries + before sending them to the server.
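The batching rule in the ``_get_many`` docstring above can be read as a small standalone function: block for the first item, then drain up to ``max_items`` more, waiting at most ``max_latency`` seconds in total. The sketch below mirrors those documented semantics for illustration; it is not the module's own code:

    import queue
    import time

    def get_many(queue_, *, max_items=None, max_latency=0):
        start = time.time()
        items = [queue_.get()]  # always block for at least one item
        while max_items is None or len(items) < max_items:
            try:
                elapsed = time.time() - start
                timeout = max(0, max_latency - elapsed)
                items.append(queue_.get(timeout=timeout))
            except queue.Empty:
                break
        return items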
+ """ self._cloud_logger = cloud_logger self._grace_period = grace_period self._max_batch_size = max_batch_size @@ -172,7 +164,7 @@ def start(self): self._thread.start() atexit.register(self._main_thread_terminated) - def stop(self, grace_period=None): + def stop(self, *, grace_period=None): """Signals the background thread to stop. This does not terminate the background thread. It simply queues the @@ -181,13 +173,13 @@ def stop(self, grace_period=None): work. The ``grace_period`` parameter will give the background thread some time to finish processing before this function returns. - :type grace_period: float - :param grace_period: If specified, this method will block up to this - many seconds to allow the background thread to finish work before - returning. + Args: + grace_period (Optional[float]): If specified, this method will + block up to this many seconds to allow the background thread + to finish work before returning. - :rtype: bool - :returns: True if the thread terminated. False if the thread is still + Returns: + bool: True if the thread terminated. False if the thread is still running. """ if not self.is_alive: @@ -218,11 +210,11 @@ def _main_thread_terminated(self): if not self._queue.empty(): print( "Program shutting down, attempting to send %d queued log " - "entries to Stackdriver Logging..." % (self._queue.qsize(),), + "entries to Cloud Logging..." % (self._queue.qsize(),), file=sys.stderr, ) - if self.stop(self._grace_period): + if self.stop(grace_period=self._grace_period): print("Sent all pending logs.", file=sys.stderr) else: print( @@ -231,29 +223,20 @@ def _main_thread_terminated(self): ) def enqueue( - self, record, message, resource=None, labels=None, trace=None, span_id=None + self, record, message, *, resource=None, labels=None, trace=None, span_id=None ): """Queues a log entry to be written by the background thread. - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being + Args: + record (logging.LogRecord): Python log record that the handler was called with. + message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type trace: str - :param trace: (optional) traceid to apply to the logging entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + resource (Optional[google.cloud.logging_v2.resource.Resource]): + Monitored resource of the entry + labels (Optional[dict]): Mapping of labels for the entry. + trace (Optional[str]): TraceID to apply to the logging entry. + span_id (Optional[str]): Span_id within the trace for the log entry. + Specify the trace parameter if span_id is set. """ queue_entry = { "info": {"message": message, "python_logger": record.name}, @@ -272,38 +255,32 @@ def flush(self): class BackgroundThreadTransport(Transport): - """Asynchronous transport that uses a background thread. - - :type client: :class:`~google.cloud.logging.client.Client` - :param client: The Logging client. - - :type name: str - :param name: the name of the logger. 
- - :type grace_period: float - :param grace_period: The amount of time to wait for pending logs to - be submitted when the process is shutting down. - - :type batch_size: int - :param batch_size: The maximum number of items to send at a time in the - background thread. - - :type max_latency: float - :param max_latency: The amount of time to wait for new logs before - sending a new batch. It is strongly recommended to keep this smaller - than the grace_period. This means this is effectively the longest - amount of time the background thread will hold onto log entries - before sending them to the server. - """ + """Asynchronous transport that uses a background thread.""" def __init__( self, client, name, + *, grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE, max_latency=_DEFAULT_MAX_LATENCY, ): + """ + Args: + client (~logging_v2.client.Client): + The Logging client. + name (str): The name of the logger. + grace_period (Optional[float]): The amount of time to wait for pending logs to + be submitted when the process is shutting down. + batch_size (Optional[int]): The maximum number of items to send at a time in the + background thread. + max_latency (Optional[float]): The amount of time to wait for new logs before + sending a new batch. It is strongly recommended to keep this smaller + than the grace_period. This means this is effectively the longest + amount of time the background thread will hold onto log entries + before sending them to the server. + """ self.client = client logger = self.client.logger(name) self.worker = _Worker( @@ -319,25 +296,16 @@ def send( ): """Overrides Transport.send(). - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. - - :type trace: str - :param trace: (optional) traceid to apply to the logging entry. - - :type span_id: str - :param span_id: (optional) span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + Args: + record (logging.LogRecord): Python log record that the handler was called with. + message (str): The message from the ``LogRecord`` after being + formatted by the associated log formatters. + resource (Optional[google.cloud.logging_v2.resource.Resource]): + Monitored resource of the entry. + labels (Optional[dict]): Mapping of labels for the entry. + trace (Optional[str]): TraceID to apply to the logging entry. + span_id (Optional[str]): span_id within the trace for the log entry. + Specify the trace parameter if span_id is set.
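As a usage sketch (assuming default credentials), the transport is handed to the handler as a class; the handler instantiates it as ``transport(client, name)``, so the batching parameters above keep their defaults unless a subclass overrides them:

    import google.cloud.logging
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport

    client = google.cloud.logging.Client()  # assumes default credentials
    handler = CloudLoggingHandler(client, transport=BackgroundThreadTransport)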
""" self.worker.enqueue( record, diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py similarity index 64% rename from packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py index 7e24cc0206ca..c94c7ad704c3 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py @@ -23,22 +23,17 @@ class Transport(object): """ def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None + self, record, message, *, resource=None, labels=None, trace=None, span_id=None ): """Transport send to be implemented by subclasses. - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. - - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. + Args: + record (logging.LogRecord): Python log record that the handler was called with. + message (str): The message from the ``LogRecord`` after being + formatted by the associated log formatters. + resource (Optional[google.cloud.logging_v2.resource.Resource]): + Monitored resource of the entry. + labels (Optional[dict]): Mapping of labels for the entry. """ raise NotImplementedError diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py similarity index 60% rename from packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py rename to packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index e87eb4885fbf..550c29391548 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -14,11 +14,11 @@ """Transport for Python logging handler. -Logs directly to the the Stackdriver Logging API with a synchronous call. +Logs directly to the the Cloud Logging API with a synchronous call. """ -from google.cloud.logging import _helpers -from google.cloud.logging.handlers.transports.base import Transport +from google.cloud.logging_v2 import _helpers +from google.cloud.logging_v2.handlers.transports.base import Transport class SyncTransport(Transport): @@ -31,22 +31,18 @@ def __init__(self, client, name): self.logger = client.logger(name) def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None + self, record, message, *, resource=None, labels=None, trace=None, span_id=None ): """Overrides transport.send(). - :type record: :class:`logging.LogRecord` - :param record: Python log record that the handler was called with. - - :type message: str - :param message: The message from the ``LogRecord`` after being - formatted by the associated log formatters. - - :type resource: :class:`~google.cloud.logging.resource.Resource` - :param resource: (Optional) Monitored resource of the entry. 
- - :type labels: dict - :param labels: (Optional) Mapping of labels for the entry. + Args: + record (logging.LogRecord): + Python log record that the handler was called with. + message (str): The message from the ``LogRecord`` after being + formatted by the associated log formatters. + resource (Optional[~logging_v2.resource.Resource]): + Monitored resource of the entry. + labels (Optional[dict]): Mapping of labels for the entry. """ info = {"message": message, "python_logger": record.name} self.logger.log_struct( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py new file mode 100644 index 000000000000..89202bcbd659 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -0,0 +1,382 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Loggers.""" + +from google.cloud.logging_v2._helpers import _add_defaults_to_filter +from google.cloud.logging_v2.entries import LogEntry +from google.cloud.logging_v2.entries import ProtobufEntry +from google.cloud.logging_v2.entries import StructEntry +from google.cloud.logging_v2.entries import TextEntry +from google.cloud.logging_v2.resource import Resource + + +_GLOBAL_RESOURCE = Resource(type="global", labels={}) + + +_OUTBOUND_ENTRY_FIELDS = ( # (name, default) + ("type_", None), + ("log_name", None), + ("payload", None), + ("labels", None), + ("insert_id", None), + ("severity", None), + ("http_request", None), + ("timestamp", None), + ("resource", _GLOBAL_RESOURCE), + ("trace", None), + ("span_id", None), + ("trace_sampled", None), + ("source_location", None), +) + + +class Logger(object): + def __init__(self, name, client, *, labels=None): + """Loggers represent named targets for log entries. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs + + Args: + name (str): The name of the logger. + client (~logging_v2.client.Client): + A client which holds credentials and project configuration + for the logger (which requires a project). + labels (Optional[dict]): Mapping of default labels for entries written + via this logger. + + + """ + self.name = name + self._client = client + self.labels = labels + + @property + def client(self): + """Client bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in logging APIs""" + return f"projects/{self.project}/logs/{self.name}" + + @property + def path(self): + """URI path for use in logging APIs""" + return f"/{self.full_name}" + + def _require_client(self, client): + """Check client or verify over-ride. + + Args: + client (Union[None, ~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger.
+ + Returns: + ~logging_v2.client.Client: The client passed in + or the currently bound client. + """ + if client is None: + client = self._client + return client + + def batch(self, *, client=None): + """Return a batch to use as a context manager. + + Args: + client (Union[None, ~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + Returns: + Batch: A batch to use as a context manager. + """ + client = self._require_client(client) + return Batch(self, client) + + def _do_log(self, client, _entry_class, payload=None, **kw): + """Helper for :meth:`log_empty`, :meth:`log_text`, etc.""" + client = self._require_client(client) + + # Apply defaults + kw["log_name"] = kw.pop("log_name", self.full_name) + kw["labels"] = kw.pop("labels", self.labels) + kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE) + + if payload is not None: + entry = _entry_class(payload=payload, **kw) + else: + entry = _entry_class(**kw) + + api_repr = entry.to_api_repr() + client.logging_api.write_entries([api_repr]) + + def log_empty(self, *, client=None, **kw): + """Log an empty message via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self._do_log(client, LogEntry, **kw) + + def log_text(self, text, *, client=None, **kw): + """Log a text message via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + text (str): the log message + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self._do_log(client, TextEntry, text, **kw) + + def log_struct(self, info, *, client=None, **kw): + """Log a structured message via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + info (dict): the log entry information + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self._do_log(client, StructEntry, info, **kw) + + def log_proto(self, message, *, client=None, **kw): + """Log a protobuf message via a POST request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + + Args: + message (google.protobuf.message.Message): + The protobuf message to be logged. + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`.
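A short sketch of the write paths defined above; the log name and credentials are illustrative assumptions:

    import google.cloud.logging

    client = google.cloud.logging.Client()   # assumes default credentials
    logger = client.logger("example-log")    # hypothetical log name

    logger.log_text("plain text entry", severity="INFO")     # TextEntry
    logger.log_struct({"event": "signup", "user": "1234"})   # StructEntry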
+ """ + self._do_log(client, ProtobufEntry, message, **kw) + + def delete(self, logger_name=None, *, client=None): + """Delete all entries in a logger via a DELETE request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete + + Args: + logger_name (Optional[str]): The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + If not passed, defaults to the project bound to the client. + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + if logger_name is None: + logger_name = self.full_name + client.logging_api.logger_delete(logger_name) + + def list_entries( + self, + *, + resource_names=None, + filter_=None, + order_by=None, + page_size=None, + page_token=None, + ): + """Return a page of log entries. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list + + Args: + resource_names (Optional[Sequence[str]]): Names of one or more parent resources + from which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + If not passed, defaults to the project bound to the client. + filter_ (Optional[str]): a filter expression. See + https://cloud.google.com/logging/docs/view/advanced_filters + By default, a 24 hour filter is applied. + order_by (Optional[str]): One of :data:`~logging_v2.ASCENDING` + or :data:`~logging_v2.DESCENDING`. + page_size (Optional[int]): + Optional. The maximum number of entries in each page of results + from this request. Non-positive values are ignored. Defaults + to a sensible value set by the API. + page_token (Optional[str]): + Optional. If present, return the next batch of entries, using + the value, which must correspond to the ``nextPageToken`` value + returned in the previous response. Deprecated: use the ``pages`` + property of the returned iterator instead of manually passing + the token. + + Returns: + Iterator[~logging_v2.entries.LogEntry] + """ + + if resource_names is None: + resource_names = [f"projects/{self.project}"] + + log_filter = f"logName={self.full_name}" + if filter_ is not None: + filter_ = f"{filter_} AND {log_filter}" + else: + filter_ = log_filter + filter_ = _add_defaults_to_filter(filter_) + return self.client.list_entries( + resource_names=resource_names, + filter_=filter_, + order_by=order_by, + page_size=page_size, + page_token=page_token, + ) + + +class Batch(object): + def __init__(self, logger, client, *, resource=None): + """Context manager: collect entries to log via a single API call. + + Helper returned by :meth:`Logger.batch` + + Args: + logger (logging_v2.logger.Logger): + the logger to which entries will be logged. + client (~logging_V2.client.Cilent): + The client to use. + resource (Optional[~logging_v2.resource.Resource]): + Monitored resource of the batch, defaults + to None, which requires that every entry should have a + resource specified. 
Since the methods used to write + entries default the entry's resource to the global + resource type, this parameter is only required + if explicitly set to None. If no entry's resource is + set to None, this parameter will be ignored on the server. + """ + self.logger = logger + self.entries = [] + self.client = client + self.resource = resource + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def log_empty(self, **kw): + """Add an entry without payload to be logged during :meth:`commit`. + + Args: + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self.entries.append(LogEntry(**kw)) + + def log_text(self, text, **kw): + """Add a text entry to be logged during :meth:`commit`. + + Args: + text (str): the text entry + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self.entries.append(TextEntry(payload=text, **kw)) + + def log_struct(self, info, **kw): + """Add a struct entry to be logged during :meth:`commit`. + + Args: + info (dict): The struct entry. + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self.entries.append(StructEntry(payload=info, **kw)) + + def log_proto(self, message, **kw): + """Add a protobuf entry to be logged during :meth:`commit`. + + Args: + message (google.protobuf.message.Message): The protobuf entry. + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + self.entries.append(ProtobufEntry(payload=message, **kw)) + + def commit(self, *, client=None): + """Send saved log entries as a single API call. + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current batch. + """ + if client is None: + client = self.client + + kwargs = {"logger_name": self.logger.full_name} + + if self.resource is not None: + kwargs["resource"] = self.resource._to_dict() + + if self.logger.labels is not None: + kwargs["labels"] = self.logger.labels + + entries = [entry.to_api_repr() for entry in self.entries] + + client.logging_api.write_entries(entries, **kwargs) + del self.entries[:] diff --git a/packages/google-cloud-logging/google/cloud/logging/metric.py b/packages/google-cloud-logging/google/cloud/logging_v2/metric.py similarity index 50% rename from packages/google-cloud-logging/google/cloud/logging/metric.py rename to packages/google-cloud-logging/google/cloud/logging_v2/metric.py index 3fb91bb52f0a..2959bacc2e5b 100644 --- a/packages/google-cloud-logging/google/cloud/logging/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/metric.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Define Stackdriver Logging API Metrics.""" +"""Define Cloud Logging API Metrics.""" from google.cloud.exceptions import NotFound @@ -20,26 +20,22 @@ class Metric(object): """Metrics represent named filters for log entries.
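A sketch of the Batch context manager defined above: entries accumulate locally and a single ``write_entries`` call is made on a clean exit (the log name and credentials are illustrative):

    import google.cloud.logging

    client = google.cloud.logging.Client()  # assumes default credentials
    logger = client.logger("example-log")   # hypothetical log name

    with logger.batch() as batch:
        batch.log_text("step one finished")
        batch.log_struct({"step": 2, "status": "ok"})
    # __exit__ commits both entries in one API call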
- See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics - - :type name: str - :param name: the name of the metric + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics + """ - :type filter_: str - :param filter_: the advanced logs filter expression defining the entries + def __init__(self, name, *, filter_=None, client=None, description=""): + """ + Args: + name (str): The name of the metric. + filter_ (str): the advanced logs filter expression defining the entries tracked by the metric. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. + client (Optional[~logging_v2.client.Client]): A client which holds + credentials and project configuration for the metric (which requires a project). + description (Optional[str]): A description of the metric. - :type client: :class:`google.cloud.logging.client.Client` - :param client: A client which holds credentials and project configuration - for the metric (which requires a project). - - :type description: str - :param description: an optional description of the metric. - """ - - def __init__(self, name, filter_=None, client=None, description=""): + """ self.name = name self._client = client self.filter_ = filter_ @@ -58,76 +54,75 @@ def project(self): @property def full_name(self): """Fully-qualified name used in metric APIs""" - return "projects/%s/metrics/%s" % (self.project, self.name) + return f"projects/{self.project}/metrics/{self.name}" @property def path(self): """URL path for the metric's APIs""" - return "/%s" % (self.full_name,) + return f"/{self.full_name}" @classmethod def from_api_repr(cls, resource, client): - """Factory: construct a metric given its API representation - - :type resource: dict - :param resource: metric resource representation returned from the API + """Construct a metric given its API representation - :type client: :class:`google.cloud.logging.client.Client` - :param client: Client which holds credentials and project - configuration for the metric. + Args: + resource (dict): metric resource representation returned from the API + client (~logging_v2.client.Client): Client which holds + credentials and project configuration for the metric. - :rtype: :class:`google.cloud.logging.metric.Metric` - :returns: Metric parsed from ``resource``. + Returns: + google.cloud.logging_v2.metric.Metric """ metric_name = resource["name"] filter_ = resource["filter"] description = resource.get("description", "") - return cls(metric_name, filter_, client=client, description=description) + return cls(metric_name, filter_=filter_, client=client, description=description) def _require_client(self, client): - """Check client or verify over-ride. + """Check client or verify over-ride. - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. + Args: + client (Union[None, ~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current metric. - :rtype: :class:`google.cloud.logging.client.Client` - :returns: The client passed in or the currently bound client.
""" if client is None: client = self._client return client - def create(self, client=None): - """API call: create the metric via a PUT request + def create(self, *, client=None): + """Create the metric via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. """ client = self._require_client(client) client.metrics_api.metric_create( self.project, self.name, self.filter_, self.description ) - def exists(self, client=None): - """API call: test for the existence of the metric via a GET request + def exists(self, *, client=None): + """Test for the existence of the metric via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. - :rtype: bool - :returns: Boolean indicating existence of the metric. + Returns: + bool: Boolean indicating existence of the metric. """ client = self._require_client(client) @@ -138,48 +133,48 @@ def exists(self, client=None): else: return True - def reload(self, client=None): + def reload(self, *, client=None): """API call: sync local metric configuration via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. """ client = self._require_client(client) data = client.metrics_api.metric_get(self.project, self.name) self.description = data.get("description", "") self.filter_ = data["filter"] - def update(self, client=None): + def update(self, *, client=None): """API call: update metric configuration via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. """ client = self._require_client(client) client.metrics_api.metric_update( self.project, self.name, self.filter_, self.description ) - def delete(self, client=None): + def delete(self, *, client=None): """API call: delete a metric via a DELETE request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete - :type client: :class:`~google.cloud.logging.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current metric. 
+ Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_delete(self.project, self.name) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto index 3f9c3d51d76d..3ad2cfbb5834 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; package google.logging.v2; +import "google/api/field_behavior.proto"; import "google/api/monitored_resource.proto"; import "google/api/resource.proto"; import "google/logging/type/http_request.proto"; @@ -34,6 +34,7 @@ option java_multiple_files = true; option java_outer_classname = "LogEntryProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; +option ruby_package = "Google::Cloud::Logging::V2"; // An individual entry in a log. // @@ -55,9 +56,9 @@ message LogEntry { // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" // "folders/[FOLDER_ID]/logs/[LOG_ID]" // - // A project number may optionally be used in place of PROJECT_ID. The project - // number is translated to its corresponding PROJECT_ID internally and the - // `log_name` field will contain PROJECT_ID in queries and exports. + // A project number may be used in place of PROJECT_ID. The project number is + // translated to its corresponding PROJECT_ID internally and the `log_name` + // field will contain PROJECT_ID in queries and exports. // // `[LOG_ID]` must be URL-encoded within `log_name`. Example: // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. @@ -70,16 +71,16 @@ message LogEntry { // forward-slash is removed. Listing the log entry will not show the leading // slash and filtering for a log name with a leading slash will never return // any results. - string log_name = 12; + string log_name = 12 [(google.api.field_behavior) = REQUIRED]; // Required. The monitored resource that produced this log entry. // // Example: a log entry that reports a database error would be associated with // the monitored resource designating the particular database that reported // the error. - google.api.MonitoredResource resource = 8; + google.api.MonitoredResource resource = 8 [(google.api.field_behavior) = REQUIRED]; - // Optional. The log entry payload, which can be one of multiple types. + // The log entry payload, which can be one of multiple types. oneof payload { // The log entry payload, represented as a protocol buffer. Some Google // Cloud Platform services use this field for their log entry payloads.
@@ -99,29 +100,27 @@ message LogEntry { google.protobuf.Struct json_payload = 6; } - // Optional. The time the event described by the log entry occurred. This - // time is used to compute the log entry's age and to enforce the logs - // retention period. If this field is omitted in a new log entry, then Logging - // assigns it the current time. Timestamps have nanosecond accuracy, but - // trailing zeros in the fractional seconds might be omitted when the - // timestamp is displayed. + // Optional. The time the event described by the log entry occurred. This time is used + // to compute the log entry's age and to enforce the logs retention period. + // If this field is omitted in a new log entry, then Logging assigns it the + // current time. Timestamps have nanosecond accuracy, but trailing zeros in + // the fractional seconds might be omitted when the timestamp is displayed. // - // Incoming log entries should have timestamps that are no more than the [logs - // retention period](/logging/quotas) in the past, and no more than 24 hours - // in the future. Log entries outside those time boundaries will not be - // available when calling `entries.list`, but those log entries can still be - // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). - google.protobuf.Timestamp timestamp = 9; + // Incoming log entries must have timestamps that don't exceed the + // [logs retention + // period](https://cloud.google.com/logging/quotas#logs_retention_periods) in + // the past, and that don't exceed 24 hours in the future. Log entries outside + // those time boundaries aren't ingested by Logging. + google.protobuf.Timestamp timestamp = 9 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time the log entry was received by Logging. - google.protobuf.Timestamp receive_timestamp = 24; + google.protobuf.Timestamp receive_timestamp = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. The severity of the log entry. The default value is - // `LogSeverity.DEFAULT`. - google.logging.type.LogSeverity severity = 10; + // Optional. The severity of the log entry. The default value is `LogSeverity.DEFAULT`. + google.logging.type.LogSeverity severity = 10 [(google.api.field_behavior) = OPTIONAL]; - // Optional. A unique identifier for the log entry. If you provide a value, - // then Logging considers other log entries in the same project, with the same + // Optional. A unique identifier for the log entry. If you provide a value, then + // Logging considers other log entries in the same project, with the same // `timestamp`, and with the same `insert_id` to be duplicates which are // removed in a single query result. However, there are no guarantees of // de-duplication in the export of logs. @@ -131,43 +130,32 @@ message LogEntry { // // In queries, the `insert_id` is also used to order log entries that have // the same `log_name` and `timestamp` values. - string insert_id = 4; + string insert_id = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Information about the HTTP request associated with this log - // entry, if applicable. - google.logging.type.HttpRequest http_request = 7; + // Optional. Information about the HTTP request associated with this log entry, if + // applicable. + google.logging.type.HttpRequest http_request = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. A set of user-defined (key, value) data that provides additional // information about the log entry. - map labels = 11; - - // Deprecated. Output only. 
Additional metadata about the monitored resource. - // - // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have - // this field populated for GKE versions older than 1.12.6. For GKE versions - // 1.12.6 and above, the `metadata` field has been deprecated. The Kubernetes - // pod labels that used to be in `metadata.userLabels` will now be present in - // the `labels` field with a key prefix of `k8s-pod/`. The Stackdriver system - // labels that were present in the `metadata.systemLabels` field will no - // longer be available in the LogEntry. - google.api.MonitoredResourceMetadata metadata = 25 [deprecated = true]; + map labels = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. Information about an operation associated with the log entry, if // applicable. - LogEntryOperation operation = 15; + LogEntryOperation operation = 15 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Resource name of the trace associated with the log entry, if any. - // If it contains a relative resource name, the name is assumed to be relative - // to `//tracing.googleapis.com`. Example: + // Optional. Resource name of the trace associated with the log entry, if any. If it + // contains a relative resource name, the name is assumed to be relative to + // `//tracing.googleapis.com`. Example: // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` - string trace = 22; + string trace = 22 [(google.api.field_behavior) = OPTIONAL]; // Optional. The span ID within the trace associated with the log entry. // // For Trace spans, this is the same format that the Trace API v2 uses: a // 16-character hexadecimal encoding of an 8-byte array, such as - // "000000000000004a". - string span_id = 27; + // `000000000000004a`. + string span_id = 27 [(google.api.field_behavior) = OPTIONAL]; // Optional. The sampling decision of the trace associated with the log entry. // @@ -176,11 +164,10 @@ message LogEntry { // for storage when this log entry was written, or the sampling decision was // unknown at the time. A non-sampled `trace` value is still useful as a // request correlation identifier. The default is False. - bool trace_sampled = 30; + bool trace_sampled = 30 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Source code location information associated with the log entry, - // if any. - LogEntrySourceLocation source_location = 23; + // Optional. Source code location information associated with the log entry, if any. + LogEntrySourceLocation source_location = 23 [(google.api.field_behavior) = OPTIONAL]; } // Additional information about a potentially long-running operation with which @@ -188,18 +175,18 @@ message LogEntry { message LogEntryOperation { // Optional. An arbitrary operation identifier. Log entries with the same // identifier are assumed to be part of the same operation. - string id = 1; + string id = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. An arbitrary producer identifier. The combination of `id` and // `producer` must be globally unique. Examples for `producer`: // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. - string producer = 2; + string producer = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Set this to True if this is the first log entry in the operation. - bool first = 3; + bool first = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Set this to True if this is the last log entry in the operation. 
- bool last = 4; + bool last = 4 [(google.api.field_behavior) = OPTIONAL]; } // Additional information about the source code location that produced the log @@ -207,11 +194,11 @@ message LogEntryOperation { message LogEntrySourceLocation { // Optional. Source file name. Depending on the runtime environment, this // might be a simple name or a fully-qualified name. - string file = 1; + string file = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Line within the source file. 1-based; 0 indicates no line number // available. - int64 line = 2; + int64 line = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Human-readable name of the function or method being invoked, with // optional context such as the class or package name. This information may be @@ -219,5 +206,5 @@ message LogEntrySourceLocation { // less meaningful. The format can vary by language. For example: // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` // (Python). - string function = 3; + string function = 3 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py deleted file mode 100644 index f4805192b30a..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ /dev/null @@ -1,881 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/logging_v2/proto/log_entry.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.logging.type import ( - http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, -) -from google.logging.type import ( - log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, -) -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/log_entry.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - 
'\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a\x19google/api/resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xce\x07\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:\xbd\x01\xea\x41\xb9\x01\n\x1alogging.googleapis.com/Log\x12\x1dprojects/{project}/logs/{log}\x12\'organizations/{organization}/logs/{log}\x12\x1b\x66olders/{folder}/logs/{log}\x12,billingAccounts/{billing_account}/logs/{log}\x1a\x08log_nameB\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.LogEntry.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogEntry.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogEntry.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1084, - serialized_end=1129, -) - -_LOGENTRY = _descriptor.Descriptor( - name="LogEntry", - full_name="google.logging.v2.LogEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.LogEntry.log_name", - index=0, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.LogEntry.resource", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="proto_payload", - full_name="google.logging.v2.LogEntry.proto_payload", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.logging.v2.LogEntry.text_payload", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="json_payload", - full_name="google.logging.v2.LogEntry.json_payload", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp", - full_name="google.logging.v2.LogEntry.timestamp", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="receive_timestamp", - full_name="google.logging.v2.LogEntry.receive_timestamp", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.logging.v2.LogEntry.severity", - 
index=7, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="insert_id", - full_name="google.logging.v2.LogEntry.insert_id", - index=8, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.logging.v2.LogEntry.http_request", - index=9, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.LogEntry.labels", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.logging.v2.LogEntry.metadata", - index=11, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation", - full_name="google.logging.v2.LogEntry.operation", - index=12, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace", - full_name="google.logging.v2.LogEntry.trace", - index=13, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="span_id", - full_name="google.logging.v2.LogEntry.span_id", - index=14, - number=27, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace_sampled", - full_name="google.logging.v2.LogEntry.trace_sampled", - index=15, - number=30, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_location", - full_name="google.logging.v2.LogEntry.source_location", - index=16, - number=23, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY,], - enum_types=[], - serialized_options=_b( - "\352A\271\001\n\032logging.googleapis.com/Log\022\035projects/{project}/logs/{log}\022'organizations/{organization}/logs/{log}\022\033folders/{folder}/logs/{log}\022,billingAccounts/{billing_account}/logs/{log}\032\010log_name" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.logging.v2.LogEntry.payload", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=358, - serialized_end=1332, -) - - -_LOGENTRYOPERATION = _descriptor.Descriptor( - name="LogEntryOperation", - full_name="google.logging.v2.LogEntryOperation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="google.logging.v2.LogEntryOperation.id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="producer", - full_name="google.logging.v2.LogEntryOperation.producer", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="first", - full_name="google.logging.v2.LogEntryOperation.first", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last", - full_name="google.logging.v2.LogEntryOperation.last", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1334, - serialized_end=1412, -) - - -_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( - name="LogEntrySourceLocation", - full_name="google.logging.v2.LogEntrySourceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file", - full_name="google.logging.v2.LogEntrySourceLocation.file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="line", - full_name="google.logging.v2.LogEntrySourceLocation.line", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="function", - full_name="google.logging.v2.LogEntrySourceLocation.function", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1414, - serialized_end=1484, -) - -_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY -_LOGENTRY.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_LOGENTRY.fields_by_name[ - "proto_payload" -].message_type = google_dot_protobuf_dot_any__pb2._ANY -_LOGENTRY.fields_by_name[ - "json_payload" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_LOGENTRY.fields_by_name[ - "timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "receive_timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "severity" -].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY -_LOGENTRY.fields_by_name[ - "http_request" -].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST -_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY -_LOGENTRY.fields_by_name[ - "metadata" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION -_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["proto_payload"] -) -_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["text_payload"] -) -_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["json_payload"] -) -_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY -DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION -DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogEntry = _reflection.GeneratedProtocolMessageType( - "LogEntry", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRY_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) - ), - ), - DESCRIPTOR=_LOGENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""An individual entry in a log. - - - Attributes: - log_name: - Required. 
The resource name of the log to which this log entry - belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may - optionally be used in place of PROJECT\_ID. The project number - is translated to its corresponding PROJECT\_ID internally and - the ``log_name`` field will contain PROJECT\_ID in queries and - exports. ``[LOG_ID]`` must be URL-encoded within - ``log_name``. Example: ``"organizations/1234567890/logs/cloudr - esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must - be less than 512 characters long and can only include the - following characters: upper and lower case alphanumeric - characters, forward-slash, underscore, hyphen, and period. - For backward compatibility, if ``log_name`` begins with a - forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. Listing - the log entry will not show the leading slash and filtering - for a log name with a leading slash will never return any - results. - resource: - Required. The monitored resource that produced this log entry. - Example: a log entry that reports a database error would be - associated with the monitored resource designating the - particular database that reported the error. - payload: - Optional. The log entry payload, which can be one of multiple - types. - proto_payload: - The log entry payload, represented as a protocol buffer. Some - Google Cloud Platform services use this field for their log - entry payloads. The following protocol buffer types are - supported; user-defined types are not supported: - "type.googleapis.com/google.cloud.audit.AuditLog" - "type.googleapis.com/google.appengine.logging.v1.RequestLog" - text_payload: - The log entry payload, represented as a Unicode string - (UTF-8). - json_payload: - The log entry payload, represented as a structure that is - expressed as a JSON object. - timestamp: - Optional. The time the event described by the log entry - occurred. This time is used to compute the log entry's age and - to enforce the logs retention period. If this field is omitted - in a new log entry, then Logging assigns it the current time. - Timestamps have nanosecond accuracy, but trailing zeros in the - fractional seconds might be omitted when the timestamp is - displayed. Incoming log entries should have timestamps that - are no more than the `logs retention period - `__ in the past, and no more than 24 hours in - the future. Log entries outside those time boundaries will not - be available when calling ``entries.list``, but those log - entries can still be `exported with LogSinks - `__. - receive_timestamp: - Output only. The time the log entry was received by Logging. - severity: - Optional. The severity of the log entry. The default value is - ``LogSeverity.DEFAULT``. - insert_id: - Optional. A unique identifier for the log entry. If you - provide a value, then Logging considers other log entries in - the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which are removed in a - single query result. However, there are no guarantees of de- - duplication in the export of logs. If the ``insert_id`` is - omitted when writing a log entry, the Logging API assigns its - own unique identifier in this field. In queries, the - ``insert_id`` is also used to order log entries that have the - same ``log_name`` and ``timestamp`` values. 
- http_request: - Optional. Information about the HTTP request associated with - this log entry, if applicable. - labels: - Optional. A set of user-defined (key, value) data that - provides additional information about the log entry. - metadata: - Deprecated. Output only. Additional metadata about the - monitored resource. Only ``k8s_container``, ``k8s_pod``, and - ``k8s_node`` MonitoredResources have this field populated for - GKE versions older than 1.12.6. For GKE versions 1.12.6 and - above, the ``metadata`` field has been deprecated. The - Kubernetes pod labels that used to be in - ``metadata.userLabels`` will now be present in the ``labels`` - field with a key prefix of ``k8s-pod/``. The Stackdriver - system labels that were present in the - ``metadata.systemLabels`` field will no longer be available in - the LogEntry. - operation: - Optional. Information about an operation associated with the - log entry, if applicable. - trace: - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: ``projects/my- - projectid/traces/06796866738c859f2f19b7cfb3214824`` - span_id: - Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the - Trace API v2 uses: a 16-character hexadecimal encoding of an - 8-byte array, such as "000000000000004a". - trace_sampled: - Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the - ``trace`` field was sampled for storage in a trace backend. - False means that the trace was not sampled for storage when - this log entry was written, or the sampling decision was - unknown at the time. A non-sampled ``trace`` value is still - useful as a request correlation identifier. The default is - False. - source_location: - Optional. Source code location information associated with the - log entry, if any. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - ), -) -_sym_db.RegisterMessage(LogEntry) -_sym_db.RegisterMessage(LogEntry.LabelsEntry) - -LogEntryOperation = _reflection.GeneratedProtocolMessageType( - "LogEntryOperation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYOPERATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about a potentially long-running - operation with which a log entry is associated. - - - Attributes: - id: - Optional. An arbitrary operation identifier. Log entries with - the same identifier are assumed to be part of the same - operation. - producer: - Optional. An arbitrary producer identifier. The combination of - ``id`` and ``producer`` must be globally unique. Examples for - ``producer``: ``"MyDivision.MyBigCompany.com"``, - ``"github.com/MyProject/MyApplication"``. - first: - Optional. Set this to True if this is the first log entry in - the operation. - last: - Optional. Set this to True if this is the last log entry in - the operation. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - ), -) -_sym_db.RegisterMessage(LogEntryOperation) - -LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( - "LogEntrySourceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYSOURCELOCATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about the source code location that - produced the log entry. - - - Attributes: - file: - Optional. Source file name. Depending on the runtime - environment, this might be a simple name or a fully-qualified - name. - line: - Optional. Line within the source file. 1-based; 0 indicates no - line number available. - function: - Optional. Human-readable name of the function or method being - invoked, with optional context such as the class or package - name. This information may be used in contexts such as the - logs viewer, where a file and line number are less meaningful. - The format can vary by language. For example: - ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` - (Go), ``function`` (Python). - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) - ), -) -_sym_db.RegisterMessage(LogEntrySourceLocation) - - -DESCRIPTOR._options = None -_LOGENTRY_LABELSENTRY._options = None -_LOGENTRY.fields_by_name["metadata"]._options = None -_LOGENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py deleted file mode 100644 index 07cb78fe03a9..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto index c3a5246334ca..58647b92ff04 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,11 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; import "google/api/monitored_resource.proto"; @@ -26,8 +24,10 @@ import "google/logging/v2/log_entry.proto"; import "google/logging/v2/logging_config.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -36,6 +36,7 @@ option java_multiple_files = true; option java_outer_classname = "LoggingProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; +option ruby_package = "Google::Cloud::Logging::V2"; // Service for ingesting and querying logs. service LoggingServiceV2 { @@ -87,7 +88,8 @@ service LoggingServiceV2 { // Lists log entries. Use this method to retrieve log entries that originated // from a project/folder/organization/billing account. For ways to export log - // entries, see [Exporting Logs](/logging/docs/export). + // entries, see [Exporting + // Logs](https://cloud.google.com/logging/docs/export). rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:list" @@ -142,7 +144,7 @@ message DeleteLogRequest { string log_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" + type: "logging.googleapis.com/Log" } ]; } @@ -162,13 +164,16 @@ message WriteLogEntriesRequest { // "projects/my-project-id/logs/syslog" // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" // - // The permission logging.logEntries.create is needed on each - // project, organization, billing account, or folder that is receiving - // new log entries, whether the resource is specified in - // logName or in an individual log entry. - string log_name = 1 [(google.api.resource_reference) = { - type: "logging.googleapis.com/Log" - }]; + // The permission `logging.logEntries.create` is needed on each project, + // organization, billing account, or folder that is receiving new log + // entries, whether the resource is specified in `logName` or in an + // individual log entry. + string log_name = 1 [ + (google.api.field_behavior) = OPTIONAL, + (google.api.resource_reference) = { + type: "logging.googleapis.com/Log" + } + ]; // Optional. A default monitored resource object that is assigned to all log // entries in `entries` that do not specify a value for `resource`. Example: @@ -178,13 +183,13 @@ message WriteLogEntriesRequest { // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} // // See [LogEntry][google.logging.v2.LogEntry]. - google.api.MonitoredResource resource = 2; + google.api.MonitoredResource resource = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Default labels that are added to the `labels` field of all log // entries in `entries`. If a log entry already has a label with the same key // as a label in this parameter, then the log entry's label is not changed. // See [LogEntry][google.logging.v2.LogEntry]. - map labels = 3; + map labels = 3 [(google.api.field_behavior) = OPTIONAL]; // Required. The log entries to send to Logging. The order of log // entries in this list does not matter. 
Values supplied in this method's @@ -200,15 +205,16 @@ message WriteLogEntriesRequest { // the entries later in the list. See the `entries.list` method. // // Log entries with timestamps that are more than the - // [logs retention period](/logging/quota-policy) in the past or more than - // 24 hours in the future will not be available when calling `entries.list`. - // However, those log entries can still be - // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). + // [logs retention period](https://cloud.google.com/logging/quota-policy) in + // the past or more than 24 hours in the future will not be available when + // calling `entries.list`. However, those log entries can still be [exported + // with + // LogSinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). // // To improve throughput and to avoid exceeding the - // [quota limit](/logging/quota-policy) for calls to `entries.write`, - // you should try to include several log entries in this list, - // rather than calling this method for each individual log entry. + // [quota limit](https://cloud.google.com/logging/quota-policy) for calls to + // `entries.write`, you should try to include several log entries in this + // list, rather than calling this method for each individual log entry. repeated LogEntry entries = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. Whether valid entries should be written even if some other @@ -216,19 +222,16 @@ message WriteLogEntriesRequest { // entry is not written, then the response status is the error associated // with one of the failed entries and the response includes error details // keyed by the entries' zero-based index in the `entries.write` method. - bool partial_success = 5; + bool partial_success = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. If true, the request should expect normal response, but the // entries won't be persisted nor exported. Useful for checking whether the // logging API endpoints are working properly before sending valuable data. - bool dry_run = 6; + bool dry_run = 6 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from WriteLogEntries. -// empty -message WriteLogEntriesResponse { - -} +message WriteLogEntriesResponse {} // Error details for WriteLogEntries with partial success. message WriteLogEntriesPartialErrors { @@ -243,11 +246,6 @@ message WriteLogEntriesPartialErrors { // The parameters to `ListLogEntries`. message ListLogEntriesRequest { - // Deprecated. Use `resource_names` instead. One or more project identifiers - // or project numbers from which to retrieve log entries. Example: - // `"my-project-1A"`. - repeated string project_ids = 1 [deprecated = true]; - // Required. Names of one or more parent resources from which to // retrieve log entries: // @@ -266,13 +264,13 @@ message ListLogEntriesRequest { ]; // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Queries](/logging/docs/view/advanced-queries). Only log entries that - // match the filter are returned. An empty filter matches all log entries in - // the resources listed in `resource_names`. Referencing a parent resource - // that is not listed in `resource_names` will cause the filter to return no - // results. - // The maximum length of the filter is 20000 characters. - string filter = 2; + // Logs Queries](https://cloud.google.com/logging/docs/view/advanced-queries). + // Only log entries that match the filter are returned. 
An empty filter + // matches all log entries in the resources listed in `resource_names`. + // Referencing a parent resource that is not listed in `resource_names` will + // cause the filter to return no results. The maximum length of the filter is + // 20000 characters. + string filter = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. How the results should be sorted. Presently, the only permitted // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first @@ -280,18 +278,19 @@ message ListLogEntriesRequest { // `LogEntry.timestamp` (oldest first), and the second option returns entries // in order of decreasing timestamps (newest first). Entries with equal // timestamps are returned in order of their `insert_id` values. - string order_by = 3; + string order_by = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `next_page_token` in the + // Default is 50. If the value is negative or exceeds 1000, + // the request is rejected. The presence of `next_page_token` in the // response indicates that more results might be available. - int32 page_size = 4; + int32 page_size = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `page_token` must be the value of // `next_page_token` from the previous response. The values of other method // parameters should be identical to those in the previous call. - string page_token = 5; + string page_token = 5 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from `ListLogEntries`. @@ -319,13 +318,13 @@ message ListMonitoredResourceDescriptorsRequest { // Optional. The maximum number of results to return from this request. // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. - int32 page_size = 1; + int32 page_size = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `pageToken` must be the value of // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. - string page_token = 2; + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from ListMonitoredResourceDescriptors. @@ -347,20 +346,23 @@ message ListLogsRequest { // "organizations/[ORGANIZATION_ID]" // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" - string parent = 1 [(google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" - }]; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/Log" + } + ]; // Optional. The maximum number of results to return from this request. // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. If present, then retrieve the next batch of results from the // preceding call to this method. `pageToken` must be the value of // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. 
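
The batching and pagination contracts above translate directly to the released Python client. A minimal sketch, assuming the handwritten google-cloud-logging surface of this era (Client.list_entries, Logger.batch) and a hypothetical project id:

    from google.cloud import logging

    client = logging.Client(project="my-project")  # hypothetical project id

    # Group several entries into a single entries.write call, per the quota
    # guidance above, instead of issuing one RPC per entry.
    logger = client.logger("syslog")
    batch = logger.batch()
    batch.log_text("disk almost full", severity="WARNING")
    batch.log_struct({"event": "backup", "status": "ok"})
    batch.commit()  # one WriteLogEntries RPC for all buffered entries

    # The iterator re-issues identical parameters, supplying each response's
    # next_page_token as page_token, exactly as the comments above specify.
    iterator = client.list_entries(
        filter_="severity>=ERROR", order_by=logging.DESCENDING, page_size=50
    )
    for page in iterator.pages:
        for entry in page:
            print(entry.timestamp, entry.payload)
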
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from ListLogs. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index 7fb830ded21f..9486f4a9a4fe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -33,6 +32,19 @@ option java_multiple_files = true; option java_outer_classname = "LoggingConfigProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; +option ruby_package = "Google::Cloud::Logging::V2"; +option (google.api.resource_definition) = { + type: "logging.googleapis.com/OrganizationLocation" + pattern: "organizations/{organization}/locations/{location}" +}; +option (google.api.resource_definition) = { + type: "logging.googleapis.com/FolderLocation" + pattern: "folders/{folder}/locations/{location}" +}; +option (google.api.resource_definition) = { + type: "logging.googleapis.com/BillingAccountLocation" + pattern: "billingAccounts/{billing_account}/locations/{location}" +}; // Service for configuring sinks used to route log entries. service ConfigServiceV2 { @@ -43,6 +55,79 @@ service ConfigServiceV2 { "https://www.googleapis.com/auth/logging.admin," "https://www.googleapis.com/auth/logging.read"; + // Lists buckets (Beta). + rpc ListBuckets(ListBucketsRequest) returns (ListBucketsResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*/locations/*}/buckets" + additional_bindings { + get: "/v2/{parent=projects/*/locations/*}/buckets" + } + additional_bindings { + get: "/v2/{parent=organizations/*/locations/*}/buckets" + } + additional_bindings { + get: "/v2/{parent=folders/*/locations/*}/buckets" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*/locations/*}/buckets" + } + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a bucket (Beta). + rpc GetBucket(GetBucketRequest) returns (LogBucket) { + option (google.api.http) = { + get: "/v2/{name=*/*/locations/*/buckets/*}" + additional_bindings { + get: "/v2/{name=projects/*/locations/*/buckets/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/locations/*/buckets/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/locations/*/buckets/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/buckets/*}" + } + }; + } + + // Updates a bucket. This method replaces the following fields in the + // existing bucket with values from the new bucket: `retention_days` + // + // If the retention period is decreased and the bucket is locked, + // FAILED_PRECONDITION will be returned. + // + // If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION + // will be returned. + // + // A bucket's region may not be modified after it is created. + // This method is in Beta.
+ rpc UpdateBucket(UpdateBucketRequest) returns (LogBucket) { + option (google.api.http) = { + patch: "/v2/{name=*/*/locations/*/buckets/*}" + body: "bucket" + additional_bindings { + patch: "/v2/{name=projects/*/locations/*/buckets/*}" + body: "bucket" + } + additional_bindings { + patch: "/v2/{name=organizations/*/locations/*/buckets/*}" + body: "bucket" + } + additional_bindings { + patch: "/v2/{name=folders/*/locations/*/buckets/*}" + body: "bucket" + } + additional_bindings { + patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" + body: "bucket" + } + }; + } + // Lists sinks. rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { @@ -297,7 +382,8 @@ // the GCP organization. // // See [Enabling CMEK for Logs - // Router](/logging/docs/routing/managed-encryption) for more information. + // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + // for more information. rpc GetCmekSettings(GetCmekSettingsRequest) returns (CmekSettings) { option (google.api.http) = { get: "/v2/{name=*/*}/cmekSettings" @@ -320,7 +406,8 @@ // 3) access to the key is disabled. // // See [Enabling CMEK for Logs - // Router](/logging/docs/routing/managed-encryption) for more information. + // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + // for more information. rpc UpdateCmekSettings(UpdateCmekSettingsRequest) returns (CmekSettings) { option (google.api.http) = { patch: "/v2/{name=*/*}/cmekSettings" @@ -333,6 +420,48 @@ } } +// Describes a repository of logs (Beta). +message LogBucket { + option (google.api.resource) = { + type: "logging.googleapis.com/LogBucket" + pattern: "projects/{project}/locations/{location}/buckets/{bucket}" + pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}" + pattern: "folders/{folder}/locations/{location}/buckets/{bucket}" + pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}" + }; + + // The resource name of the bucket. + // For example: + // "projects/my-project-id/locations/my-location/buckets/my-bucket-id". The + // supported locations are: + // "global" + // "us-central1" + // + // For the location of `global` it is unspecified where logs are actually + // stored. + // Once a bucket has been created, the location cannot be changed. + string name = 1; + + // Describes this bucket. + string description = 3; + + // Output only. The creation timestamp of the bucket. This is not set for any of the + // default buckets. + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last update timestamp of the bucket. + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Logs will be retained by default for this amount of time, after which they + // will automatically be deleted. The minimum retention period is 1 day. + // If this value is set to zero at bucket creation time, the default time of + // 30 days will be used. + int32 retention_days = 11; + + // Output only. The bucket lifecycle state. + LifecycleState lifecycle_state = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + // Describes a sink used to export log entries to one of the following // destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a // Cloud Pub/Sub topic. A logs filter controls which log entries are exported. @@ -340,16 +469,14 @@ // folder.
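
A sink with these fields is usually created through the handwritten Python client; a minimal sketch, with hypothetical project, topic, and sink names:

    from google.cloud import logging

    client = logging.Client(project="my-project")  # hypothetical project id
    sink = client.sink(
        "my-syslog-errors-to-pubsub",  # client-assigned sink identifier
        filter_='logName="projects/my-project/logs/syslog" AND severity>=ERROR',
        destination="pubsub.googleapis.com/projects/my-project/topics/my-topic",
    )
    sink.create(unique_writer_identity=True)

    # Exports fail until this identity is granted write access to the topic.
    print(sink.writer_identity)
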
message LogSink { option (google.api.resource) = { - type: "logging.googleapis.com/Sink" + type: "logging.googleapis.com/LogSink" pattern: "projects/{project}/sinks/{sink}" pattern: "organizations/{organization}/sinks/{sink}" pattern: "folders/{folder}/sinks/{sink}" pattern: "billingAccounts/{billing_account}/sinks/{sink}" }; - // Available log entry formats. Log entries can be written to - // Logging in either format and can be exported in either format. - // Version 2 is the preferred format. + // Deprecated. This is unused. enum VersionFormat { // An unspecified format version that will default to V2. VERSION_FORMAT_UNSPECIFIED = 0; @@ -361,12 +488,12 @@ message LogSink { V1 = 2; } - // Required. The client-assigned sink identifier, unique within the - // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are - // limited to 100 characters and can include only the following characters: - // upper and lower-case alphanumeric characters, underscores, hyphens, and - // periods. First character has to be alphanumeric. - string name = 1; + // Required. The client-assigned sink identifier, unique within the project. Example: + // `"my-syslog-errors-to-pubsub"`. Sink identifiers are limited to 100 + // characters and can include only the following characters: upper and + // lower-case alphanumeric characters, underscores, hyphens, and periods. + // First character has to be alphanumeric. + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The export destination: // @@ -377,42 +504,44 @@ message LogSink { // The sink's `writer_identity`, set when the sink is created, must // have permission to write to the destination or else the log // entries are not exported. For more information, see - // [Exporting Logs with Sinks](/logging/docs/api/tasks/exporting-logs). - string destination = 3 [(google.api.resource_reference) = { - type: "*" - }]; + // [Exporting Logs with + // Sinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). + string destination = 3 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "*" + } + ]; - // Optional. An [advanced logs filter](/logging/docs/view/advanced-queries). The only - // exported log entries are those that are in the resource owning the sink and - // that match the filter. For example: + // Optional. An [advanced logs + // filter](https://cloud.google.com/logging/docs/view/advanced-queries). The + // only exported log entries are those that are in the resource owning the + // sink and that match the filter. For example: // // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR - string filter = 5; + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. A description of this sink. // The maximum length of the description is 8000 characters. - string description = 18; + string description = 18 [(google.api.field_behavior) = OPTIONAL]; // Optional. If set to True, then this sink is disabled and it does not // export any log entries. - bool disabled = 19; + bool disabled = 19 [(google.api.field_behavior) = OPTIONAL]; - // Deprecated. The log entry format to use for this sink's exported log - // entries. The v2 format is used by default and cannot be changed. + // Deprecated. This field is unused. VersionFormat output_version_format = 6 [deprecated = true]; - // Output only. An IAM identity—a service account or group—under - // which Logging writes the exported log entries to the sink's destination. 
- // This field is set by - // [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - // and - // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - // based on the value of `unique_writer_identity` in those methods. + // Output only. An IAM identity–a service account or group—under which Logging + // writes the exported log entries to the sink's destination. This field is + // set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and + // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] based on the + // value of `unique_writer_identity` in those methods. // // Until you grant this identity write-access to the destination, log entry // exports from this sink will fail. For more information, // see [Granting Access for a - // Resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). + // Resource](https://cloud.google.com/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). // Consult the destination service's documentation to determine the // appropriate IAM roles to assign to the identity. string writer_identity = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -430,12 +559,12 @@ message LogSink { // // logName:("projects/test-project1/" OR "projects/test-project2/") AND // resource.type=gce_instance - bool include_children = 9; + bool include_children = 9 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Destination dependent options. + // Destination dependent options. oneof options { // Optional. Options that affect sinks exporting data to BigQuery. - BigQueryOptions bigquery_options = 12; + BigQueryOptions bigquery_options = 12 [(google.api.field_behavior) = OPTIONAL]; } // Output only. The creation timestamp of the sink. @@ -447,24 +576,19 @@ message LogSink { // // This field may not be present for older sinks. google.protobuf.Timestamp update_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Do not use. This field is ignored. - google.protobuf.Timestamp start_time = 10 [deprecated = true]; - - // Do not use. This field is ignored. - google.protobuf.Timestamp end_time = 11 [deprecated = true]; } // Options that change functionality of a sink exporting data to BigQuery. message BigQueryOptions { // Optional. Whether to use [BigQuery's partition - // tables](/bigquery/docs/partitioned-tables). By default, Logging - // creates dated tables based on the log entries' timestamps, e.g. - // syslog_20170523. With partitioned tables the date suffix is no longer + // tables](https://cloud.google.com/bigquery/docs/partitioned-tables). By + // default, Logging creates dated tables based on the log entries' timestamps, + // e.g. syslog_20170523. With partitioned tables the date suffix is no longer // present and [special query - // syntax](/bigquery/docs/querying-partitioned-tables) has to be used instead. - // In both cases, tables are sharded based on UTC timezone. - bool use_partitioned_tables = 1; + // syntax](https://cloud.google.com/bigquery/docs/querying-partitioned-tables) + // has to be used instead. In both cases, tables are sharded based on UTC + // timezone. + bool use_partitioned_tables = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. True if new timestamp column based partitioning is in use, // false if legacy ingestion-time partitioning is in use. 
@@ -475,6 +599,114 @@ message BigQueryOptions { bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } +// LogBucket lifecycle states (Beta). +enum LifecycleState { + // Unspecified state. This is only used/useful for distinguishing + // unset values. + LIFECYCLE_STATE_UNSPECIFIED = 0; + + // The normal and active state. + ACTIVE = 1; + + // The bucket has been marked for deletion by the user. + DELETE_REQUESTED = 2; +} + +// The parameters to `ListBuckets` (Beta). +message ListBucketsRequest { + // Required. The parent resource whose buckets are to be listed: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + // + // Note: The locations portion of the resource must be specified, but + // supplying the character `-` in place of [LOCATION_ID] will return all + // buckets. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/LogBucket" + } + ]; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response from ListBuckets (Beta). +message ListBucketsResponse { + // A list of buckets. + repeated LogBucket buckets = 1; + + // If there might be more results than appear in this response, then + // `nextPageToken` is included. To get the next set of results, call the same + // method again using the value of `nextPageToken` as `pageToken`. + string next_page_token = 2; +} + +// The parameters to `UpdateBucket` (Beta). +message UpdateBucketRequest { + // Required. The full resource name of the bucket to update. + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. Also + // requires permission "resourcemanager.projects.updateLiens" to set the + // locked property + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; + + // Required. The updated bucket. + LogBucket bucket = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. Field mask that specifies the fields in `bucket` that need an update. A + // bucket field will be overwritten if, and only if, it is in the update + // mask. `name` and output only fields cannot be updated. + // + // For a detailed `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + // + // Example: `updateMask=retention_days`. 
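
On the Python side this update mask is an ordinary protobuf FieldMask; the sketch below builds one for the `updateMask=retention_days` example. The `update_bucket` call is shown only as the method name the GAPIC generator would derive from the rpc above, an assumption, since the regenerated client is not part of this patch:

    from google.protobuf import field_mask_pb2

    # Only the fields listed in `paths` are overwritten on the existing bucket.
    update_mask = field_mask_pb2.FieldMask(paths=["retention_days"])

    # Assumed generated surface, e.g. on a ConfigServiceV2 client:
    # config_client.update_bucket(
    #     name="projects/my-project-id/locations/my-location/buckets/my-bucket-id",
    #     bucket={"retention_days": 60},
    #     update_mask=update_mask,
    # )
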
+ google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// The parameters to `GetBucket` (Beta). +message GetBucketRequest { + // Required. The resource name of the bucket: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; +} + // The parameters to `ListSinks`. message ListSinksRequest { // Required. The parent resource whose sinks are to be listed: @@ -486,7 +718,7 @@ message ListSinksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Sink" + child_type: "logging.googleapis.com/LogSink" } ]; @@ -494,12 +726,12 @@ message ListSinksRequest { // preceding call to this method. `pageToken` must be the value of // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. - string page_token = 2; + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The maximum number of results to return from this request. // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. - int32 page_size = 3; + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from `ListSinks`. @@ -526,7 +758,7 @@ message GetSinkRequest { string sink_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Sink" + type: "logging.googleapis.com/LogSink" } ]; } @@ -544,7 +776,7 @@ message CreateSinkRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Sink" + child_type: "logging.googleapis.com/LogSink" } ]; @@ -563,13 +795,13 @@ message CreateSinkRequest { // resource such as an organization, then the value of `writer_identity` will // be a unique service account used only for exports from the new sink. For // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. - bool unique_writer_identity = 3; + bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; } // The parameters to `UpdateSink`. message UpdateSinkRequest { - // Required. The full resource name of the sink to update, including the - // parent resource and the sink identifier: + // Required. The full resource name of the sink to update, including the parent + // resource and the sink identifier: // // "projects/[PROJECT_ID]/sinks/[SINK_ID]" // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" @@ -580,12 +812,12 @@ message UpdateSinkRequest { string sink_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Sink" + type: "logging.googleapis.com/LogSink" } ]; - // Required. The updated sink, whose name is the same identifier that appears - // as part of `sink_name`. + // Required. The updated sink, whose name is the same identifier that appears as part + // of `sink_name`. 
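
Updating a sink follows the read-modify-write shape implied here; a minimal sketch using the handwritten client, with the same hypothetical sink as earlier:

    from google.cloud import logging

    client = logging.Client(project="my-project")  # hypothetical project id
    sink = client.sink("my-syslog-errors-to-pubsub")
    sink.reload()  # fetch the current destination and filter
    sink.filter_ = "severity>=CRITICAL"
    sink.update(unique_writer_identity=True)  # writer identity rules below apply
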
LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] @@ -599,7 +831,7 @@ message UpdateSinkRequest { // `writer_identity` is changed to a unique service account. // + It is an error if the old value is true and the new value is // set to false or defaulted to false. - bool unique_writer_identity = 3; + bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Field mask that specifies the fields in `sink` that need // an update. A sink field will be overwritten if, and only if, it is @@ -615,13 +847,13 @@ message UpdateSinkRequest { // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask // // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4; + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; } // The parameters to `DeleteSink`. message DeleteSinkRequest { - // Required. The full resource name of the sink to delete, including the - // parent resource and the sink identifier: + // Required. The full resource name of the sink to delete, including the parent + // resource and the sink identifier: // // "projects/[PROJECT_ID]/sinks/[SINK_ID]" // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" @@ -632,7 +864,7 @@ message DeleteSinkRequest { string sink_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Sink" + type: "logging.googleapis.com/LogSink" } ]; } @@ -645,47 +877,48 @@ message DeleteSinkRequest { // apply to child resources, and that you can't exclude audit log entries. message LogExclusion { option (google.api.resource) = { - type: "logging.googleapis.com/Exclusion" + type: "logging.googleapis.com/LogExclusion" pattern: "projects/{project}/exclusions/{exclusion}" pattern: "organizations/{organization}/exclusions/{exclusion}" pattern: "folders/{folder}/exclusions/{exclusion}" pattern: "billingAccounts/{billing_account}/exclusions/{exclusion}" }; - // Required. A client-assigned identifier, such as - // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and - // can include only letters, digits, underscores, hyphens, and periods. - // First character has to be alphanumeric. - string name = 1; + // Required. A client-assigned identifier, such as `"load-balancer-exclusion"`. + // Identifiers are limited to 100 characters and can include only letters, + // digits, underscores, hyphens, and periods. First character has to be + // alphanumeric. + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. A description of this exclusion. - string description = 2; + string description = 2 [(google.api.field_behavior) = OPTIONAL]; - // Required. An [advanced logs filter](/logging/docs/view/advanced-queries) - // that matches the log entries to be excluded. By using the - // [sample function](/logging/docs/view/advanced-queries#sample), + // Required. An [advanced logs + // filter](https://cloud.google.com/logging/docs/view/advanced-queries) that + // matches the log entries to be excluded. By using the [sample + // function](https://cloud.google.com/logging/docs/view/advanced-queries#sample), // you can exclude less than 100% of the matching log entries. 
// For example, the following query matches 99% of low-severity log // entries from Google Cloud Storage buckets: // // `"resource.type=gcs_bucket severity\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02:\xbc\x01\xea\x41\xb8\x01\n\x1blogging.googleapis.com/Sink\x12\x1fprojects/{project}/sinks/{sink}\x12)organizations/{organization}/sinks/{sink}\x12\x1d\x66olders/{folder}/sinks/{sink}\x12.billingAccounts/{billing_account}/sinks/{sink}B\t\n\x07options"b\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08\x12/\n"uses_timestamp_column_partitioning\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"n\n\x10ListSinksRequest\x12\x33\n\x06parent\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\x12\x1blogging.googleapis.com/Sink\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0eGetSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink"\x97\x01\n\x11\x43reateSinkRequest\x12\x33\n\x06parent\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\x12\x1blogging.googleapis.com/Sink\x12-\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSinkB\x03\xe0\x41\x02\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xcb\x01\n\x11UpdateSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink\x12-\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSinkB\x03\xe0\x41\x02\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x11\x44\x65leteSinkRequest\x12\x36\n\tsink_name\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1blogging.googleapis.com/Sink"\xa1\x03\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp:\xe9\x01\xea\x41\xe5\x01\n logging.googleapis.com/Exclusion\x12)projects/{project}/exclusions/{exclusion}\x12\x33organizations/{organization}/exclusions/{exclusion}\x12\'folders/{folder}/exclusions/{exclusion}\x12\x38\x62illingAccounts/{billing_account}/exclusions/{exclusion}"x\n\x15ListExclusionsRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\x12 logging.googleapis.com/Exclusion\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"M\n\x13GetExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n logging.googleapis.com/Exclusion"\x86\x01\n\x16\x43reateExclusionRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\x12 
logging.googleapis.com/Exclusion\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\xbf\x01\n\x16UpdateExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n logging.googleapis.com/Exclusion\x12\x37\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"P\n\x16\x44\x65leteExclusionRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n logging.googleapis.com/Exclusion"&\n\x16GetCmekSettingsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x92\x01\n\x19UpdateCmekSettingsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\rcmek_settings\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.CmekSettings\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"N\n\x0c\x43mekSettings\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0ckms_key_name\x18\x02 \x01(\t\x12\x1a\n\x12service_account_id\x18\x03 \x01(\t2\x9e\x1f\n\x0f\x43onfigServiceV2\x12\x90\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xb7\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\xda\x41\x06parent\x12\x9e\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xd3\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\xda\x41\tsink_name\x12\xab\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xda\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\xda\x41\x0bparent,sink\x12\x9f\x04\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xce\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\xda\x41\x1asink_name,sink,update_mask\xda\x41\x0esink_name,sink\x12\xa0\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xd3\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\xda\x41\tsink_name\x12\xb8\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xd0\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\xda\x41\x06parent\x12\xa8\x02\n\x0cGetExcl
usion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xce\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\xda\x41\x04name\x12\xf1\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\x91\x02\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\xda\x41\x10parent,exclusion\x12\xfb\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\x9b\x02\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\xda\x41\x1aname,exclusion,update_mask\x12\xa5\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xce\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\xda\x41\x04name\x12\xad\x01\n\x0fGetCmekSettings\x12).google.logging.v2.GetCmekSettingsRequest\x1a\x1f.google.logging.v2.CmekSettings"N\x82\xd3\xe4\x93\x02H\x12\x1b/v2/{name=*/*}/cmekSettingsZ)\x12\'/v2/{name=organizations/*}/cmekSettings\x12\xd1\x01\n\x12UpdateCmekSettings\x12,.google.logging.v2.UpdateCmekSettingsRequest\x1a\x1f.google.logging.v2.CmekSettings"l\x82\xd3\xe4\x93\x02\x66\x32\x1b/v2/{name=*/*}/cmekSettings:\rcmek_settingsZ82\'/v2/{name=organizations/*}/cmekSettings:\rcmek_settings\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( - name="VersionFormat", - full_name="google.logging.v2.LogSink.VersionFormat", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VERSION_FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="V2", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=2, number=2, serialized_options=None, type=None - ), - ], - 
containing_type=None, - serialized_options=None, - serialized_start=833, - serialized_end=896, -) -_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) - - -_LOGSINK = _descriptor.Descriptor( - name="LogSink", - full_name="google.logging.v2.LogSink", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogSink.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination", - full_name="google.logging.v2.LogSink.destination", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\372A\003\n\001*"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogSink.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogSink.description", - index=3, - number=18, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.logging.v2.LogSink.disabled", - index=4, - number=19, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_version_format", - full_name="google.logging.v2.LogSink.output_version_format", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="writer_identity", - full_name="google.logging.v2.LogSink.writer_identity", - index=6, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_children", - full_name="google.logging.v2.LogSink.include_children", - index=7, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bigquery_options", - 
full_name="google.logging.v2.LogSink.bigquery_options", - index=8, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogSink.create_time", - index=9, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogSink.update_time", - index=10, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.logging.v2.LogSink.start_time", - index=11, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.logging.v2.LogSink.end_time", - index=12, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT,], - serialized_options=_b( - "\352A\270\001\n\033logging.googleapis.com/Sink\022\037projects/{project}/sinks/{sink}\022)organizations/{organization}/sinks/{sink}\022\035folders/{folder}/sinks/{sink}\022.billingAccounts/{billing_account}/sinks/{sink}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="options", - full_name="google.logging.v2.LogSink.options", - index=0, - containing_type=None, - fields=[], - ), - ], - serialized_start=317, - serialized_end=1098, -) - - -_BIGQUERYOPTIONS = _descriptor.Descriptor( - name="BigQueryOptions", - full_name="google.logging.v2.BigQueryOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="use_partitioned_tables", - full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uses_timestamp_column_partitioning", - full_name="google.logging.v2.BigQueryOptions.uses_timestamp_column_partitioning", - index=1, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\003"), - file=DESCRIPTOR, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1100, - serialized_end=1198, -) - - -_LISTSINKSREQUEST = _descriptor.Descriptor( - name="ListSinksRequest", - full_name="google.logging.v2.ListSinksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListSinksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\022\033logging.googleapis.com/Sink" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListSinksRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListSinksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1200, - serialized_end=1310, -) - - -_LISTSINKSRESPONSE = _descriptor.Descriptor( - name="ListSinksResponse", - full_name="google.logging.v2.ListSinksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sinks", - full_name="google.logging.v2.ListSinksResponse.sinks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListSinksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1312, - serialized_end=1399, -) - - -_GETSINKREQUEST = _descriptor.Descriptor( - name="GetSinkRequest", - full_name="google.logging.v2.GetSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.GetSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=_b( - "\340A\002\372A\035\n\033logging.googleapis.com/Sink" - ), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1401, - serialized_end=1473, -) - - -_CREATESINKREQUEST = _descriptor.Descriptor( - name="CreateSinkRequest", - full_name="google.logging.v2.CreateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateSinkRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\022\033logging.googleapis.com/Sink" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.CreateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1476, - serialized_end=1627, -) - - -_UPDATESINKREQUEST = _descriptor.Descriptor( - name="UpdateSinkRequest", - full_name="google.logging.v2.UpdateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.UpdateSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033logging.googleapis.com/Sink" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.UpdateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateSinkRequest.update_mask", - index=3, - 
number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1630, - serialized_end=1833, -) - - -_DELETESINKREQUEST = _descriptor.Descriptor( - name="DeleteSinkRequest", - full_name="google.logging.v2.DeleteSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.DeleteSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033logging.googleapis.com/Sink" - ), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1835, - serialized_end=1910, -) - - -_LOGEXCLUSION = _descriptor.Descriptor( - name="LogExclusion", - full_name="google.logging.v2.LogExclusion", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogExclusion.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogExclusion.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogExclusion.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.logging.v2.LogExclusion.disabled", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogExclusion.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogExclusion.update_time", - 
index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b( - "\352A\345\001\n logging.googleapis.com/Exclusion\022)projects/{project}/exclusions/{exclusion}\0223organizations/{organization}/exclusions/{exclusion}\022'folders/{folder}/exclusions/{exclusion}\0228billingAccounts/{billing_account}/exclusions/{exclusion}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1913, - serialized_end=2330, -) - - -_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( - name="ListExclusionsRequest", - full_name="google.logging.v2.ListExclusionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListExclusionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - '\340A\002\372A"\022 logging.googleapis.com/Exclusion' - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListExclusionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListExclusionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2332, - serialized_end=2452, -) - - -_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( - name="ListExclusionsResponse", - full_name="google.logging.v2.ListExclusionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="exclusions", - full_name="google.logging.v2.ListExclusionsResponse.exclusions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListExclusionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=2454, - serialized_end=2556, -) - - -_GETEXCLUSIONREQUEST = _descriptor.Descriptor( - name="GetExclusionRequest", - full_name="google.logging.v2.GetExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.GetExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2558, - serialized_end=2635, -) - - -_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="CreateExclusionRequest", - full_name="google.logging.v2.CreateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateExclusionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - '\340A\002\372A"\022 logging.googleapis.com/Exclusion' - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.CreateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2638, - serialized_end=2772, -) - - -_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="UpdateExclusionRequest", - full_name="google.logging.v2.UpdateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.UpdateExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.UpdateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateExclusionRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2775, - serialized_end=2966, -) - - -_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="DeleteExclusionRequest", - full_name="google.logging.v2.DeleteExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.DeleteExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b('\340A\002\372A"\n logging.googleapis.com/Exclusion'), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2968, - serialized_end=3048, -) - - -_GETCMEKSETTINGSREQUEST = _descriptor.Descriptor( - name="GetCmekSettingsRequest", - full_name="google.logging.v2.GetCmekSettingsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.GetCmekSettingsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3050, - serialized_end=3088, -) - - -_UPDATECMEKSETTINGSREQUEST = _descriptor.Descriptor( - name="UpdateCmekSettingsRequest", - full_name="google.logging.v2.UpdateCmekSettingsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.UpdateCmekSettingsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cmek_settings", - full_name="google.logging.v2.UpdateCmekSettingsRequest.cmek_settings", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateCmekSettingsRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, 
- syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3091, - serialized_end=3237, -) - - -_CMEKSETTINGS = _descriptor.Descriptor( - name="CmekSettings", - full_name="google.logging.v2.CmekSettings", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.CmekSettings.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kms_key_name", - full_name="google.logging.v2.CmekSettings.kms_key_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account_id", - full_name="google.logging.v2.CmekSettings.service_account_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3239, - serialized_end=3317, -) - -_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT -_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS -_LOGSINK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK -_LOGSINK.oneofs_by_name["options"].fields.append( - _LOGSINK.fields_by_name["bigquery_options"] -) -_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ - "options" -] -_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK -_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGEXCLUSION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGEXCLUSION.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION -_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_UPDATECMEKSETTINGSREQUEST.fields_by_name["cmek_settings"].message_type = 
_CMEKSETTINGS -_UPDATECMEKSETTINGSREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK -DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS -DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST -DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE -DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST -DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST -DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST -DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST -DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION -DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST -DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE -DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["GetCmekSettingsRequest"] = _GETCMEKSETTINGSREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateCmekSettingsRequest" -] = _UPDATECMEKSETTINGSREQUEST -DESCRIPTOR.message_types_by_name["CmekSettings"] = _CMEKSETTINGS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogSink = _reflection.GeneratedProtocolMessageType( - "LogSink", - (_message.Message,), - dict( - DESCRIPTOR=_LOGSINK, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes a sink used to export log entries to one of the - following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter controls which - log entries are exported. The sink must be created within a project, - organization, billing account, or folder. - - - Attributes: - name: - Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink - identifiers are limited to 100 characters and can include only - the following characters: upper and lower-case alphanumeric - characters, underscores, hyphens, and periods. First character - has to be alphanumeric. - destination: - Required. The export destination: :: - "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis - .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo - gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The - sink's ``writer_identity``, set when the sink is created, must - have permission to write to the destination or else the log - entries are not exported. For more information, see `Exporting - Logs with Sinks `__. - filter: - Optional. An `advanced logs filter - `__. The only exported - log entries are those that are in the resource owning the sink - and that match the filter. For example: :: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND - severity>=ERROR - description: - Optional. A description of this sink. The maximum length of - the description is 8000 characters. - disabled: - Optional. If set to True, then this sink is disabled and it - does not export any log entries. - output_version_format: - Deprecated. The log entry format to use for this sink's - exported log entries. 
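
# A minimal sketch of creating a sink with the LogSink fields documented in
# this docstring, via the handwritten client shipped alongside the generated
# code in this package. The project, sink, and bucket names are placeholders
# (not values from this patch); application-default credentials are assumed.
from google.cloud import logging

client = logging.Client(project="my-project")
sink = client.sink(
    "my-syslog-errors-to-pubsub",                    # LogSink.name
    filter_="severity>=ERROR",                       # LogSink.filter
    destination="storage.googleapis.com/my-bucket",  # LogSink.destination
)
sink.create()  # ends up calling the ConfigServiceV2.CreateSink API
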
The v2 format is used by default and - cannot be changed. - writer_identity: - Output only. An IAM identity—a service account or group—under - which Logging writes the exported log entries to the sink's - destination. This field is set by - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - and - [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - based on the value of ``unique_writer_identity`` in those - methods. Until you grant this identity write-access to the - destination, log entry exports from this sink will fail. For - more information, see `Granting Access for a Resource - `__. Consult the - destination service's documentation to determine the - appropriate IAM roles to assign to the identity. - include_children: - Optional. This field applies only to sinks owned by - organizations and folders. If the field is false, the default, - only the logs owned by the sink's parent resource are - available for export. If the field is true, then logs from all - the projects, folders, and billing accounts contained in the - sink's parent resource are also available for export. Whether - a particular log entry from the children is exported depends - on the sink's filter expression. For example, if this field is - true, then the filter ``resource.type=gce_instance`` would - export all Compute Engine VM instance log entries from all - projects in the sink's parent. To only export entries from - certain child projects, filter on the project part of the log - name: :: logName:("projects/test-project1/" OR - "projects/test-project2/") AND resource.type=gce_instance - options: - Optional. Destination dependent options. - bigquery_options: - Optional. Options that affect sinks exporting data to - BigQuery. - create_time: - Output only. The creation timestamp of the sink. This field - may not be present for older sinks. - update_time: - Output only. The last update timestamp of the sink. This - field may not be present for older sinks. - start_time: - Do not use. This field is ignored. - end_time: - Do not use. This field is ignored. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - ), -) -_sym_db.RegisterMessage(LogSink) - -BigQueryOptions = _reflection.GeneratedProtocolMessageType( - "BigQueryOptions", - (_message.Message,), - dict( - DESCRIPTOR=_BIGQUERYOPTIONS, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data - to BigQuery. - - - Attributes: - use_partitioned_tables: - Optional. Whether to use `BigQuery's partition tables - `__. By default, Logging - creates dated tables based on the log entries' timestamps, - e.g. syslog\_20170523. With partitioned tables the date suffix - is no longer present and `special query syntax - `__ has to be used - instead. In both cases, tables are sharded based on UTC - timezone. - uses_timestamp_column_partitioning: - Output only. True if new timestamp column based partitioning - is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will use - timestamp column based partitioning. If - use\_partitioned\_tables is false, this value has no meaning - and will be false. Legacy sinks using partitioned tables will - have this field set to false. 
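
# BigQueryOptions is just a message registered by this module, so the
# partitioned-table behavior described in its docstring can be requested by
# building the protos directly. A sketch with placeholder project/dataset
# names; field names mirror the descriptors defined above.
from google.cloud.logging_v2.proto import logging_config_pb2

bq_sink = logging_config_pb2.LogSink(
    name="bq-sink",
    destination="bigquery.googleapis.com/projects/my-project/datasets/my_dataset",
    filter="severity>=WARNING",
    bigquery_options=logging_config_pb2.BigQueryOptions(
        use_partitioned_tables=True  # drop the dated-table suffix layout
    ),
)
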
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) - ), -) -_sym_db.RegisterMessage(BigQueryOptions) - -ListSinksRequest = _reflection.GeneratedProtocolMessageType( - "ListSinksRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``ListSinks``. - - - Attributes: - parent: - Required. The parent resource whose sinks are to be listed: - :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - ), -) -_sym_db.RegisterMessage(ListSinksRequest) - -ListSinksResponse = _reflection.GeneratedProtocolMessageType( - "ListSinksResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Result returned from ``ListSinks``. - - - Attributes: - sinks: - A list of sinks. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - ), -) -_sym_db.RegisterMessage(ListSinksResponse) - -GetSinkRequest = _reflection.GeneratedProtocolMessageType( - "GetSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``GetSink``. - - - Attributes: - sink_name: - Required. The resource name of the sink: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - ), -) -_sym_db.RegisterMessage(GetSinkRequest) - -CreateSinkRequest = _reflection.GeneratedProtocolMessageType( - "CreateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``CreateSink``. - - - Attributes: - parent: - Required. The resource in which to create the sink: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- - project"``, ``"organizations/123456789"``. - sink: - Required. The new sink, whose ``name`` parameter is a sink - identifier that is not already in use. - unique_writer_identity: - Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. 
If this value is omitted - or set to false, and if the sink's parent is a project, then - the value returned as ``writer_identity`` is the same group or - service account used by Logging before the addition of writer - identities to this API. The sink's destination must be in the - same project as the sink itself. If this field is set to - true, or if the sink is owned by a non-project resource such - as an organization, then the value of ``writer_identity`` will - be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in - [LogSink][google.logging.v2.LogSink]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - ), -) -_sym_db.RegisterMessage(CreateSinkRequest) - -UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``UpdateSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to update, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - sink: - Required. The updated sink, whose name is the same identifier - that appears as part of ``sink_name``. - unique_writer_identity: - Optional. See - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - for a description of this field. When updating a sink, the - effect of this field on the value of ``writer_identity`` in - the updated sink depends on both the old and new values of - this field: - If the old and new values of this field are - both false or both true, then there is no change to the - sink's ``writer_identity``. - If the old value is false and - the new value is true, then ``writer_identity`` is changed - to a unique service account. - It is an error if the old - value is true and the new value is set to false or - defaulted to false. - update_mask: - Optional. Field mask that specifies the fields in ``sink`` - that need an update. A sink field will be overwritten if, and - only if, it is in the update mask. ``name`` and output only - fields cannot be updated. An empty updateMask is temporarily - treated as using the following mask for backwards - compatibility purposes: destination,filter,includeChildren At - some point in the future, behavior will be removed and - specifying an empty updateMask will be an error. For a - detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/ - google.protobuf#google.protobuf.FieldMask Example: - ``updateMask=filter``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - ), -) -_sym_db.RegisterMessage(UpdateSinkRequest) - -DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``DeleteSink``. - - - Attributes: - sink_name: - Required. 
The full resource name of the sink to delete, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - ), -) -_sym_db.RegisterMessage(DeleteSinkRequest) - -LogExclusion = _reflection.GeneratedProtocolMessageType( - "LogExclusion", - (_message.Message,), - dict( - DESCRIPTOR=_LOGEXCLUSION, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Specifies a set of log entries that are not to be stored - in Logging. If your GCP resource receives a large volume of logs, you - can use exclusions to reduce your chargeable logs. Exclusions are - processed after log sinks, so you can export log entries before they are - excluded. Note that organization-level and folder-level exclusions don't - apply to child resources, and that you can't exclude audit log entries. - - - Attributes: - name: - Required. A client-assigned identifier, such as ``"load- - balancer-exclusion"``. Identifiers are limited to 100 - characters and can include only letters, digits, underscores, - hyphens, and periods. First character has to be alphanumeric. - description: - Optional. A description of this exclusion. - filter: - Required. An `advanced logs filter - `__ that matches the log - entries to be excluded. By using the `sample function - `__, you can - exclude less than 100% of the matching log entries. For - example, the following query matches 99% of low-severity log - entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity<ERROR sample(insertId, 0.99)"`` - disabled: - Optional. If set to True, then this exclusion is disabled and - it does not exclude any log entries. You can [update an - exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion] - to change the value of this field. - create_time: - Output only. The creation timestamp of the exclusion. This - field may not be present for older exclusions. - update_time: - Output only. The last update timestamp of the exclusion. This - field may not be present for older exclusions. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) - ), -) -_sym_db.RegisterMessage(LogExclusion) - -ListExclusionsRequest = _reflection.GeneratedProtocolMessageType( - "ListExclusionsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTEXCLUSIONSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``ListExclusions``. - - - Attributes: - parent: - Required. The parent resource whose exclusions are to be - listed: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsRequest) - ), -) -_sym_db.RegisterMessage(ListExclusionsRequest) - -ListExclusionsResponse = _reflection.GeneratedProtocolMessageType( - "ListExclusionsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTEXCLUSIONSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Result returned from ``ListExclusions``. - - - Attributes: - exclusions: - A list of exclusions. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsResponse) - ), -) -_sym_db.RegisterMessage(ListExclusionsResponse) - -GetExclusionRequest = _reflection.GeneratedProtocolMessageType( - "GetExclusionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETEXCLUSIONREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``GetExclusion``. - - - Attributes: - name: - Required. The resource name of an existing exclusion: :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetExclusionRequest) - ), -) -_sym_db.RegisterMessage(GetExclusionRequest) - -CreateExclusionRequest = _reflection.GeneratedProtocolMessageType( - "CreateExclusionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEEXCLUSIONREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``CreateExclusion``. - - - Attributes: - parent: - Required. The parent resource in which to create the - exclusion: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- - project"``, ``"organizations/123456789"``. - exclusion: - Required. The new exclusion, whose ``name`` parameter is an - exclusion name that is not already used in the parent - resource. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateExclusionRequest) - ), -) -_sym_db.RegisterMessage(CreateExclusionRequest) - -UpdateExclusionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateExclusionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEEXCLUSIONREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``UpdateExclusion``. - - - Attributes: - name: - Required. The resource name of the exclusion to update: :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - exclusion: - Required. New values for the existing exclusion. Only the - fields specified in ``update_mask`` are relevant. - update_mask: - Required. A non-empty list of fields to change in the existing - exclusion. New values for the fields are taken from the - corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included in - this request. Fields not mentioned in ``update_mask`` are not - changed and are ignored in the request. For example, to - change the filter and description of an exclusion, specify an - ``update_mask`` of ``"filter,description"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateExclusionRequest) - ), -) -_sym_db.RegisterMessage(UpdateExclusionRequest) - -DeleteExclusionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteExclusionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEEXCLUSIONREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``DeleteExclusion``. - - - Attributes: - name: - Required. The resource name of an existing exclusion to - delete: :: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteExclusionRequest) - ), -) -_sym_db.RegisterMessage(DeleteExclusionRequest) - -GetCmekSettingsRequest = _reflection.GeneratedProtocolMessageType( - "GetCmekSettingsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETCMEKSETTINGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ for more - information. - - - Attributes: - name: - Required. The resource for which to retrieve CMEK settings. - :: "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" Example: - ``"organizations/12345/cmekSettings"``. Note: CMEK for the - Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and - folders in the GCP organization. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetCmekSettingsRequest) - ), -) -_sym_db.RegisterMessage(GetCmekSettingsRequest) - -UpdateCmekSettingsRequest = _reflection.GeneratedProtocolMessageType( - "UpdateCmekSettingsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATECMEKSETTINGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ for more - information. - - - Attributes: - name: - Required. The resource name for the CMEK settings to update. - :: "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" Example: - ``"organizations/12345/cmekSettings"``. Note: CMEK for the - Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and - folders in the GCP organization. - cmek_settings: - Required. The CMEK settings to update. See `Enabling CMEK for - Logs Router `__ for - more information. - update_mask: - Optional.
Field mask identifying which fields from - ``cmek_settings`` should be updated. A field will be - overwritten if and only if it is in the update mask. Output - only fields cannot be updated. See - [FieldMask][google.protobuf.FieldMask] for more information. - Example: ``"updateMask=kmsKeyName"`` - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateCmekSettingsRequest) - ), -) -_sym_db.RegisterMessage(UpdateCmekSettingsRequest) - -CmekSettings = _reflection.GeneratedProtocolMessageType( - "CmekSettings", - (_message.Message,), - dict( - DESCRIPTOR=_CMEKSETTINGS, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, organization, billing - account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders - in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ for more - information. - - - Attributes: - name: - Output Only. The resource name of the CMEK settings. - kms_key_name: - The resource name for the configured Cloud KMS key. KMS key - name format: "projects/[PROJECT\_ID]/locations/[LOCATION]/keyR - ings/[KEYRING]/cryptoKeys/[KEY]" For example: ``"projects/my- - project-id/locations/my-region/keyRings/key-ring- - name/cryptoKeys/key-name"`` To enable CMEK for the Logs - Router, set this field to a valid ``kms_key_name`` for which - the associated service account has the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned - for the key. The Cloud KMS key used by the Log Router can be - updated by changing the ``kms_key_name`` to a new valid key - name. Encryption operations that are in progress will be - completed with the key that was in use when they started. - Decryption operations will be completed using the key that was - used at the time of encryption unless access to that key has - been revoked. To disable CMEK for the Logs Router, set this - field to an empty string. See `Enabling CMEK for Logs Router - `__ for more - information. - service_account_id: - Output Only. The service account that will be used by the Logs - Router to access your Cloud KMS key. Before enabling CMEK for - Logs Router, you must first assign the role - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to the service - account that the Logs Router will use to access your Cloud KMS - key. Use [GetCmekSettings][google.logging.v2.ConfigServiceV2.G - etCmekSettings] to obtain the service account ID. See - `Enabling CMEK for Logs Router `__ for more information. 
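
# One way to reach the CmekSettings calls documented above is the raw gRPC
# stub that this commit also removes. A sketch: the organization ID is a
# placeholder, and the channel must carry real credentials.
import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests
from google.cloud.logging_v2.proto import (
    logging_config_pb2,
    logging_config_pb2_grpc,
)

credentials, _ = google.auth.default()
channel = google.auth.transport.grpc.secure_authorized_channel(
    credentials,
    google.auth.transport.requests.Request(),
    "logging.googleapis.com:443",
)
stub = logging_config_pb2_grpc.ConfigServiceV2Stub(channel)
settings = stub.GetCmekSettings(
    logging_config_pb2.GetCmekSettingsRequest(
        name="organizations/12345/cmekSettings"
    )
)
# Grant this service account the cryptoKeyEncrypterDecrypter role before
# pointing kms_key_name at a key via UpdateCmekSettings.
print(settings.service_account_id)
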
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CmekSettings) - ), -) -_sym_db.RegisterMessage(CmekSettings) - - -DESCRIPTOR._options = None -_LOGSINK.fields_by_name["destination"]._options = None -_LOGSINK.fields_by_name["output_version_format"]._options = None -_LOGSINK.fields_by_name["writer_identity"]._options = None -_LOGSINK.fields_by_name["create_time"]._options = None -_LOGSINK.fields_by_name["update_time"]._options = None -_LOGSINK.fields_by_name["start_time"]._options = None -_LOGSINK.fields_by_name["end_time"]._options = None -_LOGSINK._options = None -_BIGQUERYOPTIONS.fields_by_name["uses_timestamp_column_partitioning"]._options = None -_LISTSINKSREQUEST.fields_by_name["parent"]._options = None -_GETSINKREQUEST.fields_by_name["sink_name"]._options = None -_CREATESINKREQUEST.fields_by_name["parent"]._options = None -_CREATESINKREQUEST.fields_by_name["sink"]._options = None -_UPDATESINKREQUEST.fields_by_name["sink_name"]._options = None -_UPDATESINKREQUEST.fields_by_name["sink"]._options = None -_DELETESINKREQUEST.fields_by_name["sink_name"]._options = None -_LOGEXCLUSION._options = None -_LISTEXCLUSIONSREQUEST.fields_by_name["parent"]._options = None -_GETEXCLUSIONREQUEST.fields_by_name["name"]._options = None -_CREATEEXCLUSIONREQUEST.fields_by_name["parent"]._options = None -_UPDATEEXCLUSIONREQUEST.fields_by_name["name"]._options = None -_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"]._options = None -_UPDATEEXCLUSIONREQUEST.fields_by_name["update_mask"]._options = None -_DELETEEXCLUSIONREQUEST.fields_by_name["name"]._options = None - -_CONFIGSERVICEV2 = _descriptor.ServiceDescriptor( - name="ConfigServiceV2", - full_name="google.logging.v2.ConfigServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" - ), - serialized_start=3320, - serialized_end=7318, - methods=[ - _descriptor.MethodDescriptor( - name="ListSinks", - full_name="google.logging.v2.ConfigServiceV2.ListSinks", - index=0, - containing_service=None, - input_type=_LISTSINKSREQUEST, - output_type=_LISTSINKSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\247\001\022\026/v2/{parent=*/*}/sinksZ\037\022\035/v2/{parent=projects/*}/sinksZ$\022"/v2/{parent=organizations/*}/sinksZ\036\022\034/v2/{parent=folders/*}/sinksZ&\022$/v2/{parent=billingAccounts/*}/sinks\332A\006parent' - ), - ), - _descriptor.MethodDescriptor( - name="GetSink", - full_name="google.logging.v2.ConfigServiceV2.GetSink", - index=1, - containing_service=None, - input_type=_GETSINKREQUEST, - output_type=_LOGSINK, - serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{sink_name=*/*/sinks/*}Z$\022\"/v2/{sink_name=projects/*/sinks/*}Z)\022'/v2/{sink_name=organizations/*/sinks/*}Z#\022!/v2/{sink_name=folders/*/sinks/*}Z+\022)/v2/{sink_name=billingAccounts/*/sinks/*}\332A\tsink_name" - ), - ), - _descriptor.MethodDescriptor( - name="CreateSink", - full_name="google.logging.v2.ConfigServiceV2.CreateSink", - index=2, - containing_service=None, - input_type=_CREATESINKREQUEST, - output_type=_LOGSINK, - serialized_options=_b( - 
'\202\323\344\223\002\305\001"\026/v2/{parent=*/*}/sinks:\004sinkZ%"\035/v2/{parent=projects/*}/sinks:\004sinkZ*""/v2/{parent=organizations/*}/sinks:\004sinkZ$"\034/v2/{parent=folders/*}/sinks:\004sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\004sink\332A\013parent,sink' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateSink", - full_name="google.logging.v2.ConfigServiceV2.UpdateSink", - index=3, - containing_service=None, - input_type=_UPDATESINKREQUEST, - output_type=_LOGSINK, - serialized_options=_b( - "\202\323\344\223\002\231\003\032\033/v2/{sink_name=*/*/sinks/*}:\004sinkZ*\032\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/\032'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)\032!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ1\032)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sinkZ*2\"/v2/{sink_name=projects/*/sinks/*}:\004sinkZ/2'/v2/{sink_name=organizations/*/sinks/*}:\004sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\004sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\004sink\332A\032sink_name,sink,update_mask\332A\016sink_name,sink" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteSink", - full_name="google.logging.v2.ConfigServiceV2.DeleteSink", - index=4, - containing_service=None, - input_type=_DELETESINKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\300\001*\033/v2/{sink_name=*/*/sinks/*}Z$*\"/v2/{sink_name=projects/*/sinks/*}Z)*'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\332A\tsink_name" - ), - ), - _descriptor.MethodDescriptor( - name="ListExclusions", - full_name="google.logging.v2.ConfigServiceV2.ListExclusions", - index=5, - containing_service=None, - input_type=_LISTEXCLUSIONSREQUEST, - output_type=_LISTEXCLUSIONSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{parent=*/*}/exclusionsZ$\022\"/v2/{parent=projects/*}/exclusionsZ)\022'/v2/{parent=organizations/*}/exclusionsZ#\022!/v2/{parent=folders/*}/exclusionsZ+\022)/v2/{parent=billingAccounts/*}/exclusions\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="GetExclusion", - full_name="google.logging.v2.ConfigServiceV2.GetExclusion", - index=6, - containing_service=None, - input_type=_GETEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - serialized_options=_b( - "\202\323\344\223\002\300\001\022\033/v2/{name=*/*/exclusions/*}Z$\022\"/v2/{name=projects/*/exclusions/*}Z)\022'/v2/{name=organizations/*/exclusions/*}Z#\022!/v2/{name=folders/*/exclusions/*}Z+\022)/v2/{name=billingAccounts/*/exclusions/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="CreateExclusion", - full_name="google.logging.v2.ConfigServiceV2.CreateExclusion", - index=7, - containing_service=None, - input_type=_CREATEEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - serialized_options=_b( - '\202\323\344\223\002\367\001"\033/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\332A\020parent,exclusion' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateExclusion", - full_name="google.logging.v2.ConfigServiceV2.UpdateExclusion", - index=8, - containing_service=None, - input_type=_UPDATEEXCLUSIONREQUEST, - output_type=_LOGEXCLUSION, - serialized_options=_b( - 
"\202\323\344\223\002\367\0012\033/v2/{name=*/*/exclusions/*}:\texclusionZ/2\"/v2/{name=projects/*/exclusions/*}:\texclusionZ42'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\332A\032name,exclusion,update_mask" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteExclusion", - full_name="google.logging.v2.ConfigServiceV2.DeleteExclusion", - index=9, - containing_service=None, - input_type=_DELETEEXCLUSIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\300\001*\033/v2/{name=*/*/exclusions/*}Z$*\"/v2/{name=projects/*/exclusions/*}Z)*'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\332A\004name" - ), - ), - _descriptor.MethodDescriptor( - name="GetCmekSettings", - full_name="google.logging.v2.ConfigServiceV2.GetCmekSettings", - index=10, - containing_service=None, - input_type=_GETCMEKSETTINGSREQUEST, - output_type=_CMEKSETTINGS, - serialized_options=_b( - "\202\323\344\223\002H\022\033/v2/{name=*/*}/cmekSettingsZ)\022'/v2/{name=organizations/*}/cmekSettings" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateCmekSettings", - full_name="google.logging.v2.ConfigServiceV2.UpdateCmekSettings", - index=11, - containing_service=None, - input_type=_UPDATECMEKSETTINGSREQUEST, - output_type=_CMEKSETTINGS, - serialized_options=_b( - "\202\323\344\223\002f2\033/v2/{name=*/*}/cmekSettings:\rcmek_settingsZ82'/v2/{name=organizations/*}/cmekSettings:\rcmek_settings" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CONFIGSERVICEV2) - -DESCRIPTOR.services_by_name["ConfigServiceV2"] = _CONFIGSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py deleted file mode 100644 index 62e751bf554a..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ /dev/null @@ -1,259 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class ConfigServiceV2Stub(object): - """Service for configuring sinks used to route log entries.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListSinks = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/ListSinks", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString, - ) - self.GetSink = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/GetSink", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, - ) - self.CreateSink = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/CreateSink", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, - ) - self.UpdateSink = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/UpdateSink", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, - ) - self.DeleteSink = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/DeleteSink", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListExclusions = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/ListExclusions", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString, - ) - self.GetExclusion = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/GetExclusion", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, - ) - self.CreateExclusion = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/CreateExclusion", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, - ) - self.UpdateExclusion = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/UpdateExclusion", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, - ) - self.DeleteExclusion = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/DeleteExclusion", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetCmekSettings = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/GetCmekSettings", - 
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetCmekSettingsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.FromString, - ) - self.UpdateCmekSettings = channel.unary_unary( - "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateCmekSettingsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.FromString, - ) - - -class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to route log entries.""" - - def ListSinks(self, request, context): - """Lists sinks.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSink(self, request, context): - """Gets a sink.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The - export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. A sink can - export log entries only from the resource owning the sink. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing - sink with values from the new sink: `destination`, and `filter`. - - The updated sink might also have a new `writer_identity`; see the - `unique_writer_identity` field. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSink(self, request, context): - """Deletes a sink. If the sink has a unique `writer_identity`, then that - service account is also deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetExclusion(self, request, context): - """Gets the description of an exclusion.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateExclusion(self, request, context): - """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. 
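A sketch, not part of this patch, of the client side of CreateExclusion using a ConfigServiceV2Stub built as in the earlier sketch; the filter, names, and project are placeholders, and the LogExclusion field names (name, description, filter) are assumed from the v2 API:

from google.cloud.logging_v2.proto import logging_config_pb2

exclusion = logging_config_pb2.LogExclusion(
    name="exclude-debug",
    description="Drop noisy DEBUG entries at ingestion.",
    filter="severity<=DEBUG",
)
# The parent may be a project, folder, organization, or billing account.
created = stub.CreateExclusion(
    logging_config_pb2.CreateExclusionRequest(
        parent="projects/my-project", exclusion=exclusion))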
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteExclusion(self, request, context): - """Deletes an exclusion.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetCmekSettings(self, request, context): - """Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. - - See [Enabling CMEK for Logs - Router](/logging/docs/routing/managed-encryption) for more information. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateCmekSettings(self, request, context): - """Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) `kms_key_name` is invalid, or 2) the associated service - account does not have the required - `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or - 3) access to the key is disabled. - - See [Enabling CMEK for Logs - Router](/logging/docs/routing/managed-encryption) for more information. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_ConfigServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "ListSinks": grpc.unary_unary_rpc_method_handler( - servicer.ListSinks, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString, - ), - "GetSink": grpc.unary_unary_rpc_method_handler( - servicer.GetSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - "CreateSink": grpc.unary_unary_rpc_method_handler( - servicer.CreateSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - "UpdateSink": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, - ), - "DeleteSink": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSink, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListExclusions": grpc.unary_unary_rpc_method_handler( - servicer.ListExclusions, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString, - ), - "GetExclusion": grpc.unary_unary_rpc_method_handler( - servicer.GetExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - "CreateExclusion": grpc.unary_unary_rpc_method_handler( - servicer.CreateExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - "UpdateExclusion": grpc.unary_unary_rpc_method_handler( - servicer.UpdateExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, - ), - "DeleteExclusion": grpc.unary_unary_rpc_method_handler( - servicer.DeleteExclusion, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetCmekSettings": grpc.unary_unary_rpc_method_handler( - servicer.GetCmekSettings, - 
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetCmekSettingsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.SerializeToString, - ), - "UpdateCmekSettings": grpc.unary_unary_rpc_method_handler( - servicer.UpdateCmekSettings, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateCmekSettingsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CmekSettings.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.ConfigServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto index 582c067e6833..eb9f73ffabcf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -35,6 +34,7 @@ option java_multiple_files = true; option java_outer_classname = "LoggingMetricsProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; +option ruby_package = "Google::Cloud::Logging::V2"; // Service for configuring logs-based metrics. service MetricsServiceV2 { @@ -98,7 +98,7 @@ service MetricsServiceV2 { // by the bucket options. message LogMetric { option (google.api.resource) = { - type: "logging.googleapis.com/Metric" + type: "logging.googleapis.com/LogMetric" pattern: "projects/{project}/metrics/{metric}" }; @@ -124,20 +124,20 @@ message LogMetric { // However, when the metric identifier appears as the `[METRIC_ID]` part of a // `metric_name` API parameter, then the metric identifier must be // URL-encoded. Example: `"projects/my-project/metrics/nginx%2Frequests"`. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. A description of this metric, which is used in documentation. // The maximum length of the description is 8000 characters. - string description = 2; + string description = 2 [(google.api.field_behavior) = OPTIONAL]; - // Required. An [advanced logs filter](/logging/docs/view/advanced_filters) - // which is used to match log entries. - // Example: + // Required. An [advanced logs + // filter](https://cloud.google.com/logging/docs/view/advanced_filters) which + // is used to match log entries. Example: // // "resource.type=gae_app AND severity>=ERROR" // // The maximum length of the filter is 20000 characters. - string filter = 3; + string filter = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The metric descriptor associated with the logs-based metric. // If unspecified, it uses a default metric descriptor with a DELTA metric @@ -160,7 +160,7 @@ message LogMetric { // be updated once initially configured. 
New labels can be added in the // `metric_descriptor`, but existing labels cannot be modified except for // their description. - google.api.MetricDescriptor metric_descriptor = 5; + google.api.MetricDescriptor metric_descriptor = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. A `value_extractor` is required when using a distribution // logs-based metric to extract the values to record from a log entry. @@ -181,7 +181,7 @@ message LogMetric { // distribution. // // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")` - string value_extractor = 6; + string value_extractor = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A map from a label key string to an extractor expression which is // used to extract data from a log entry field and assign as the label value. @@ -197,22 +197,22 @@ message LogMetric { // // Note that there are upper bounds on the maximum number of labels and the // number of active time series that are allowed in a project. - map label_extractors = 7; + map label_extractors = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The `bucket_options` are required when the logs-based metric is // using a DISTRIBUTION value type and it describes the bucket boundaries // used to create a histogram of the extracted values. - google.api.Distribution.BucketOptions bucket_options = 8; + google.api.Distribution.BucketOptions bucket_options = 8 [(google.api.field_behavior) = OPTIONAL]; // Output only. The creation timestamp of the metric. // // This field may not be present for older metrics. - google.protobuf.Timestamp create_time = 9; + google.protobuf.Timestamp create_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The last update timestamp of the metric. // // This field may not be present for older metrics. - google.protobuf.Timestamp update_time = 10; + google.protobuf.Timestamp update_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Deprecated. The API version that created or updated this metric. // The v2 format is used by default and cannot be changed. @@ -235,12 +235,12 @@ message ListLogMetricsRequest { // preceding call to this method. `pageToken` must be the value of // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. - string page_token = 2; + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The maximum number of results to return from this request. // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. - int32 page_size = 3; + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from ListLogMetrics. 
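A sketch, not part of this patch, of the page_token/next_page_token contract annotated above, written against the MetricsServiceV2 stub from the generated module deleted later in this commit; the project ID is a placeholder:

from google.cloud.logging_v2.proto import logging_metrics_pb2

def iter_log_metrics(stub, parent="projects/my-project"):
    """Yield every LogMetric under parent, following page tokens."""
    token = ""
    while True:
        response = stub.ListLogMetrics(
            logging_metrics_pb2.ListLogMetricsRequest(
                parent=parent, page_size=100, page_token=token))
        for metric in response.metrics:
            yield metric
        token = response.next_page_token
        if not token:  # an empty token means the last page was reached
            return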
@@ -262,7 +262,7 @@ message GetLogMetricRequest { string metric_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Metric" + type: "logging.googleapis.com/LogMetric" } ]; } @@ -277,7 +277,7 @@ message CreateLogMetricRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Metric" + child_type: "logging.googleapis.com/LogMetric" } ]; @@ -298,7 +298,7 @@ message UpdateLogMetricRequest { string metric_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Metric" + type: "logging.googleapis.com/LogMetric" } ]; @@ -314,7 +314,7 @@ message DeleteLogMetricRequest { string metric_name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "logging.googleapis.com/Metric" + type: "logging.googleapis.com/LogMetric" } ]; } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py deleted file mode 100644 index 01e308fb741d..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ /dev/null @@ -1,1045 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/logging_v2/proto/logging_metrics.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging_metrics.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x17google/api/client.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x17google/api/metric.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xdc\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01:G\xea\x41\x44\n\x1dlogging.googleapis.com/Metric\x12#projects/{project}/metrics/{metric}"\x83\x01\n\x15ListLogMetricsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x13GetLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric"\x82\x01\n\x16\x43reateLogMetricRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric\x12\x31\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetricB\x03\xe0\x41\x02"\x87\x01\n\x16UpdateLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric\x12\x31\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetricB\x03\xe0\x41\x02"T\n\x16\x44\x65leteLogMetricRequest\x12:\n\x0bmetric_name\x18\x01 
\x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dlogging.googleapis.com/Metric2\xae\x08\n\x10MetricsServiceV2\x12\x97\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"0\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\xda\x41\x06parent\x12\x92\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"<\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\xda\x41\x0bmetric_name\x12\x9b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"?\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\xda\x41\rparent,metric\x12\xa7\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"K\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\xda\x41\x12metric_name,metric\x12\x92\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty"<\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\xda\x41\x0bmetric_name\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( - name="ApiVersion", - full_name="google.logging.v2.LogMetric.ApiVersion", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="V2", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=877, - serialized_end=905, -) -_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) - - -_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( - name="LabelExtractorsEntry", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=821, - serialized_end=875, -) - -_LOGMETRIC = _descriptor.Descriptor( - name="LogMetric", - full_name="google.logging.v2.LogMetric", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogMetric.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogMetric.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogMetric.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_descriptor", - full_name="google.logging.v2.LogMetric.metric_descriptor", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_extractor", - full_name="google.logging.v2.LogMetric.value_extractor", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_extractors", - full_name="google.logging.v2.LogMetric.label_extractors", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_options", - full_name="google.logging.v2.LogMetric.bucket_options", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogMetric.create_time", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogMetric.update_time", - index=8, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.logging.v2.LogMetric.version", - index=9, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], - enum_types=[_LOGMETRIC_APIVERSION,], - serialized_options=_b( - "\352AD\n\035logging.googleapis.com/Metric\022#projects/{project}/metrics/{metric}" - ), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=374, - serialized_end=978, -) - - -_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( - name="ListLogMetricsRequest", - full_name="google.logging.v2.ListLogMetricsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogMetricsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogMetricsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogMetricsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=981, - serialized_end=1112, -) - - -_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( - name="ListLogMetricsResponse", - full_name="google.logging.v2.ListLogMetricsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metrics", - full_name="google.logging.v2.ListLogMetricsResponse.metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1114, - serialized_end=1210, -) - - -_GETLOGMETRICREQUEST = _descriptor.Descriptor( - name="GetLogMetricRequest", - full_name="google.logging.v2.GetLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.GetLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\037\n\035logging.googleapis.com/Metric" - ), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1212, - serialized_end=1293, -) - - -_CREATELOGMETRICREQUEST = _descriptor.Descriptor( - name="CreateLogMetricRequest", - full_name="google.logging.v2.CreateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateLogMetricRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\037\n\035logging.googleapis.com/Metric" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.CreateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1296, - serialized_end=1426, -) - - -_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( - name="UpdateLogMetricRequest", - full_name="google.logging.v2.UpdateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\037\n\035logging.googleapis.com/Metric" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - 
full_name="google.logging.v2.UpdateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1429, - serialized_end=1564, -) - - -_DELETELOGMETRICREQUEST = _descriptor.Descriptor( - name="DeleteLogMetricRequest", - full_name="google.logging.v2.DeleteLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\037\n\035logging.googleapis.com/Metric" - ), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1566, - serialized_end=1650, -) - -_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC -_LOGMETRIC.fields_by_name[ - "metric_descriptor" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LOGMETRIC.fields_by_name[ - "label_extractors" -].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY -_LOGMETRIC.fields_by_name[ - "bucket_options" -].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS -_LOGMETRIC.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION -_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC -_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC -_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC -DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST -DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE -DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = _DELETELOGMETRICREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogMetric = _reflection.GeneratedProtocolMessageType( - "LogMetric", - (_message.Message,), - dict( - LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( - "LabelExtractorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) - ), - ), - DESCRIPTOR=_LOGMETRIC, - 
__module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Describes a logs-based metric. The value of the metric is - the number of log entries that match a logs filter in a given time - interval. - - Logs-based metric can also be used to extract values from logs and - create a a distribution of the values. The distribution records the - statistics of the extracted values along with an optional histogram of - the values as specified by the bucket options. - - - Attributes: - name: - Required. The client-assigned metric identifier. Examples: - ``"error_count"``, ``"nginx/requests"``. Metric identifiers - are limited to 100 characters and can include only the - following characters: ``A-Z``, ``a-z``, ``0-9``, and the - special characters ``_-.,+!*',()%/``. The forward-slash - character (``/``) denotes a hierarchy of name pieces, and it - cannot be the first character of the name. The metric - identifier in this field must not be `URL-encoded - `__. However, - when the metric identifier appears as the ``[METRIC_ID]`` part - of a ``metric_name`` API parameter, then the metric identifier - must be URL-encoded. Example: ``"projects/my- - project/metrics/nginx%2Frequests"``. - description: - Optional. A description of this metric, which is used in - documentation. The maximum length of the description is 8000 - characters. - filter: - Required. An `advanced logs filter - `__ which is used to - match log entries. Example: :: "resource.type=gae_app - AND severity>=ERROR" The maximum length of the filter is - 20000 characters. - metric_descriptor: - Optional. The metric descriptor associated with the logs-based - metric. If unspecified, it uses a default metric descriptor - with a DELTA metric kind, INT64 value type, with no labels and - a unit of "1". Such a metric counts the number of log entries - matching the ``filter`` expression. The ``name``, ``type``, - and ``description`` fields in the ``metric_descriptor`` are - output only, and is constructed using the ``name`` and - ``description`` field in the LogMetric. To create a logs- - based metric that records a distribution of log values, a - DELTA metric kind with a DISTRIBUTION value type must be used - along with a ``value_extractor`` expression in the LogMetric. - Each label in the metric descriptor must have a matching label - name as the key and an extractor expression as the value in - the ``label_extractors`` map. The ``metric_kind`` and - ``value_type`` fields in the ``metric_descriptor`` cannot be - updated once initially configured. New labels can be added in - the ``metric_descriptor``, but existing labels cannot be - modified except for their description. - value_extractor: - Optional. A ``value_extractor`` is required when using a - distribution logs-based metric to extract the values to record - from a log entry. Two functions are supported for value - extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, - regex)``. The argument are: 1. field: The name of the log - entry field from which the value is to be extracted. 2. regex: - A regular expression using the Google RE2 syntax - (https://github.com/google/re2/wiki/Syntax) with a single - capture group to extract data from the specified log entry - field. The value of the field is converted to a string before - applying the regex. It is an error to specify a regex that - does not include exactly one capture group. The result of the - extraction must be convertible to a double type, as the - distribution always records double values. 
If either the - extraction or the conversion to double fails, then those - values are not recorded in the distribution. Example: - ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors: - Optional. A map from a label key string to an extractor - expression which is used to extract data from a log entry - field and assign as the label value. Each label key specified - in the LabelDescriptor must have an associated extractor - expression in this map. The syntax of the extractor expression - is the same as for the ``value_extractor`` field. The - extracted value is converted to the type defined in the label - descriptor. If the either the extraction or the type - conversion fails, the label will have a default value. The - default value for a string label is an empty string, for an - integer label its 0, and for a boolean label its ``false``. - Note that there are upper bounds on the maximum number of - labels and the number of active time series that are allowed - in a project. - bucket_options: - Optional. The ``bucket_options`` are required when the logs- - based metric is using a DISTRIBUTION value type and it - describes the bucket boundaries used to create a histogram of - the extracted values. - create_time: - Output only. The creation timestamp of the metric. This field - may not be present for older metrics. - update_time: - Output only. The last update timestamp of the metric. This - field may not be present for older metrics. - version: - Deprecated. The API version that created or updated this - metric. The v2 format is used by default and cannot be - changed. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - ), -) -_sym_db.RegisterMessage(LogMetric) -_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) - -ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to ListLogMetrics. - - - Attributes: - parent: - Required. The name of the project containing the metrics: :: - "projects/[PROJECT_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - ), -) -_sym_db.RegisterMessage(ListLogMetricsRequest) - -ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Result returned from ListLogMetrics. - - - Attributes: - metrics: - A list of logs-based metrics. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. 
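A sketch, not part of this patch, of the distribution-metric pattern the LogMetric docstring above describes: a DELTA/DISTRIBUTION descriptor paired with a single-capture-group value_extractor. All names are placeholders:

from google.api import metric_pb2
from google.cloud.logging_v2.proto import logging_metrics_pb2

metric = logging_metrics_pb2.LogMetric(
    name="nginx/request_quantity",
    filter="resource.type=gae_app AND severity>=ERROR",
    value_extractor=(
        r'REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")'),
    metric_descriptor=metric_pb2.MetricDescriptor(
        metric_kind=metric_pb2.MetricDescriptor.DELTA,
        value_type=metric_pb2.MetricDescriptor.DISTRIBUTION,
    ),
)
request = logging_metrics_pb2.CreateLogMetricRequest(
    parent="projects/my-project", metric=metric)
# metrics_stub.CreateLogMetric(request) would create it server-side.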
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - ), -) -_sym_db.RegisterMessage(ListLogMetricsResponse) - -GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "GetLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to GetLogMetric. - - - Attributes: - metric_name: - Required. The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - ), -) -_sym_db.RegisterMessage(GetLogMetricRequest) - -CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "CreateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to CreateLogMetric. - - - Attributes: - parent: - Required. The resource name of the project in which to create - the metric: :: "projects/[PROJECT_ID]" The new metric - must be provided in the request. - metric: - Required. The new logs-based metric, which must not have an - identifier that already exists. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(CreateLogMetricRequest) - -UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "UpdateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to UpdateLogMetric. - - - Attributes: - metric_name: - Required. The resource name of the metric to update: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated - metric must be provided in the request and it's ``name`` field - must be the same as ``[METRIC_ID]`` If the metric does not - exist in ``[PROJECT_ID]``, then a new metric is created. - metric: - Required. The updated metric. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(UpdateLogMetricRequest) - -DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to DeleteLogMetric. - - - Attributes: - metric_name: - Required. 
The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogMetricRequest) - - -DESCRIPTOR._options = None -_LOGMETRIC_LABELEXTRACTORSENTRY._options = None -_LOGMETRIC.fields_by_name["version"]._options = None -_LOGMETRIC._options = None -_LISTLOGMETRICSREQUEST.fields_by_name["parent"]._options = None -_GETLOGMETRICREQUEST.fields_by_name["metric_name"]._options = None -_CREATELOGMETRICREQUEST.fields_by_name["parent"]._options = None -_CREATELOGMETRICREQUEST.fields_by_name["metric"]._options = None -_UPDATELOGMETRICREQUEST.fields_by_name["metric_name"]._options = None -_UPDATELOGMETRICREQUEST.fields_by_name["metric"]._options = None -_DELETELOGMETRICREQUEST.fields_by_name["metric_name"]._options = None - -_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( - name="MetricsServiceV2", - full_name="google.logging.v2.MetricsServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1653, - serialized_end=2723, - methods=[ - _descriptor.MethodDescriptor( - name="ListLogMetrics", - full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", - index=0, - containing_service=None, - input_type=_LISTLOGMETRICSREQUEST, - output_type=_LISTLOGMETRICSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics\332A\006parent" - ), - ), - _descriptor.MethodDescriptor( - name="GetLogMetric", - full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", - index=1, - containing_service=None, - input_type=_GETLOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}\332A\013metric_name" - ), - ), - _descriptor.MethodDescriptor( - name="CreateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", - index=2, - containing_service=None, - input_type=_CREATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric\332A\rparent,metric' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", - index=3, - containing_service=None, - input_type=_UPDATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric\332A\022metric_name,metric" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteLogMetric", - full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", - index=4, - containing_service=None, - input_type=_DELETELOGMETRICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}\332A\013metric_name" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) - -DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py deleted file mode 100644 index a3a3733687c4..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ /dev/null @@ -1,111 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListLogMetrics = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/ListLogMetrics", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, - ) - self.GetLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/GetLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.CreateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/CreateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.UpdateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.DeleteLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics.""" - - def ListLogMetrics(self, request, context): - """Lists logs-based metrics.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLogMetric(self, request, context): - """Gets a logs-based metric.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateLogMetric(self, request, context): - """Creates a logs-based metric.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not 
implemented!") - - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "ListLogMetrics": grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, - ), - "GetLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "CreateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.MetricsServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py deleted file mode 100644 index 35c9b9c52449..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2.py +++ /dev/null @@ -1,1326 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.logging_v2.proto import ( - log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, -) -from google.cloud.logging_v2.proto import ( - logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/api/monitored_resource.proto\x1a\x19google/api/resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"H\n\x10\x44\x65leteLogRequest\x12\x34\n\x08log_name\x18\x01 \x01(\tB"\xe0\x41\x02\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log"\xcf\x02\n\x16WriteLogEntriesRequest\x12\x31\n\x08log_name\x18\x01 \x01(\tB\x1f\xfa\x41\x1c\n\x1alogging.googleapis.com/Log\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12\x31\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntryB\x03\xe0\x41\x02\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\xb5\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12:\n\x0eresource_names\x18\x08 
\x03(\tB"\xe0\x41\x02\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"i\n\x0fListLogsRequest\x12/\n\x06parent\x18\x01 \x01(\tB\x1f\xfa\x41\x1c\x12\x1alogging.googleapis.com/Log\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xdd\n\n\x10LoggingServiceV2\x12\x93\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xc8\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\xda\x41\x08log_name\x12\xa9\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"?\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\xda\x41 log_name,resource,labels,entries\x12\xa3\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"<\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\xda\x41\x1eresource_names,filter,order_by\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\x88\x02\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xb2\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\xda\x41\x06parent\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - - 
-_DELETELOGREQUEST = _descriptor.Descriptor( - name="DeleteLogRequest", - full_name="google.logging.v2.DeleteLogRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.DeleteLogRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\034\022\032logging.googleapis.com/Log" - ), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=436, - serialized_end=508, -) - - -_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=801, - serialized_end=846, -) - -_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( - name="WriteLogEntriesRequest", - full_name="google.logging.v2.WriteLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.WriteLogEntriesRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\372A\034\n\032logging.googleapis.com/Log"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.WriteLogEntriesRequest.resource", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.WriteLogEntriesRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.WriteLogEntriesRequest.entries", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\340A\002"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="partial_success", - full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dry_run", - full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=511, - serialized_end=846, -) - - -_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( - name="WriteLogEntriesResponse", - full_name="google.logging.v2.WriteLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=848, - serialized_end=873, -) - - -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( - name="LogEntryErrorsEntry", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1003, - serialized_end=1076, -) - -_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( - name="WriteLogEntriesPartialErrors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_entry_errors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, 
- has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=876, - serialized_end=1076, -) - - -_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( - name="ListLogEntriesRequest", - full_name="google.logging.v2.ListLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_ids", - full_name="google.logging.v2.ListLogEntriesRequest.project_ids", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_names", - full_name="google.logging.v2.ListLogEntriesRequest.resource_names", - index=1, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b( - "\340A\002\372A\034\022\032logging.googleapis.com/Log" - ), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.ListLogEntriesRequest.filter", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.logging.v2.ListLogEntriesRequest.order_by", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogEntriesRequest.page_size", - index=4, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogEntriesRequest.page_token", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1079, - serialized_end=1260, -) - - -_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( - name="ListLogEntriesResponse", - full_name="google.logging.v2.ListLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.ListLogEntriesResponse.entries", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1262, - serialized_end=1357, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsRequest", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1359, - serialized_end=1439, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsResponse", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_descriptors", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1442, - serialized_end=1580, -) - - -_LISTLOGSREQUEST = _descriptor.Descriptor( - name="ListLogsRequest", - full_name="google.logging.v2.ListLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\372A\034\022\032logging.googleapis.com/Log"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1582, - serialized_end=1687, -) - - -_LISTLOGSRESPONSE = _descriptor.Descriptor( - name="ListLogsResponse", - full_name="google.logging.v2.ListLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_names", - full_name="google.logging.v2.ListLogsResponse.log_names", - index=0, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1689, - serialized_end=1751, -) - -_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST -_WRITELOGENTRIESREQUEST.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_WRITELOGENTRIESREQUEST.fields_by_name[ - "labels" -].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY -_WRITELOGENTRIESREQUEST.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ - "value" -].message_type = google_dot_rpc_dot_status__pb2._STATUS 
-_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( - _WRITELOGENTRIESPARTIALERRORS -) -_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ - "log_entry_errors" -].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY -_LISTLOGENTRIESRESPONSE.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ - "resource_descriptors" -].message_type = ( - google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -) -DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "WriteLogEntriesPartialErrors" -] = _WRITELOGENTRIESPARTIALERRORS -DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsRequest" -] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsResponse" -] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DeleteLogRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to DeleteLog. - - - Attributes: - log_name: - Required. The resource name of the log to delete: :: - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example, ``"projects/my-project- - id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. For more information - about log names, see [LogEntry][google.logging.v2.LogEntry]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogRequest) - -WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesRequest", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to WriteLogEntries. - - - Attributes: - log_name: - Optional. A default log resource name that is assigned to all - log entries in ``entries`` that do not specify a value for - ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. 
For example: :: "projects/my-project- - id/logs/syslog" "organizations/1234567890/logs/cloudresour - cemanager.googleapis.com%2Factivity" The permission - logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new - log entries, whether the resource is specified in logName or - in an individual log entry. - resource: - Optional. A default monitored resource object that is assigned - to all log entries in ``entries`` that do not specify a value - for ``resource``. Example: :: { "type": "gce_instance", - "labels": { "zone": "us-central1-a", "instance_id": - "00000000000000000000" }} See - [LogEntry][google.logging.v2.LogEntry]. - labels: - Optional. Default labels that are added to the ``labels`` - field of all log entries in ``entries``. If a log entry - already has a label with the same key as a label in this - parameter, then the log entry's label is not changed. See - [LogEntry][google.logging.v2.LogEntry]. - entries: - Required. The log entries to send to Logging. The order of log - entries in this list does not matter. Values supplied in this - method's ``log_name``, ``resource``, and ``labels`` fields are - copied into those log entries in this list that do not include - values for their corresponding fields. For more information, - see the [LogEntry][google.logging.v2.LogEntry] type. If the - ``timestamp`` or ``insert_id`` fields are missing in log - entries, then this method supplies the current time or a - unique identifier, respectively. The supplied values are - chosen so that, among the log entries that did not supply - their own values, the entries earlier in the list will sort - before the entries later in the list. See the ``entries.list`` - method. Log entries with timestamps that are more than the - `logs retention period `__ in the past - or more than 24 hours in the future will not be available when - calling ``entries.list``. However, those log entries can still - be `exported with LogSinks `__. To improve throughput and to avoid exceeding the - `quota limit `__ for calls to - ``entries.write``, you should try to include several log - entries in this list, rather than calling this method for each - individual log entry. - partial_success: - Optional. Whether valid entries should be written even if some - other entries fail due to INVALID\_ARGUMENT or - PERMISSION\_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. - dry_run: - Optional. If true, the request should expect a normal response, - but the entries won't be persisted or exported. Useful for - checking whether the logging API endpoints are working - properly before sending valuable data. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesRequest) -_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) - -WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from WriteLogEntries.
empty - - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesResponse) - -WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesPartialErrors", - (_message.Message,), - dict( - LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType( - "LogEntryErrorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Error details for WriteLogEntries with partial success. - - - Attributes: - log_entry_errors: - When ``WriteLogEntriesRequest.partial_success`` is true, - records the error status for entries that were not written due - to a permanent error, keyed by the entry's zero-based index in - ``WriteLogEntriesRequest.entries``. Failed requests for which - no entries are written will not include per-entry errors. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - -ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ``ListLogEntries``. - - - Attributes: - project_ids: - Deprecated. Use ``resource_names`` instead. One or more - project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. - resource_names: - Required. Names of one or more parent resources from which to - retrieve log entries: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` - field are added to this list. - filter: - Optional. A filter that chooses which log entries to return. - See `Advanced Logs Queries `__. Only log entries that match the filter are - returned. An empty filter matches all log entries in the - resources listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will cause - the filter to return no results. The maximum length of the - filter is 20000 characters. - order_by: - Optional. How the results should be sorted. Presently, the - only permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in - order of increasing values of ``LogEntry.timestamp`` (oldest - first), and the second option returns entries in order of - decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` - values. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``next_page_token`` in the response indicates that more - results might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``page_token`` must be - the value of ``next_page_token`` from the previous response. 
- The values of other method parameters should be identical to - those in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(ListLogEntriesRequest) - -ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ``ListLogEntries``. - - - Attributes: - entries: - A list of log entries. If ``entries`` is empty, - ``nextPageToken`` may still be returned, indicating that more - entries may exist. See ``nextPageToken`` for more information. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. If a value for - ``next_page_token`` appears and the ``entries`` field is - empty, it means that the search found no log entries so far - but it did not have time to search all the possible log - entries. Retry the method with this value for ``page_token`` - to continue the search. Alternatively, consider speeding up - the search by changing your filter to specify a single log - name or resource type, or to narrow the time range of the - search. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(ListLogEntriesResponse) - -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListMonitoredResourceDescriptors - - - Attributes: - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) - -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListMonitoredResourceDescriptors. - - - Attributes: - resource_descriptors: - A list of resource descriptors. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. 
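The ``page_token``/``next_page_token`` contract spelled out in these docstrings is the same for every List method in this file. A minimal sketch of draining ``ListLogEntries``, assuming ``stub`` is an already-connected ``LoggingServiceV2Stub`` (the companion gRPC module removed later in this patch):

```python
# Sketch: page through ListLogEntries until next_page_token comes back empty.
from google.cloud.logging_v2.proto import logging_pb2

page_token = ""
while True:
    response = stub.ListLogEntries(
        logging_pb2.ListLogEntriesRequest(
            resource_names=["projects/my-project"],
            filter="severity>=ERROR",
            order_by="timestamp asc",
            page_token=page_token,  # empty string on the first call
        )
    )
    for entry in response.entries:
        print(entry.log_name)
    if not response.next_page_token:
        break
    # Per the docs, entries may be empty while a token is still returned;
    # keep paging until the token itself is empty.
    page_token = response.next_page_token
```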
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) - -ListLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListLogs. - - - Attributes: - parent: - Required. The resource name that owns the logs: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) - ), -) -_sym_db.RegisterMessage(ListLogsRequest) - -ListLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListLogs. - - - Attributes: - log_names: - A list of log names. For example, ``"projects/my- - project/logs/syslog"`` or ``"organizations/123/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) - ), -) -_sym_db.RegisterMessage(ListLogsResponse) - - -DESCRIPTOR._options = None -_DELETELOGREQUEST.fields_by_name["log_name"]._options = None -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None -_WRITELOGENTRIESREQUEST.fields_by_name["log_name"]._options = None -_WRITELOGENTRIESREQUEST.fields_by_name["entries"]._options = None -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None -_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None -_LISTLOGENTRIESREQUEST.fields_by_name["resource_names"]._options = None -_LISTLOGSREQUEST.fields_by_name["parent"]._options = None - -_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( - name="LoggingServiceV2", - full_name="google.logging.v2.LoggingServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1754, - serialized_end=3127, - methods=[ - _descriptor.MethodDescriptor( - name="DeleteLog", - full_name="google.logging.v2.LoggingServiceV2.DeleteLog", - index=0, - containing_service=None, - input_type=_DELETELOGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}\332A\010log_name" - ), - ), - _descriptor.MethodDescriptor( - name="WriteLogEntries", - full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", - index=1, - containing_service=None, - input_type=_WRITELOGENTRIESREQUEST, - output_type=_WRITELOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\026"\021/v2/entries:write:\001*\332A log_name,resource,labels,entries' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogEntries", - full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", - index=2, - containing_service=None, - input_type=_LISTLOGENTRIESREQUEST, - output_type=_LISTLOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\025"\020/v2/entries:list:\001*\332A\036resource_names,filter,order_by' - ), - ), - _descriptor.MethodDescriptor( - name="ListMonitoredResourceDescriptors", - full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", - index=3, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogs", - full_name="google.logging.v2.LoggingServiceV2.ListLogs", - index=4, - containing_service=None, - input_type=_LISTLOGSREQUEST, - output_type=_LISTLOGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs\332A\006parent" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) - -DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff 
--git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py deleted file mode 100644 index 2e444b925486..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ /dev/null @@ -1,127 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.DeleteLog = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/DeleteLog", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.WriteLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/WriteLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, - ) - self.ListLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, - ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.ListLogs = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogs", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, - ) - - -class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs.""" - - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. The log reappears if it receives new - entries. Log entries written shortly before the delete operation might not - be deleted. Entries received after the delete operation with a timestamp - before the operation will be deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def WriteLogEntries(self, request, context): - """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. 
- A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries that originated - from a project/folder/organization/billing account. For ways to export log - entries, see [Exporting Logs](/logging/docs/export). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "DeleteLog": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "WriteLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, - ), - "ListLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, - ), - "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - "ListLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.LoggingServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/py.typed b/packages/google-cloud-logging/google/cloud/logging_v2/py.typed new file mode 100644 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/py.typed @@ -0,0 +1,2 @@ 
+# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. diff --git a/packages/google-cloud-logging/google/cloud/logging/resource.py b/packages/google-cloud-logging/google/cloud/logging_v2/resource.py similarity index 68% rename from packages/google-cloud-logging/google/cloud/logging/resource.py rename to packages/google-cloud-logging/google/cloud/logging_v2/resource.py index dda59ca09f61..eed5ca5fa5e6 100644 --- a/packages/google-cloud-logging/google/cloud/logging/resource.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/resource.py @@ -20,12 +20,10 @@ class Resource(collections.namedtuple("Resource", "type labels")): """A monitored resource identified by specifying values for all labels. - :type type: str - :param type: The resource type name. - - :type labels: dict - :param labels: A mapping from label names to values for all labels - enumerated in the associated :class:`ResourceDescriptor`. + Attributes: + type (str): The resource type name. + labels (dict): A mapping from label names to values for all labels + enumerated in the associated :class:`ResourceDescriptor`. """ __slots__ = () @@ -34,20 +32,20 @@ class Resource(collections.namedtuple("Resource", "type labels")): def _from_dict(cls, info): """Construct a resource object from the parsed JSON representation. - :type info: dict - :param info: - A ``dict`` parsed from the JSON wire-format representation. + Args: + info (dict): A ``dict`` parsed from the JSON wire-format representation. - :rtype: :class:`Resource` - :returns: A resource object. + Returns: + Resource: A resource object. """ return cls(type=info["type"], labels=info.get("labels", {})) def _to_dict(self): """Build a dictionary ready to be serialized to the JSON format. - :rtype: dict - :returns: A dict representation of the object that can be written to - the API. + Returns: + dict: + A dict representation of the object that can be written to + the API. """ return {"type": self.type, "labels": self.labels} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py new file mode 100644 index 000000000000..42ffdf2bc43d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py new file mode 100644 index 000000000000..4ab8f4d4080a --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import ConfigServiceV2Client +from .async_client import ConfigServiceV2AsyncClient + +__all__ = ( + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py new file mode 100644 index 000000000000..d025f5916648 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -0,0 +1,1531 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .client import ConfigServiceV2Client + + +class ConfigServiceV2AsyncClient: + """Service for configuring sinks used to route log entries.""" + + _client: ConfigServiceV2Client + + DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + + cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) + parse_cmek_settings_path = staticmethod( + ConfigServiceV2Client.parse_cmek_settings_path + ) + log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) + parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) + log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) + parse_log_exclusion_path = staticmethod( + ConfigServiceV2Client.parse_log_exclusion_path + ) + log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) + parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) + + common_billing_account_path = staticmethod( + 
ConfigServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConfigServiceV2Client.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod( + ConfigServiceV2Client.parse_common_folder_path + ) + + common_organization_path = staticmethod( + ConfigServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConfigServiceV2Client.parse_common_organization_path + ) + + common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) + parse_common_project_path = staticmethod( + ConfigServiceV2Client.parse_common_project_path + ) + + common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) + parse_common_location_path = staticmethod( + ConfigServiceV2Client.parse_common_location_path + ) + + from_service_account_file = ConfigServiceV2Client.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Return the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + + self._client = ConfigServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_buckets( + self, + request: logging_config.ListBucketsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsAsyncPager: + r"""Lists buckets (Beta). + + Args: + request (:class:`~.logging_config.ListBucketsRequest`): + The request object. The parameters to `ListBuckets` + (Beta). + parent (:class:`str`): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListBucketsAsyncPager: + The response from ListBuckets (Beta). + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListBucketsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_buckets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBucketsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bucket( + self, + request: logging_config.GetBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket (Beta). + + Args: + request (:class:`~.logging_config.GetBucketRequest`): + The request object. The parameters to `GetBucket` + (Beta). 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs + (Beta). + + """ + # Create or coerce a protobuf request object. + + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_bucket( + self, + request: logging_config.UpdateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A bucket's region may not be modified after it is created. This + method is in Beta. + + Args: + request (:class:`~.logging_config.UpdateBucketRequest`): + The request object. The parameters to `UpdateBucket` + (Beta). + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs + (Beta). + + """ + # Create or coerce a protobuf request object. + + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_sinks( + self, + request: logging_config.ListSinksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksAsyncPager: + r"""Lists sinks. + + Args: + request (:class:`~.logging_config.ListSinksRequest`): + The request object. The parameters to `ListSinks`. + parent (:class:`str`): + Required.
The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSinksAsyncPager: + Result returned from ``ListSinks``. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListSinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_sinks, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSinksAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sink( + self, + request: logging_config.GetSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + Args: + request (:class:`~.logging_config.GetSinkRequest`): + The request object. The parameters to `GetSink`. + sink_name (:class:`str`): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
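A minimal sketch of overriding the default retry policy shown above on a per-call basis (not part of the patch; the sink name is hypothetical):

from google.api_core import retry as retries


async def fetch_sink(client):
    # Per-call retry/timeout take precedence over the wrapped defaults.
    return await client.get_sink(
        sink_name="projects/my-project/sinks/my-sink",
        retry=retries.Retry(deadline=30.0),
        timeout=30.0,
    )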
+ + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.GetSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_sink( + self, + request: logging_config.CreateSinkRequest = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (:class:`~.logging_config.CreateSinkRequest`): + The request object. The parameters to `CreateSink`. + parent (:class:`str`): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`~.logging_config.LogSink`): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
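A minimal sketch of `create_sink` with the flattened `parent` and `sink` arguments described above (not part of the patch; project, sink name, and destination are hypothetical):

from google.cloud.logging_v2.types import logging_config


async def make_sink(client):
    sink = logging_config.LogSink(
        name="my-sink",
        destination="storage.googleapis.com/my-export-bucket",
        filter="severity>=ERROR",
    )
    # The response carries the server-assigned writer_identity.
    return await client.create_sink(parent="projects/my-project", sink=sink)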
+ + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.CreateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_sink, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_sink( + self, + request: logging_config.UpdateSinkRequest = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (:class:`~.logging_config.UpdateSinkRequest`): + The request object. The parameters to `UpdateSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`~.logging_config.LogSink`): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. 
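A minimal sketch of the update-mask semantics described above: only fields named in the mask are overwritten, so this call changes the sink's filter and nothing else (not part of the patch; identifiers are hypothetical):

from google.protobuf import field_mask_pb2


async def tighten_filter(client, sink):
    sink.filter = "severity>=CRITICAL"
    return await client.update_sink(
        sink_name="projects/my-project/sinks/my-sink",
        sink=sink,
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )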
+ + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + ``destination,filter,includeChildren``. At some point in the + future, this behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.UpdateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_sink( + self, + request: logging_config.DeleteSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (:class:`~.logging_config.DeleteSinkRequest`): + The request object. The parameters to `DeleteSink`. + sink_name (:class:`str`): + Required.
The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.DeleteSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def list_exclusions( + self, + request: logging_config.ListExclusionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (:class:`~.logging_config.ListExclusionsRequest`): + The request object. The parameters to `ListExclusions`. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListExclusionsAsyncPager: + Result returned from ``ListExclusions``. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_exclusions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion( + self, + request: logging_config.GetExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (:class:`~.logging_config.GetExclusionRequest`): + The request object. The parameters to `GetExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_exclusion( + self, + request: logging_config.CreateExclusionRequest = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (:class:`~.logging_config.CreateExclusionRequest`): + The request object. The parameters to `CreateExclusion`. + parent (:class:`str`): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`~.logging_config.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_exclusion( + self, + request: logging_config.UpdateExclusionRequest = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (:class:`~.logging_config.UpdateExclusionRequest`): + The request object. The parameters to `UpdateExclusion`. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`~.logging_config.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. 
If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_exclusion( + self, + request: logging_config.DeleteExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (:class:`~.logging_config.DeleteExclusionRequest`): + The request object. The parameters to `DeleteExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_cmek_settings( + self, + request: logging_config.GetCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (:class:`~.logging_config.GetCmekSettingsRequest`): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.CmekSettings: + Describes the customer-managed encryption key (CMEK) + settings associated with a project, folder, + organization, billing account, or flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_cmek_settings( + self, + request: logging_config.UpdateCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations.
Once configured, it applies to all + projects and folders in the GCP organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (:class:`~.logging_config.UpdateCmekSettingsRequest`): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.CmekSettings: + Describes the customer-managed encryption key (CMEK) + settings associated with a project, folder, + organization, billing account, or flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ConfigServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py new file mode 100644 index 000000000000..ea9ee605a3a6 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -0,0 +1,1692 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
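A minimal sketch of the synchronous client that the client.py module below defines, constructed from an explicit service-account key, along with its path helpers, which are plain static methods (not part of the patch; the key path and IDs are hypothetical):

from google.cloud.logging_v2.services.config_service_v2 import (
    ConfigServiceV2Client,
)

client = ConfigServiceV2Client.from_service_account_file(
    "/path/to/service-account.json"
)

# Path helpers build and parse resource names without a client instance.
sink_path = ConfigServiceV2Client.log_sink_path("my-project", "my-sink")
assert sink_path == "projects/my-project/sinks/my-sink"
assert ConfigServiceV2Client.parse_log_sink_path(sink_path) == {
    "project": "my-project",
    "sink": "my-sink",
}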
+# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import ConfigServiceV2GrpcTransport +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport + + +class ConfigServiceV2ClientMeta(type): + """Metaclass for the ConfigServiceV2 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ConfigServiceV2Transport]] + _transport_registry["grpc"] = ConfigServiceV2GrpcTransport + _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta): + """Service for configuring sinks used to route log entries.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Return the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client instance. + """ + return self._transport + + @staticmethod + def cmek_settings_path(project: str,) -> str: + """Return a fully-qualified cmek_settings string.""" + return "projects/{project}/cmekSettings".format(project=project,) + + @staticmethod + def parse_cmek_settings_path(path: str) -> Dict[str, str]: + """Parse a cmek_settings path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/cmekSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_bucket_path(project: str, location: str, bucket: str,) -> str: + """Return a fully-qualified log_bucket string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, location=location, bucket=bucket, + ) + + @staticmethod + def parse_log_bucket_path(path: str) -> Dict[str, str]: + """Parse a log_bucket path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def log_exclusion_path(project: str, exclusion: str,) -> str: + """Return a fully-qualified log_exclusion string.""" + return "projects/{project}/exclusions/{exclusion}".format( + project=project, exclusion=exclusion, + ) + + @staticmethod + def parse_log_exclusion_path(path: str) -> Dict[str, str]: + """Parse a log_exclusion path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/exclusions/(?P<exclusion>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_sink_path(project: str, sink: str,) -> str: + """Return a fully-qualified log_sink string.""" + return "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + + @staticmethod + def parse_log_sink_path(path: str) -> Dict[str, str]: + """Parse a log_sink path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse
a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, ConfigServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present.
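The resource-path helpers above are symmetric: the ``*_path`` builders and ``parse_*_path`` parsers round-trip, and a path that does not match the pattern parses to an empty dict. A quick sketch (IDs are placeholders)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )

    path = ConfigServiceV2Client.log_sink_path("my-project", "my-sink")
    assert path == "projects/my-project/sinks/my-sink"
    assert ConfigServiceV2Client.parse_log_sink_path(path) == {
        "project": "my-project",
        "sink": "my-sink",
    }
    # Non-matching input yields an empty dict rather than raising.
    assert ConfigServiceV2Client.parse_log_sink_path("not-a-sink-path") == {}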
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConfigServiceV2Transport): + # transport is a ConfigServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_buckets( + self, + request: logging_config.ListBucketsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsPager: + r"""Lists buckets (Beta). + + Args: + request (:class:`~.logging_config.ListBucketsRequest`): + The request object. The parameters to `ListBuckets` + (Beta). + parent (:class:`str`): + Required. 
The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListBucketsPager: + The response from ListBuckets (Beta). + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListBucketsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_buckets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBucketsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_bucket( + self, + request: logging_config.GetBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket (Beta). + + Args: + request (:class:`~.logging_config.GetBucketRequest`): + The request object. The parameters to `GetBucket` + (Beta). + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs + (Beta). + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetBucketRequest. 
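For ``list_buckets`` above, the returned pager hides pagination entirely; a sketch of typical iteration (the project ID is a placeholder, and ``-`` is the location wildcard described in the docstring)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )

    client = ConfigServiceV2Client()  # assumes ambient credentials
    parent = "projects/my-project/locations/-"  # '-' spans all locations
    for bucket in client.list_buckets(parent=parent):
        # Iteration may transparently fetch additional pages.
        print(bucket.name)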
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_bucket( + self, + request: logging_config.UpdateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A bucket's region may not be modified after it is created. This + method is in Beta. + + Args: + request (:class:`~.logging_config.UpdateBucketRequest`): + The request object. The parameters to `UpdateBucket` + (Beta). + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs + (Beta). + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_sinks( + self, + request: logging_config.ListSinksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksPager: + r"""Lists sinks. + + Args: + request (:class:`~.logging_config.ListSinksRequest`): + The request object. The parameters to `ListSinks`. + parent (:class:`str`): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]".
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSinksPager: + Result returned from ``ListSinks``. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListSinksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sinks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSinksPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sink( + self, + request: logging_config.GetSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + Args: + request (:class:`~.logging_config.GetSinkRequest`): + The request object. The parameters to `GetSink`. + sink_name (:class:`str`): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. 
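A corresponding sketch for ``get_sink`` using the flattened ``sink_name`` argument (identifiers are placeholders)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )

    client = ConfigServiceV2Client()
    sink = client.get_sink(sink_name="projects/my-project/sinks/my-sink")
    print(sink.destination, sink.filter)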
+ The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_sink( + self, + request: logging_config.CreateSinkRequest = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (:class:`~.logging_config.CreateSinkRequest`): + The request object. The parameters to `CreateSink`. + parent (:class:`str`): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`~.logging_config.LogSink`): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. 
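A sketch of the flattened ``create_sink`` form (the destination and IDs are placeholders; granting the returned ``writer_identity`` access to the destination is the caller's responsibility)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    sink = logging_config.LogSink(
        name="my-sink",  # must not collide with an existing sink
        destination="storage.googleapis.com/my-export-bucket",  # placeholder
        filter="severity>=ERROR",
    )
    created = client.create_sink(parent="projects/my-project", sink=sink)
    # The service account to authorize on the destination:
    print(created.writer_identity)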
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_sink( + self, + request: logging_config.UpdateSinkRequest = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (:class:`~.logging_config.UpdateSinkRequest`): + The request object. The parameters to `UpdateSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`~.logging_config.LogSink`): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + ``destination,filter,includeChildren``. At some point in the + future, this behavior will be removed, and specifying an empty + updateMask will be an error.
+ + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_sink( + self, + request: logging_config.DeleteSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (:class:`~.logging_config.DeleteSinkRequest`): + The request object. The parameters to `DeleteSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
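Tying the ``update_mask`` notes above together, a sketch that updates only the filter of an existing sink (names are placeholders)::

    from google.protobuf import field_mask_pb2

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    updated = client.update_sink(
        sink_name="projects/my-project/sinks/my-sink",  # placeholder
        sink=logging_config.LogSink(filter="severity>=WARNING"),
        # An explicit mask avoids the legacy empty-mask behavior noted above.
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )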
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def list_exclusions( + self, + request: logging_config.ListExclusionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (:class:`~.logging_config.ListExclusionsRequest`): + The request object. The parameters to `ListExclusions`. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListExclusionsPager: + Result returned from ``ListExclusions``. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListExclusionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
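The sanity check above reflects a convention shared by every method in this file: callers pass either a ``request`` object or the flattened fields, never both. A sketch::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    # Either a request object...
    client.list_exclusions(
        request=logging_config.ListExclusionsRequest(parent="projects/my-project")
    )
    # ...or the flattened field; supplying both raises ValueError.
    client.list_exclusions(parent="projects/my-project")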
+ if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExclusionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion( + self, + request: logging_config.GetExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (:class:`~.logging_config.GetExclusionRequest`): + The request object. The parameters to `GetExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_exclusion( + self, + request: logging_config.CreateExclusionRequest = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (:class:`~.logging_config.CreateExclusionRequest`): + The request object. The parameters to `CreateExclusion`. + parent (:class:`str`): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`~.logging_config.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
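A sketch of ``create_exclusion`` (the filter and names are illustrative)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    exclusion = logging_config.LogExclusion(
        name="exclude-debug",  # must be unused in the parent resource
        filter="severity<=DEBUG",
        description="Drop noisy debug entries.",
    )
    client.create_exclusion(parent="projects/my-project", exclusion=exclusion)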
+ + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_exclusion( + self, + request: logging_config.UpdateExclusionRequest = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (:class:`~.logging_config.UpdateExclusionRequest`): + The request object. The parameters to `UpdateExclusion`. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`~.logging_config.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_exclusion( + self, + request: logging_config.DeleteExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (:class:`~.logging_config.DeleteExclusionRequest`): + The request object. The parameters to `DeleteExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def get_cmek_settings( + self, + request: logging_config.GetCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (:class:`~.logging_config.GetCmekSettingsRequest`): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.CmekSettings: + Describes the customer-managed encryption key (CMEK) + settings associated with a project, folder, + organization, billing account, or flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_cmek_settings( + self, + request: logging_config.UpdateCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization.
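Because CMEK settings are currently organization-scoped (see the note above), a read sketch looks like this (the organization ID is a placeholder)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    settings = client.get_cmek_settings(
        request=logging_config.GetCmekSettingsRequest(
            name="organizations/123456789/cmekSettings"  # placeholder org ID
        )
    )
    print(settings.kms_key_name, settings.service_account_id)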
+ + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (:class:`~.logging_config.UpdateCmekSettingsRequest`): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.CmekSettings: + Describes the customer-managed encryption key (CMEK) + settings associated with a project, folder, + organization, billing account, or flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ConfigServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py new file mode 100644 index 000000000000..173780b5eeb0 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.logging_v2.types import logging_config + + +class ListBucketsPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListBucketsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListBucketsResponse], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListBucketsRequest`): + The initial request object. + response (:class:`~.logging_config.ListBucketsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogBucket]: + for page in self.pages: + yield from page.buckets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBucketsAsyncPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListBucketsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListBucketsRequest`): + The initial request object. 
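The ``pages`` generator above is what drives pagination; iterating it directly exposes per-page fields such as ``next_page_token``. A sketch (placeholders as before)::

    from google.cloud.logging_v2.services.config_service_v2.client import (
        ConfigServiceV2Client,
    )

    client = ConfigServiceV2Client()
    pager = client.list_buckets(parent="projects/my-project/locations/-")
    for page in pager.pages:
        # Each page is a full ListBucketsResponse.
        print(page.next_page_token, len(page.buckets))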
+ response (:class:`~.logging_config.ListBucketsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + async def async_generator(): + async for page in self.pages: + for response in page.buckets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSinksPager: + """A pager for iterating through ``list_sinks`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListSinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sinks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListSinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListSinksRequest`): + The initial request object. + response (:class:`~.logging_config.ListSinksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogSink]: + for page in self.pages: + yield from page.sinks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSinksAsyncPager: + """A pager for iterating through ``list_sinks`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListSinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sinks`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListSinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListSinksRequest`): + The initial request object. + response (:class:`~.logging_config.ListSinksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogSink]: + async def async_generator(): + async for page in self.pages: + for response in page.sinks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExclusionsPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListExclusionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListExclusionsRequest`): + The initial request object. + response (:class:`~.logging_config.ListExclusionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogExclusion]: + for page in self.pages: + yield from page.exclusions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExclusionsAsyncPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListExclusionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListExclusionsRequest`): + The initial request object. + response (:class:`~.logging_config.ListExclusionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + async def async_generator(): + async for page in self.pages: + for response in page.exclusions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py new file mode 100644 index 000000000000..c4ae13076d0c --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConfigServiceV2Transport +from .grpc import ConfigServiceV2GrpcTransport +from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] +_transport_registry["grpc"] = ConfigServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport + + +__all__ = ( + "ConfigServiceV2Transport", + "ConfigServiceV2GrpcTransport", + "ConfigServiceV2GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py new file mode 100644 index 000000000000..a0393aa98c25 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.logging_v2.types import logging_config
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class ConfigServiceV2Transport(abc.ABC):
+    """Abstract transport class for ConfigServiceV2."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/logging.read",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Lifted into its own function so it can be stubbed out during tests.
+        self._prep_wrapped_messages(client_info)
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+ self._wrapped_methods = { + self.list_buckets: gapic_v1.method.wrap_method( + self.list_buckets, default_timeout=None, client_info=client_info, + ), + self.get_bucket: gapic_v1.method.wrap_method( + self.get_bucket, default_timeout=None, client_info=client_info, + ), + self.update_bucket: gapic_v1.method.wrap_method( + self.update_bucket, default_timeout=None, client_info=client_info, + ), + self.list_sinks: gapic_v1.method.wrap_method( + self.list_sinks, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: gapic_v1.method.wrap_method( + self.get_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method.wrap_method( + self.create_sink, default_timeout=120.0, client_info=client_info, + ), + self.update_sink: gapic_v1.method.wrap_method( + self.update_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method.wrap_method( + self.delete_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method.wrap_method( + self.list_exclusions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method.wrap_method( + self.get_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: gapic_v1.method.wrap_method( + self.create_exclusion, default_timeout=120.0, client_info=client_info, + ), + self.update_exclusion: gapic_v1.method.wrap_method( + self.update_exclusion, default_timeout=120.0, client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method.wrap_method( + self.delete_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method.wrap_method( + self.get_cmek_settings, default_timeout=None, client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + } + + @property + 
def list_buckets( + self, + ) -> typing.Callable[ + [logging_config.ListBucketsRequest], + typing.Union[ + logging_config.ListBucketsResponse, + typing.Awaitable[logging_config.ListBucketsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_bucket( + self, + ) -> typing.Callable[ + [logging_config.GetBucketRequest], + typing.Union[ + logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] + ], + ]: + raise NotImplementedError() + + @property + def update_bucket( + self, + ) -> typing.Callable[ + [logging_config.UpdateBucketRequest], + typing.Union[ + logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] + ], + ]: + raise NotImplementedError() + + @property + def list_sinks( + self, + ) -> typing.Callable[ + [logging_config.ListSinksRequest], + typing.Union[ + logging_config.ListSinksResponse, + typing.Awaitable[logging_config.ListSinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_sink( + self, + ) -> typing.Callable[ + [logging_config.GetSinkRequest], + typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + ]: + raise NotImplementedError() + + @property + def create_sink( + self, + ) -> typing.Callable[ + [logging_config.CreateSinkRequest], + typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + ]: + raise NotImplementedError() + + @property + def update_sink( + self, + ) -> typing.Callable[ + [logging_config.UpdateSinkRequest], + typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + ]: + raise NotImplementedError() + + @property + def delete_sink( + self, + ) -> typing.Callable[ + [logging_config.DeleteSinkRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def list_exclusions( + self, + ) -> typing.Callable[ + [logging_config.ListExclusionsRequest], + typing.Union[ + logging_config.ListExclusionsResponse, + typing.Awaitable[logging_config.ListExclusionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_exclusion( + self, + ) -> typing.Callable[ + [logging_config.GetExclusionRequest], + typing.Union[ + logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] + ], + ]: + raise NotImplementedError() + + @property + def create_exclusion( + self, + ) -> typing.Callable[ + [logging_config.CreateExclusionRequest], + typing.Union[ + logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] + ], + ]: + raise NotImplementedError() + + @property + def update_exclusion( + self, + ) -> typing.Callable[ + [logging_config.UpdateExclusionRequest], + typing.Union[ + logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] + ], + ]: + raise NotImplementedError() + + @property + def delete_exclusion( + self, + ) -> typing.Callable[ + [logging_config.DeleteExclusionRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def get_cmek_settings( + self, + ) -> typing.Callable[ + [logging_config.GetCmekSettingsRequest], + typing.Union[ + logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] + ], + ]: + raise NotImplementedError() + + @property + def update_cmek_settings( + self, + ) -> typing.Callable[ + [logging_config.UpdateCmekSettingsRequest], + typing.Union[ + logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] + ], + ]: + raise NotImplementedError() + + +__all__ = ("ConfigServiceV2Transport",) 
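The retry policy wired up in ``_prep_wrapped_messages`` above is an exponential backoff: the first delay is ``initial=0.1`` seconds, each subsequent delay grows by ``multiplier=1.3``, and individual delays are capped at ``maximum=60.0`` seconds, all within the ``default_timeout=60.0`` overall deadline. Only errors matched by the ``predicate`` (DeadlineExceeded, InternalServerError, ServiceUnavailable) are retried; anything else propagates immediately. A minimal sketch of the nominal delay schedule those parameters produce (the ``backoff_schedule`` helper below is illustrative only, not part of the library; the real ``google.api_core.retry.Retry`` also randomizes each sleep):

    # Illustrative only: nominal (un-jittered) delays implied by the retry
    # settings above (initial=0.1, multiplier=1.3, maximum=60.0).
    def backoff_schedule(initial=0.1, multiplier=1.3, maximum=60.0, attempts=10):
        """Yield the nominal sleep, in seconds, before each retry attempt."""
        delay = initial
        for _ in range(attempts):
            yield min(delay, maximum)
            delay *= multiplier

    print([round(d, 3) for d in backoff_schedule()])
    # [0.1, 0.13, 0.169, 0.22, 0.286, 0.371, 0.483, 0.627, 0.816, 1.06]

In practice the cap matters only for long-running outages: with a 1.3 multiplier the nominal delay does not reach 60 seconds until roughly the 25th attempt, well past the 60-second default deadline.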
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
new file mode 100644
index 000000000000..5603beeb5247
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
@@ -0,0 +1,675 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.logging_v2.types import logging_config
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
+
+
+class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
+    """gRPC backend transport for ConfigServiceV2.
+
+    Service for configuring sinks used to route log entries.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def list_buckets(
+        self,
+    ) -> Callable[
+        [logging_config.ListBucketsRequest], logging_config.ListBucketsResponse
+    ]:
+        r"""Return a callable for the list buckets method over gRPC.
+
+        Lists buckets (Beta).
+
+        Returns:
+            Callable[[~.ListBucketsRequest],
+                    ~.ListBucketsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_buckets" not in self._stubs:
+            self._stubs["list_buckets"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListBuckets",
+                request_serializer=logging_config.ListBucketsRequest.serialize,
+                response_deserializer=logging_config.ListBucketsResponse.deserialize,
+            )
+        return self._stubs["list_buckets"]
+
+    @property
+    def get_bucket(
+        self,
+    ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]:
+        r"""Return a callable for the get bucket method over gRPC.
+
+        Gets a bucket (Beta).
+
+        Returns:
+            Callable[[~.GetBucketRequest],
+                    ~.LogBucket]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_bucket" not in self._stubs:
+            self._stubs["get_bucket"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetBucket",
+                request_serializer=logging_config.GetBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs["get_bucket"]
+
+    @property
+    def update_bucket(
+        self,
+    ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]:
+        r"""Return a callable for the update bucket method over gRPC.
+
+        Updates a bucket. This method replaces the following fields in
+        the existing bucket with values from the new bucket:
+        ``retention_period``
+
+        If the retention period is decreased and the bucket is locked,
+        FAILED_PRECONDITION will be returned.
+
+        If the bucket has a LifecycleState of DELETE_REQUESTED,
+        FAILED_PRECONDITION will be returned.
+
+        A bucket's region may not be modified after it is created. This
+        method is in Beta.
+
+        Returns:
+            Callable[[~.UpdateBucketRequest],
+                    ~.LogBucket]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_bucket" not in self._stubs:
+            self._stubs["update_bucket"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucket",
+                request_serializer=logging_config.UpdateBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs["update_bucket"]
+
+    @property
+    def list_sinks(
+        self,
+    ) -> Callable[[logging_config.ListSinksRequest], logging_config.ListSinksResponse]:
+        r"""Return a callable for the list sinks method over gRPC.
+
+        Lists sinks.
+
+        Returns:
+            Callable[[~.ListSinksRequest],
+                    ~.ListSinksResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_sinks" not in self._stubs:
+            self._stubs["list_sinks"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListSinks",
+                request_serializer=logging_config.ListSinksRequest.serialize,
+                response_deserializer=logging_config.ListSinksResponse.deserialize,
+            )
+        return self._stubs["list_sinks"]
+
+    @property
+    def get_sink(
+        self,
+    ) -> Callable[[logging_config.GetSinkRequest], logging_config.LogSink]:
+        r"""Return a callable for the get sink method over gRPC.
+
+        Gets a sink.
+
+        Returns:
+            Callable[[~.GetSinkRequest],
+                    ~.LogSink]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_sink" not in self._stubs:
+            self._stubs["get_sink"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetSink",
+                request_serializer=logging_config.GetSinkRequest.serialize,
+                response_deserializer=logging_config.LogSink.deserialize,
+            )
+        return self._stubs["get_sink"]
+
+    @property
+    def create_sink(
+        self,
+    ) -> Callable[[logging_config.CreateSinkRequest], logging_config.LogSink]:
+        r"""Return a callable for the create sink method over gRPC.
+ + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs["create_sink"] + + @property + def update_sink( + self, + ) -> Callable[[logging_config.UpdateSinkRequest], logging_config.LogSink]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs["update_sink"] + + @property + def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empty]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_sink"] + + @property + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse + ]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + ~.ListExclusionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs["list_exclusions"] + + @property + def get_exclusion( + self, + ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs["get_exclusion"] + + @property + def create_exclusion( + self, + ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Returns: + Callable[[~.CreateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs["create_exclusion"] + + @property + def update_exclusion( + self, + ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: + r"""Return a callable for the update exclusion method over gRPC. + + Changes one or more properties of an existing + exclusion. + + Returns: + Callable[[~.UpdateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "update_exclusion" not in self._stubs:
+            self._stubs["update_exclusion"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateExclusion",
+                request_serializer=logging_config.UpdateExclusionRequest.serialize,
+                response_deserializer=logging_config.LogExclusion.deserialize,
+            )
+        return self._stubs["update_exclusion"]
+
+    @property
+    def delete_exclusion(
+        self,
+    ) -> Callable[[logging_config.DeleteExclusionRequest], empty.Empty]:
+        r"""Return a callable for the delete exclusion method over gRPC.
+
+        Deletes an exclusion.
+
+        Returns:
+            Callable[[~.DeleteExclusionRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_exclusion" not in self._stubs:
+            self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteExclusion",
+                request_serializer=logging_config.DeleteExclusionRequest.serialize,
+                response_deserializer=empty.Empty.FromString,
+            )
+        return self._stubs["delete_exclusion"]
+
+    @property
+    def get_cmek_settings(
+        self,
+    ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]:
+        r"""Return a callable for the get cmek settings method over gRPC.
+
+        Gets the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Returns:
+            Callable[[~.GetCmekSettingsRequest],
+                    ~.CmekSettings]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_cmek_settings" not in self._stubs:
+            self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetCmekSettings",
+                request_serializer=logging_config.GetCmekSettingsRequest.serialize,
+                response_deserializer=logging_config.CmekSettings.deserialize,
+            )
+        return self._stubs["get_cmek_settings"]
+
+    @property
+    def update_cmek_settings(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings
+    ]:
+        r"""Return a callable for the update cmek settings method over gRPC.
+
+        Updates the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
+        will fail if 1) ``kms_key_name`` is invalid, or 2) the
+        associated service account does not have the required
+        ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+        the key, or 3) access to the key is disabled.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Returns:
+            Callable[[~.UpdateCmekSettingsRequest],
+                    ~.CmekSettings]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_cmek_settings" not in self._stubs:
+            self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings",
+                request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
+                response_deserializer=logging_config.CmekSettings.deserialize,
+            )
+        return self._stubs["update_cmek_settings"]
+
+
+__all__ = ("ConfigServiceV2GrpcTransport",)
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..a4c94db22aed
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
@@ -0,0 +1,702 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.logging_v2.types import logging_config
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
+from .grpc import ConfigServiceV2GrpcTransport
+
+
+class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport):
+    """gRPC AsyncIO backend transport for ConfigServiceV2.
+
+    Service for configuring sinks used to route log entries.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._ssl_channel_credentials = ssl_channel_credentials + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse], + ]: + r"""Return a callable for the list buckets method over gRPC. + + Lists buckets (Beta). + + Returns: + Callable[[~.ListBucketsRequest], + Awaitable[~.ListBucketsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", + request_serializer=logging_config.ListBucketsRequest.serialize, + response_deserializer=logging_config.ListBucketsResponse.deserialize, + ) + return self._stubs["list_buckets"] + + @property + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], Awaitable[logging_config.LogBucket] + ]: + r"""Return a callable for the get bucket method over gRPC. + + Gets a bucket (Beta). 
+
+        Returns:
+            Callable[[~.GetBucketRequest],
+                    Awaitable[~.LogBucket]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_bucket" not in self._stubs:
+            self._stubs["get_bucket"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetBucket",
+                request_serializer=logging_config.GetBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs["get_bucket"]
+
+    @property
+    def update_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateBucketRequest], Awaitable[logging_config.LogBucket]
+    ]:
+        r"""Return a callable for the update bucket method over gRPC.
+
+        Updates a bucket. This method replaces the following fields in
+        the existing bucket with values from the new bucket:
+        ``retention_period``
+
+        If the retention period is decreased and the bucket is locked,
+        FAILED_PRECONDITION will be returned.
+
+        If the bucket has a LifecycleState of DELETE_REQUESTED,
+        FAILED_PRECONDITION will be returned.
+
+        A bucket's region may not be modified after it is created. This
+        method is in Beta.
+
+        Returns:
+            Callable[[~.UpdateBucketRequest],
+                    Awaitable[~.LogBucket]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_bucket" not in self._stubs:
+            self._stubs["update_bucket"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucket",
+                request_serializer=logging_config.UpdateBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs["update_bucket"]
+
+    @property
+    def list_sinks(
+        self,
+    ) -> Callable[
+        [logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse]
+    ]:
+        r"""Return a callable for the list sinks method over gRPC.
+
+        Lists sinks.
+
+        Returns:
+            Callable[[~.ListSinksRequest],
+                    Awaitable[~.ListSinksResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_sinks" not in self._stubs:
+            self._stubs["list_sinks"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListSinks",
+                request_serializer=logging_config.ListSinksRequest.serialize,
+                response_deserializer=logging_config.ListSinksResponse.deserialize,
+            )
+        return self._stubs["list_sinks"]
+
+    @property
+    def get_sink(
+        self,
+    ) -> Callable[[logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]:
+        r"""Return a callable for the get sink method over gRPC.
+
+        Gets a sink.
+
+        Returns:
+            Callable[[~.GetSinkRequest],
+                    Awaitable[~.LogSink]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs["get_sink"] + + @property + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink] + ]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs["create_sink"] + + @property + def update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink] + ]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs["update_sink"] + + @property + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_sink"] + + @property + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse], + ]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + Awaitable[~.ListExclusionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs["list_exclusions"] + + @property + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs["get_exclusion"] + + @property + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Returns: + Callable[[~.CreateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs["create_exclusion"] + + @property + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: + r"""Return a callable for the update exclusion method over gRPC. 
+ + Changes one or more properties of an existing + exclusion. + + Returns: + Callable[[~.UpdateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", + request_serializer=logging_config.UpdateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs["update_exclusion"] + + @property + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete exclusion method over gRPC. + + Deletes an exclusion. + + Returns: + Callable[[~.DeleteExclusionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", + request_serializer=logging_config.DeleteExclusionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_exclusion"] + + @property + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings] + ]: + r"""Return a callable for the get cmek settings method over gRPC. + + Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Returns: + Callable[[~.GetCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", + request_serializer=logging_config.GetCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs["get_cmek_settings"] + + @property + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Awaitable[logging_config.CmekSettings], + ]: + r"""Return a callable for the update cmek settings method over gRPC. + + Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. 
+ + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Returns: + Callable[[~.UpdateCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", + request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs["update_cmek_settings"] + + +__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py new file mode 100644 index 000000000000..c46b48a29424 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import LoggingServiceV2Client +from .async_client import LoggingServiceV2AsyncClient + +__all__ = ( + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py new file mode 100644 index 000000000000..e6dd57247dc3 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -0,0 +1,702 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging + +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .client import LoggingServiceV2Client + + +class LoggingServiceV2AsyncClient: + """Service for ingesting and querying logs.""" + + _client: LoggingServiceV2Client + + DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_path = staticmethod(LoggingServiceV2Client.log_path) + parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) + + common_billing_account_path = staticmethod( + LoggingServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LoggingServiceV2Client.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod( + LoggingServiceV2Client.parse_common_folder_path + ) + + common_organization_path = staticmethod( + LoggingServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + LoggingServiceV2Client.parse_common_organization_path + ) + + common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) + parse_common_project_path = staticmethod( + LoggingServiceV2Client.parse_common_project_path + ) + + common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) + parse_common_location_path = staticmethod( + LoggingServiceV2Client.parse_common_location_path + ) + + from_service_account_file = LoggingServiceV2Client.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LoggingServiceV2Transport: + """Return the transport used by the client instance. + + Returns: + LoggingServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the logging service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LoggingServiceV2Transport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = LoggingServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def delete_log( + self, + request: logging.DeleteLogRequest = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (:class:`~.logging.DeleteLogRequest`): + The request object. The parameters to DeleteLog. + log_name (:class:`str`): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging.DeleteLogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
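+ # A minimal usage sketch (resource names assumed), mirroring the
+ # docstring above:
+ #   await client.delete_log(log_name="projects/my-project-id/logs/syslog")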
+ + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_log, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def write_log_entries( + self, + request: logging.WriteLogEntriesRequest = None, + *, + log_name: str = None, + resource: monitored_resource.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (:class:`~.logging.WriteLogEntriesRequest`): + The request object. The parameters to WriteLogEntries. + log_name (:class:`str`): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`~.monitored_resource.MonitoredResource`): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]`): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. 
See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (:class:`Sequence[~.log_entry.LogEntry]`): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging.WriteLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + + if labels: + request.labels.update(labels) + + if entries: + request.entries.extend(entries) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.write_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_log_entries( + self, + request: logging.ListLogEntriesRequest = None, + *, + resource_names: Sequence[str] = None, + filter: str = None, + order_by: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogEntriesAsyncPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Args: + request (:class:`~.logging.ListLogEntriesRequest`): + The request object. The parameters to `ListLogEntries`. + resource_names (:class:`Sequence[str]`): + Required. Names of one or more parent resources from + which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Projects listed in the ``project_ids`` field are added + to this list. + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. A filter that chooses which log entries to + return. See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources + listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will + cause the filter to return no results. The maximum + length of the filter is 20000 characters. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (:class:`str`): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListLogEntriesAsyncPager: + Result returned from ``ListLogEntries``. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging.ListLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
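+ # A minimal usage sketch (project and filter expression assumed);
+ # ``order_by`` accepts only "timestamp asc" (default) and
+ # "timestamp desc":
+ #   pager = await client.list_log_entries(
+ #       resource_names=["projects/my-project-id"],
+ #       filter='severity>=ERROR',
+ #       order_by="timestamp desc",
+ #   )
+ #   async for entry in pager:
+ #       ...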
+ + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + + if resource_names: + request.resource_names.extend(resource_names) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogEntriesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_monitored_resource_descriptors( + self, + request: logging.ListMonitoredResourceDescriptorsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + The request object. The parameters to + ListMonitoredResourceDescriptors + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListMonitoredResourceDescriptorsAsyncPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_monitored_resource_descriptors, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_logs( + self, + request: logging.ListLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsAsyncPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (:class:`~.logging.ListLogsRequest`): + The request object. 
The parameters to ListLogs. + parent (:class:`str`): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListLogsAsyncPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging.ListLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LoggingServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py new file mode 100644 index 000000000000..79a9ed1af652 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -0,0 +1,845 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api import monitored_resource_pb2 as monitored_resource # type: ignore
+from google.cloud.logging_v2.services.logging_service_v2 import pagers
+from google.cloud.logging_v2.types import log_entry
+from google.cloud.logging_v2.types import logging
+
+from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LoggingServiceV2GrpcTransport
+from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport
+
+
+class LoggingServiceV2ClientMeta(type):
+ """Metaclass for the LoggingServiceV2 client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[LoggingServiceV2Transport]]
+ _transport_registry["grpc"] = LoggingServiceV2GrpcTransport
+ _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport
+
+ def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]:
+ """Return an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta):
+ """Service for ingesting and querying logs."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "logging.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ {@api.name}: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> LoggingServiceV2Transport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ LoggingServiceV2Transport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def log_path(project: str, log: str,) -> str:
+ """Return a fully-qualified log string."""
+ return "projects/{project}/logs/{log}".format(project=project, log=log,)
+
+ @staticmethod
+ def parse_log_path(path: str) -> Dict[str, str]:
+ """Parse a log path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/logs/(?P<log>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, LoggingServiceV2Transport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the logging service v2 client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.LoggingServiceV2Transport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
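+ # Summary of the branch below: an explicit client_options.api_endpoint
+ # always wins; otherwise GOOGLE_API_USE_MTLS_ENDPOINT selects "never"
+ # (regular endpoint), "always" (mTLS endpoint), or "auto" (the default:
+ # mTLS endpoint only when a client certificate is in use).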
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LoggingServiceV2Transport): + # transport is a LoggingServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def delete_log( + self, + request: logging.DeleteLogRequest = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (:class:`~.logging.DeleteLogRequest`): + The request object. The parameters to DeleteLog. + log_name (:class:`str`): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging.DeleteLogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def write_log_entries( + self, + request: logging.WriteLogEntriesRequest = None, + *, + log_name: str = None, + resource: monitored_resource.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (:class:`~.logging.WriteLogEntriesRequest`): + The request object. The parameters to WriteLogEntries. + log_name (:class:`str`): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`~.monitored_resource.MonitoredResource`): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]`): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. 
If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (:class:`Sequence[~.log_entry.LogEntry]`): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging.WriteLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + + if labels: + request.labels.update(labels) + + if entries: + request.entries.extend(entries) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
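+ # A minimal batched-call sketch (names and payload assumed); the
+ # docstring above recommends sending several entries per call:
+ #   client.write_log_entries(
+ #       log_name="projects/my-project-id/logs/syslog",
+ #       entries=[log_entry.LogEntry(text_payload="hello world")],
+ #   )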
+ return response
+
+ def list_log_entries(
+ self,
+ request: logging.ListLogEntriesRequest = None,
+ *,
+ resource_names: Sequence[str] = None,
+ filter: str = None,
+ order_by: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListLogEntriesPager:
+ r"""Lists log entries. Use this method to retrieve log entries that
+ originated from a project/folder/organization/billing account.
+ For ways to export log entries, see `Exporting
+ Logs <https://cloud.google.com/logging/docs/export>`__.
+
+ Args:
+ request (:class:`~.logging.ListLogEntriesRequest`):
+ The request object. The parameters to `ListLogEntries`.
+ resource_names (:class:`Sequence[str]`):
+ Required. Names of one or more parent resources from
+ which to retrieve log entries:
+
+ ::
+
+ "projects/[PROJECT_ID]"
+ "organizations/[ORGANIZATION_ID]"
+ "billingAccounts/[BILLING_ACCOUNT_ID]"
+ "folders/[FOLDER_ID]"
+
+ Projects listed in the ``project_ids`` field are added
+ to this list.
+ This corresponds to the ``resource_names`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ Optional. A filter that chooses which log entries to
+ return. See `Advanced Logs
+ Queries <https://cloud.google.com/logging/docs/view/advanced-queries>`__.
+ Only log entries that match the filter are returned. An
+ empty filter matches all log entries in the resources
+ listed in ``resource_names``. Referencing a parent
+ resource that is not listed in ``resource_names`` will
+ cause the filter to return no results. The maximum
+ length of the filter is 20000 characters.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ order_by (:class:`str`):
+ Optional. How the results should be sorted. Presently,
+ the only permitted values are ``"timestamp asc"``
+ (default) and ``"timestamp desc"``. The first option
+ returns entries in order of increasing values of
+ ``LogEntry.timestamp`` (oldest first), and the second
+ option returns entries in order of decreasing timestamps
+ (newest first). Entries with equal timestamps are
+ returned in order of their ``insert_id`` values.
+ This corresponds to the ``order_by`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListLogEntriesPager:
+ Result returned from ``ListLogEntries``.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([resource_names, filter, order_by])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a logging.ListLogEntriesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
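[Editor's aside] The sanity check above enforces that callers pick one style per call: either a prebuilt request object or flattened keyword arguments, never both. A hedged sketch of that contract; the client construction and resource names are placeholders, not code from this patch:

```python
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2Client,
)
from google.cloud.logging_v2.types import logging as logging_types

client = LoggingServiceV2Client()
request = logging_types.ListLogEntriesRequest(
    resource_names=["projects/my-project"],
)

# Either a fully-formed request object...
pager = client.list_log_entries(request=request)

# ...or flattened fields; mixing the two raises ValueError.
try:
    client.list_log_entries(request=request, filter="severity>=ERROR")
except ValueError:
    pass  # "If the `request` argument is set, then none of ..."
```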
+ if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + + if resource_names: + request.resource_names.extend(resource_names) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogEntriesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_monitored_resource_descriptors( + self, + request: logging.ListMonitoredResourceDescriptorsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + The request object. The parameters to + ListMonitoredResourceDescriptors + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListMonitoredResourceDescriptorsPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListMonitoredResourceDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_monitored_resource_descriptors + ] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_logs( + self, + request: logging.ListLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (:class:`~.logging.ListLogsRequest`): + The request object. The parameters to ListLogs. + parent (:class:`str`): + Required. 
The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListLogsPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListLogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LoggingServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py new file mode 100644 index 000000000000..72bbe8e23aa2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging + + +class ListLogEntriesPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListLogEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging.ListLogEntriesRequest`): + The initial request object. + response (:class:`~.logging.ListLogEntriesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[log_entry.LogEntry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLogEntriesAsyncPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListLogEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.logging.ListLogEntriesRequest`): + The initial request object. + response (:class:`~.logging.ListLogEntriesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + The initial request object. + response (:class:`~.logging.ListMonitoredResourceDescriptorsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[monitored_resource.MonitoredResourceDescriptor]: + for page in self.pages: + yield from page.resource_descriptors + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsAsyncPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + The initial request object. + response (:class:`~.logging.ListMonitoredResourceDescriptorsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterable[monitored_resource.MonitoredResourceDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.resource_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLogsPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListLogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``log_names`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging.ListLogsRequest`): + The initial request object. + response (:class:`~.logging.ListLogsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.log_names + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLogsAsyncPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`~.logging.ListLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`~.logging.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging.ListLogsRequest`): + The initial request object. + response (:class:`~.logging.ListLogsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.log_names: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py new file mode 100644 index 000000000000..910a38ecdb10 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import LoggingServiceV2Transport +from .grpc import LoggingServiceV2GrpcTransport +from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] +_transport_registry["grpc"] = LoggingServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport + + +__all__ = ( + "LoggingServiceV2Transport", + "LoggingServiceV2GrpcTransport", + "LoggingServiceV2GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py new file mode 100644 index 000000000000..c8bcbcbf9524 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.cloud.logging_v2.types import logging
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class LoggingServiceV2Transport(abc.ABC):
+ """Abstract transport class for LoggingServiceV2."""
+
+ AUTH_SCOPES = (
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ "https://www.googleapis.com/auth/logging.admin",
+ "https://www.googleapis.com/auth/logging.read",
+ "https://www.googleapis.com/auth/logging.write",
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "logging.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = { + self.delete_log: gapic_v1.method.wrap_method( + self.delete_log, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: gapic_v1.method.wrap_method( + self.write_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: gapic_v1.method.wrap_method( + self.list_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: gapic_v1.method.wrap_method( + self.list_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def delete_log( + self, + ) -> typing.Callable[ + [logging.DeleteLogRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def write_log_entries( + self, + ) -> typing.Callable[ + [logging.WriteLogEntriesRequest], + typing.Union[ + logging.WriteLogEntriesResponse, + typing.Awaitable[logging.WriteLogEntriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_log_entries( + self, + ) -> typing.Callable[ + [logging.ListLogEntriesRequest], + typing.Union[ + logging.ListLogEntriesResponse, + typing.Awaitable[logging.ListLogEntriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_monitored_resource_descriptors( + self, + ) -> typing.Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + typing.Union[ + logging.ListMonitoredResourceDescriptorsResponse, + typing.Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_logs( + self, + ) -> typing.Callable[ + [logging.ListLogsRequest], + typing.Union[ + logging.ListLogsResponse, typing.Awaitable[logging.ListLogsResponse] + ], + ]: + raise NotImplementedError() + + +__all__ = ("LoggingServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py new file mode 100644 index 000000000000..4c0636e47e8c --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -0,0 +1,384 @@ +# -*- coding: 
utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.logging_v2.types import logging
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO
+
+
+class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport):
+ """gRPC backend transport for LoggingServiceV2.
+
+ Service for ingesting and querying logs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "logging.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel.
It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "logging.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]:
+ r"""Return a callable for the delete log method over gRPC.
+
+ Deletes all the log entries in a log. The log
+ reappears if it receives new entries. Log entries
+ written shortly before the delete operation might not be
+ deleted. Entries received after the delete operation
+ with a timestamp before the operation will be deleted.
+
+ Returns:
+ Callable[[~.DeleteLogRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_log" not in self._stubs:
+ self._stubs["delete_log"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.LoggingServiceV2/DeleteLog",
+ request_serializer=logging.DeleteLogRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_log"]
+
+ @property
+ def write_log_entries(
+ self,
+ ) -> Callable[[logging.WriteLogEntriesRequest], logging.WriteLogEntriesResponse]:
+ r"""Return a callable for the write log entries method over gRPC.
+
+ Writes log entries to Logging. This API method is the
+ only way to send log entries to Logging. This method is
+ used, directly or indirectly, by the Logging agent
+ (fluentd) and all logging libraries configured to use
+ Logging. A single request may contain log entries for a
+ maximum of 1000 different resources (projects,
+ organizations, billing accounts or folders)
+
+ Returns:
+ Callable[[~.WriteLogEntriesRequest],
+ ~.WriteLogEntriesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "write_log_entries" not in self._stubs:
+ self._stubs["write_log_entries"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.LoggingServiceV2/WriteLogEntries",
+ request_serializer=logging.WriteLogEntriesRequest.serialize,
+ response_deserializer=logging.WriteLogEntriesResponse.deserialize,
+ )
+ return self._stubs["write_log_entries"]
+
+ @property
+ def list_log_entries(
+ self,
+ ) -> Callable[[logging.ListLogEntriesRequest], logging.ListLogEntriesResponse]:
+ r"""Return a callable for the list log entries method over gRPC.
+
+ Lists log entries.
Use this method to retrieve log entries that
+ originated from a project/folder/organization/billing account.
+ For ways to export log entries, see `Exporting
+ Logs <https://cloud.google.com/logging/docs/export>`__.
+
+ Returns:
+ Callable[[~.ListLogEntriesRequest],
+ ~.ListLogEntriesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_log_entries" not in self._stubs:
+ self._stubs["list_log_entries"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.LoggingServiceV2/ListLogEntries",
+ request_serializer=logging.ListLogEntriesRequest.serialize,
+ response_deserializer=logging.ListLogEntriesResponse.deserialize,
+ )
+ return self._stubs["list_log_entries"]
+
+ @property
+ def list_monitored_resource_descriptors(
+ self,
+ ) -> Callable[
+ [logging.ListMonitoredResourceDescriptorsRequest],
+ logging.ListMonitoredResourceDescriptorsResponse,
+ ]:
+ r"""Return a callable for the list monitored resource
+ descriptors method over gRPC.
+
+ Lists the descriptors for monitored resource types
+ used by Logging.
+
+ Returns:
+ Callable[[~.ListMonitoredResourceDescriptorsRequest],
+ ~.ListMonitoredResourceDescriptorsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_monitored_resource_descriptors" not in self._stubs:
+ self._stubs[
+ "list_monitored_resource_descriptors"
+ ] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors",
+ request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize,
+ response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize,
+ )
+ return self._stubs["list_monitored_resource_descriptors"]
+
+ @property
+ def list_logs(
+ self,
+ ) -> Callable[[logging.ListLogsRequest], logging.ListLogsResponse]:
+ r"""Return a callable for the list logs method over gRPC.
+
+ Lists the logs in projects, organizations, folders,
+ or billing accounts. Only logs that have entries are
+ listed.
+
+ Returns:
+ Callable[[~.ListLogsRequest],
+ ~.ListLogsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self.grpc_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs["list_logs"] + + +__all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..8a26a078e1fa --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import LoggingServiceV2GrpcTransport + + +class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): + """gRPC AsyncIO backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "logging.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self.grpc_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_log"] + + @property + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], Awaitable[logging.WriteLogEntriesResponse] + ]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. 
This method is
+        used, directly or indirectly, by the Logging agent
+        (fluentd) and all logging libraries configured to use
+        Logging. A single request may contain log entries for a
+        maximum of 1000 different resources (projects,
+        organizations, billing accounts or folders).
+
+        Returns:
+            Callable[[~.WriteLogEntriesRequest],
+                    Awaitable[~.WriteLogEntriesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "write_log_entries" not in self._stubs:
+            self._stubs["write_log_entries"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.LoggingServiceV2/WriteLogEntries",
+                request_serializer=logging.WriteLogEntriesRequest.serialize,
+                response_deserializer=logging.WriteLogEntriesResponse.deserialize,
+            )
+        return self._stubs["write_log_entries"]
+
+    @property
+    def list_log_entries(
+        self,
+    ) -> Callable[
+        [logging.ListLogEntriesRequest], Awaitable[logging.ListLogEntriesResponse]
+    ]:
+        r"""Return a callable for the list log entries method over gRPC.
+
+        Lists log entries. Use this method to retrieve log entries that
+        originated from a project/folder/organization/billing account.
+        For ways to export log entries, see `Exporting
+        Logs <https://cloud.google.com/logging/docs/export>`__.
+
+        Returns:
+            Callable[[~.ListLogEntriesRequest],
+                    Awaitable[~.ListLogEntriesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_log_entries" not in self._stubs:
+            self._stubs["list_log_entries"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.LoggingServiceV2/ListLogEntries",
+                request_serializer=logging.ListLogEntriesRequest.serialize,
+                response_deserializer=logging.ListLogEntriesResponse.deserialize,
+            )
+        return self._stubs["list_log_entries"]
+
+    @property
+    def list_monitored_resource_descriptors(
+        self,
+    ) -> Callable[
+        [logging.ListMonitoredResourceDescriptorsRequest],
+        Awaitable[logging.ListMonitoredResourceDescriptorsResponse],
+    ]:
+        r"""Return a callable for the list monitored resource
+        descriptors method over gRPC.
+
+        Lists the descriptors for monitored resource types
+        used by Logging.
+
+        Returns:
+            Callable[[~.ListMonitoredResourceDescriptorsRequest],
+                    Awaitable[~.ListMonitoredResourceDescriptorsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_monitored_resource_descriptors" not in self._stubs:
+            self._stubs[
+                "list_monitored_resource_descriptors"
+            ] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors",
+                request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize,
+                response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize,
+            )
+        return self._stubs["list_monitored_resource_descriptors"]
+
+    @property
+    def list_logs(
+        self,
+    ) -> Callable[[logging.ListLogsRequest], Awaitable[logging.ListLogsResponse]]:
+        r"""Return a callable for the list logs method over gRPC.
+ + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + Awaitable[~.ListLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self.grpc_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs["list_logs"] + + +__all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py new file mode 100644 index 000000000000..c857ea037ff3 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import MetricsServiceV2Client +from .async_client import MetricsServiceV2AsyncClient + +__all__ = ( + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py new file mode 100644 index 000000000000..93dfbd71b33d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -0,0 +1,627 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import distribution_pb2 as distribution # type: ignore +from google.api import metric_pb2 as ga_metric # type: ignore +from google.api import metric_pb2 as metric # type: ignore +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .client import MetricsServiceV2Client + + +class MetricsServiceV2AsyncClient: + """Service for configuring logs-based metrics.""" + + _client: MetricsServiceV2Client + + DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) + parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) + + common_billing_account_path = staticmethod( + MetricsServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MetricsServiceV2Client.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod( + MetricsServiceV2Client.parse_common_folder_path + ) + + common_organization_path = staticmethod( + MetricsServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + MetricsServiceV2Client.parse_common_organization_path + ) + + common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) + parse_common_project_path = staticmethod( + MetricsServiceV2Client.parse_common_project_path + ) + + common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) + parse_common_location_path = staticmethod( + MetricsServiceV2Client.parse_common_location_path + ) + + from_service_account_file = MetricsServiceV2Client.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsServiceV2Transport: + """Return the transport used by the client instance. + + Returns: + MetricsServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricsServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = MetricsServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_log_metrics( + self, + request: logging_metrics.ListLogMetricsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogMetricsAsyncPager: + r"""Lists logs-based metrics. + + Args: + request (:class:`~.logging_metrics.ListLogMetricsRequest`): + The request object. The parameters to ListLogMetrics. + parent (:class:`str`): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListLogMetricsAsyncPager: + Result returned from ListLogMetrics. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_metrics.ListLogMetricsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
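+        # As a concrete reading of the configuration below: transient errors
+        # (DeadlineExceeded, InternalServerError, ServiceUnavailable) are
+        # retried with exponential backoff starting at 0.1s, growing by a
+        # factor of 1.3 and capped at 60s, under a 60s default timeout.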
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_log_metrics,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    exceptions.DeadlineExceeded,
+                    exceptions.InternalServerError,
+                    exceptions.ServiceUnavailable,
+                ),
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListLogMetricsAsyncPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_log_metric(
+        self,
+        request: logging_metrics.GetLogMetricRequest = None,
+        *,
+        metric_name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Gets a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.GetLogMetricRequest`):
+                The request object. The parameters to GetLogMetric.
+            metric_name (:class:`str`):
+                Required. The resource name of the desired metric:
+
+                ::
+
+                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]".
+                This corresponds to the ``metric_name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([metric_name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = logging_metrics.GetLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if metric_name is not None:
+            request.metric_name = metric_name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_log_metric,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    exceptions.DeadlineExceeded,
+                    exceptions.InternalServerError,
+                    exceptions.ServiceUnavailable,
+                ),
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("metric_name", request.metric_name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def create_log_metric(
+        self,
+        request: logging_metrics.CreateLogMetricRequest = None,
+        *,
+        parent: str = None,
+        metric: logging_metrics.LogMetric = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Creates a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.CreateLogMetricRequest`):
+                The request object. The parameters to CreateLogMetric.
+            parent (:class:`str`):
+                Required. The resource name of the project in which to
+                create the metric:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+
+                The new metric must be provided in the request.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            metric (:class:`~.logging_metrics.LogMetric`):
+                Required. The new logs-based metric,
+                which must not have an identifier that
+                already exists.
+                This corresponds to the ``metric`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, metric])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = logging_metrics.CreateLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if parent is not None:
+            request.parent = parent
+        if metric is not None:
+            request.metric = metric
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_log_metric,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
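+        # (For example, parent="projects/my-project" — a placeholder ID —
+        # is sent as the "x-goog-request-params" gRPC metadata entry, which
+        # the backend uses for request routing.)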
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def update_log_metric(
+        self,
+        request: logging_metrics.UpdateLogMetricRequest = None,
+        *,
+        metric_name: str = None,
+        metric: logging_metrics.LogMetric = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Creates or updates a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.UpdateLogMetricRequest`):
+                The request object. The parameters to UpdateLogMetric.
+            metric_name (:class:`str`):
+                Required. The resource name of the metric to update:
+
+                ::
+
+                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+
+                The updated metric must be provided in the request and
+                its ``name`` field must be the same as ``[METRIC_ID]``.
+                If the metric does not exist in ``[PROJECT_ID]``, then a
+                new metric is created.
+                This corresponds to the ``metric_name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            metric (:class:`~.logging_metrics.LogMetric`):
+                Required. The updated metric.
+                This corresponds to the ``metric`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([metric_name, metric])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = logging_metrics.UpdateLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if metric_name is not None:
+            request.metric_name = metric_name
+        if metric is not None:
+            request.metric = metric
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_log_metric,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    exceptions.DeadlineExceeded,
+                    exceptions.InternalServerError,
+                    exceptions.ServiceUnavailable,
+                ),
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
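+        # (Here the routing parameter is the full metric resource name,
+        # e.g. "projects/my-project/metrics/my-metric" with placeholder IDs.)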
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_log_metric( + self, + request: logging_metrics.DeleteLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + Args: + request (:class:`~.logging_metrics.DeleteLogMetricRequest`): + The request object. The parameters to DeleteLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_metrics.DeleteLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_log_metric, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MetricsServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py new file mode 100644 index 000000000000..f4bca39263cf --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.api import distribution_pb2 as distribution  # type: ignore
+from google.api import metric_pb2 as ga_metric  # type: ignore
+from google.api import metric_pb2 as metric  # type: ignore
+from google.cloud.logging_v2.services.metrics_service_v2 import pagers
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+
+from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import MetricsServiceV2GrpcTransport
+from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport
+
+
+class MetricsServiceV2ClientMeta(type):
+    """Metaclass for the MetricsServiceV2 client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[MetricsServiceV2Transport]]
+    _transport_registry["grpc"] = MetricsServiceV2GrpcTransport
+    _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta):
+    """Service for configuring logs-based metrics."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
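+            # e.g. "logging.sandbox.googleapis.com" matches with
+            # name="logging", sandbox=".sandbox", and
+            # googledomain=".googleapis.com", so the branch below returns
+            # "logging.mtls.sandbox.googleapis.com".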
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "logging.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetricsServiceV2Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MetricsServiceV2Transport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            MetricsServiceV2Transport: The transport used by the client instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def log_metric_path(project: str, metric: str,) -> str:
+        """Return a fully-qualified log_metric string."""
+        return "projects/{project}/metrics/{metric}".format(
+            project=project, metric=metric,
+        )
+
+    @staticmethod
+    def parse_log_metric_path(path: str) -> Dict[str, str]:
+        """Parse a log_metric path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/metrics/(?P<metric>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, MetricsServiceV2Transport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the metrics service v2 client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.MetricsServiceV2Transport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
+        use_client_cert = bool(
+            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        )
+
+        ssl_credentials = None
+        is_mtls = False
+        if use_client_cert:
+            if client_options.client_cert_source:
+                import grpc  # type: ignore
+
+                cert, key = client_options.client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+                is_mtls = True
+            else:
+                creds = SslCredentials()
+                is_mtls = creds.is_mtls
+                ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+        # Figure out which api endpoint to use.
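+        # (For instance, GOOGLE_API_USE_MTLS_ENDPOINT="always" selects
+        # "logging.mtls.googleapis.com" even when no client certificate
+        # was configured above.)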
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricsServiceV2Transport): + # transport is a MetricsServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_log_metrics( + self, + request: logging_metrics.ListLogMetricsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogMetricsPager: + r"""Lists logs-based metrics. + + Args: + request (:class:`~.logging_metrics.ListLogMetricsRequest`): + The request object. The parameters to ListLogMetrics. + parent (:class:`str`): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListLogMetricsPager: + Result returned from ListLogMetrics. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.ListLogMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
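+        # (Flattened-call sketch, with "my-project" as a placeholder ID:
+        #  client.list_log_metrics(parent="projects/my-project") behaves the
+        #  same as passing a ListLogMetricsRequest with that parent set.)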
+
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_log_metrics]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListLogMetricsPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_log_metric(
+        self,
+        request: logging_metrics.GetLogMetricRequest = None,
+        *,
+        metric_name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Gets a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.GetLogMetricRequest`):
+                The request object. The parameters to GetLogMetric.
+            metric_name (:class:`str`):
+                Required. The resource name of the desired metric:
+
+                ::
+
+                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]".
+                This corresponds to the ``metric_name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([metric_name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_metrics.GetLogMetricRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_metrics.GetLogMetricRequest):
+            request = logging_metrics.GetLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if metric_name is not None:
+            request.metric_name = metric_name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_log_metric]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("metric_name", request.metric_name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def create_log_metric(
+        self,
+        request: logging_metrics.CreateLogMetricRequest = None,
+        *,
+        parent: str = None,
+        metric: logging_metrics.LogMetric = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Creates a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.CreateLogMetricRequest`):
+                The request object. The parameters to CreateLogMetric.
+            parent (:class:`str`):
+                Required. The resource name of the project in which to
+                create the metric:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+
+                The new metric must be provided in the request.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            metric (:class:`~.logging_metrics.LogMetric`):
+                Required. The new logs-based metric,
+                which must not have an identifier that
+                already exists.
+                This corresponds to the ``metric`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, metric])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_metrics.CreateLogMetricRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_metrics.CreateLogMetricRequest):
+            request = logging_metrics.CreateLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if parent is not None:
+            request.parent = parent
+        if metric is not None:
+            request.metric = metric
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_log_metric]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def update_log_metric(
+        self,
+        request: logging_metrics.UpdateLogMetricRequest = None,
+        *,
+        metric_name: str = None,
+        metric: logging_metrics.LogMetric = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_metrics.LogMetric:
+        r"""Creates or updates a logs-based metric.
+
+        Args:
+            request (:class:`~.logging_metrics.UpdateLogMetricRequest`):
+                The request object. The parameters to UpdateLogMetric.
+            metric_name (:class:`str`):
+                Required. The resource name of the metric to update:
+
+                ::
+
+                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+
+                The updated metric must be provided in the request and
+                its ``name`` field must be the same as ``[METRIC_ID]``.
+                If the metric does not exist in ``[PROJECT_ID]``, then a
+                new metric is created.
+                This corresponds to the ``metric_name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            metric (:class:`~.logging_metrics.LogMetric`):
+                Required. The updated metric.
+                This corresponds to the ``metric`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.logging_metrics.LogMetric:
+                Describes a logs-based metric. The
+                value of the metric is the number of log
+                entries that match a logs filter in a
+                given time interval.
+                Logs-based metrics can also be used to
+                extract values from logs and create a
+                distribution of the values. The
+                distribution records the statistics of
+                the extracted values along with an
+                optional histogram of the values as
+                specified by the bucket options.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([metric_name, metric])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_metrics.UpdateLogMetricRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_metrics.UpdateLogMetricRequest):
+            request = logging_metrics.UpdateLogMetricRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if metric_name is not None:
+            request.metric_name = metric_name
+        if metric is not None:
+            request.metric = metric
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_log_metric]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("metric_name", request.metric_name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + def delete_log_metric( + self, + request: logging_metrics.DeleteLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + Args: + request (:class:`~.logging_metrics.DeleteLogMetricRequest`): + The request object. The parameters to DeleteLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.DeleteLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MetricsServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py new file mode 100644 index 000000000000..09010a6858b2 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.logging_v2.types import logging_metrics + + +class ListLogMetricsPager: + """A pager for iterating through ``list_log_metrics`` requests. + + This class thinly wraps an initial + :class:`~.logging_metrics.ListLogMetricsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metrics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogMetrics`` requests and continue to iterate + through the ``metrics`` field on the + corresponding responses. + + All the usual :class:`~.logging_metrics.ListLogMetricsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_metrics.ListLogMetricsResponse], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_metrics.ListLogMetricsRequest`): + The initial request object. + response (:class:`~.logging_metrics.ListLogMetricsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_metrics.ListLogMetricsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_metrics.LogMetric]: + for page in self.pages: + yield from page.metrics + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLogMetricsAsyncPager: + """A pager for iterating through ``list_log_metrics`` requests. + + This class thinly wraps an initial + :class:`~.logging_metrics.ListLogMetricsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metrics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogMetrics`` requests and continue to iterate + through the ``metrics`` field on the + corresponding responses. + + All the usual :class:`~.logging_metrics.ListLogMetricsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.logging_metrics.ListLogMetricsRequest`): + The initial request object. + response (:class:`~.logging_metrics.ListLogMetricsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_metrics.ListLogMetricsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]: + async def async_generator(): + async for page in self.pages: + for response in page.metrics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py new file mode 100644 index 000000000000..eef07abd795d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetricsServiceV2Transport +from .grpc import MetricsServiceV2GrpcTransport +from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] +_transport_registry["grpc"] = MetricsServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport + + +__all__ = ( + "MetricsServiceV2Transport", + "MetricsServiceV2GrpcTransport", + "MetricsServiceV2GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py new file mode 100644 index 000000000000..78d226dfa6ec --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class MetricsServiceV2Transport(abc.ABC):
+    """Abstract transport class for MetricsServiceV2."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/logging.read",
+        "https://www.googleapis.com/auth/logging.write",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
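+        # At this point ``credentials`` holds either the explicit object the
+        # caller passed in, credentials loaded from ``credentials_file``, or
+        # application default credentials resolved from the environment.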
+ self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_log_metrics: gapic_v1.method.wrap_method( + self.list_log_metrics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_log_metric: gapic_v1.method.wrap_method( + self.get_log_metric, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_log_metric: gapic_v1.method.wrap_method( + self.create_log_metric, default_timeout=60.0, client_info=client_info, + ), + self.update_log_metric: gapic_v1.method.wrap_method( + self.update_log_metric, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_log_metric: gapic_v1.method.wrap_method( + self.delete_log_metric, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def list_log_metrics( + self, + ) -> typing.Callable[ + [logging_metrics.ListLogMetricsRequest], + typing.Union[ + logging_metrics.ListLogMetricsResponse, + typing.Awaitable[logging_metrics.ListLogMetricsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_log_metric( + self, + ) -> typing.Callable[ + [logging_metrics.GetLogMetricRequest], + typing.Union[ + logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] + ], + ]: + raise NotImplementedError() + + @property + def create_log_metric( + self, + ) -> typing.Callable[ + [logging_metrics.CreateLogMetricRequest], + typing.Union[ + logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] + ], + ]: + raise NotImplementedError() + + @property + def update_log_metric( + self, + ) -> typing.Callable[ + [logging_metrics.UpdateLogMetricRequest], + typing.Union[ + logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] + ], + ]: + raise NotImplementedError() + + @property + def delete_log_metric( + self, + ) -> typing.Callable[ + [logging_metrics.DeleteLogMetricRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + +__all__ = ("MetricsServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py new file mode 100644 index 000000000000..0a6f25bd6dce --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- + +# Copyright 
2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+
+
+class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
+    """gRPC backend transport for MetricsServiceV2.
+
+    Service for configuring logs-based metrics.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel.
+                It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_log_metrics(
+        self,
+    ) -> Callable[
+        [logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse
+    ]:
+        r"""Return a callable for the list log metrics method over gRPC.
+
+        Lists logs-based metrics.
+
+        Returns:
+            Callable[[~.ListLogMetricsRequest],
+                    ~.ListLogMetricsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_log_metrics" not in self._stubs:
+            self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/ListLogMetrics",
+                request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
+                response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
+            )
+        return self._stubs["list_log_metrics"]
+
+    @property
+    def get_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]:
+        r"""Return a callable for the get log metric method over gRPC.
+
+        Gets a logs-based metric.
+
+        Returns:
+            Callable[[~.GetLogMetricRequest],
+                    ~.LogMetric]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_log_metric" not in self._stubs:
+            self._stubs["get_log_metric"] = self.grpc_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/GetLogMetric",
+                request_serializer=logging_metrics.GetLogMetricRequest.serialize,
+                response_deserializer=logging_metrics.LogMetric.deserialize,
+            )
+        return self._stubs["get_log_metric"]
+
+    @property
+    def create_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]:
+        r"""Return a callable for the create log metric method over gRPC.
+
+        Creates a logs-based metric.
+
+        Returns:
+            Callable[[~.CreateLogMetricRequest],
+                    ~.LogMetric]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
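+        # The stub is created once per transport instance and cached in
+        # ``self._stubs``; subsequent property accesses return the same
+        # callable rather than building a new one.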
+ if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs["create_log_metric"] + + @property + def update_log_metric( + self, + ) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs["update_log_metric"] + + @property + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty.Empty]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_log_metric"] + + +__all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..9ec30eed080f --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,377 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+from .grpc import MetricsServiceV2GrpcTransport
+
+
+class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
+    """gRPC AsyncIO backend transport for MetricsServiceV2.
+
+    Service for configuring logs-based metrics.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
+                These credentials identify the application to the service;
+                if none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse], + ]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + Awaitable[~.ListLogMetricsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs["list_log_metrics"] + + @property + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs["get_log_metric"] + + @property + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs["create_log_metric"] + + @property + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs["update_log_metric"] + + @property + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_log_metric"] + + +__all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/sink.py b/packages/google-cloud-logging/google/cloud/logging_v2/sink.py new file mode 100644 index 000000000000..43dd2208cfc7 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/sink.py @@ -0,0 +1,233 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Cloud Logging API Sinks.""" + +from google.cloud.exceptions import NotFound + + +class Sink(object): + """Sinks represent filtered exports for log entries. 
+
+    See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+    """
+
+    def __init__(
+        self, name, *, filter_=None, parent=None, destination=None, client=None
+    ):
+        """
+        Args:
+            name (str): The name of the sink.
+            parent (Optional[str]): The resource in which to create the sink:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+
+                Defaults to the project stored on the client.
+            filter_ (Optional[str]): The advanced logs filter expression defining
+                the entries exported by the sink.
+            destination (Optional[str]): Destination URI for the entries exported by the sink.
+                If not passed, the instance should already exist, to
+                be refreshed via :meth:`reload`.
+            client (Optional[~logging_v2.client.Client]): A client which holds
+                credentials and project configuration for the sink (which requires a project).
+        """
+        self.name = name
+        self.filter_ = filter_
+        self.destination = destination
+        self._client = client
+        self._parent = parent
+        self._writer_identity = None
+
+    @property
+    def client(self):
+        """Client bound to the sink."""
+        return self._client
+
+    @property
+    def parent(self):
+        """Parent resource of the sink (project, organization, billingAccount, or folder)."""
+        if self._parent is None:
+            self._parent = f"projects/{self.client.project}"
+        return self._parent
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in sink APIs."""
+        return f"{self.parent}/sinks/{self.name}"
+
+    @property
+    def path(self):
+        """URL path for the sink's APIs."""
+        return f"/{self.full_name}"
+
+    @property
+    def writer_identity(self):
+        """Identity used for exports via the sink."""
+        return self._writer_identity
+
+    def _update_from_api_repr(self, resource):
+        """Helper for API methods returning sink resources."""
+        self.destination = resource["destination"]
+        self.filter_ = resource.get("filter")
+        self._writer_identity = resource.get("writerIdentity")
+
+    @classmethod
+    def from_api_repr(cls, resource, client, *, parent=None):
+        """Construct a sink given its API representation.
+
+        Args:
+            resource (dict): sink resource representation returned from the API
+            client (~logging_v2.client.Client): Client which holds
+                credentials and project configuration for the sink.
+            parent (Optional[str]): The resource in which to create the sink:
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]".
+
+                Defaults to the project stored on the client.
+
+        Returns:
+            ~logging_v2.sink.Sink: Sink parsed from ``resource``.
+
+        Raises:
+            ValueError: if ``client`` is not ``None`` and the
+                project from the resource does not agree with the project
+                from the client.
+        """
+        sink_name = resource["name"]
+        instance = cls(sink_name, client=client, parent=parent)
+        instance._update_from_api_repr(resource)
+        return instance
+
+    def _require_client(self, client):
+        """Check client or verify override.
+
+        Args:
+            client (Union[None, ~logging_v2.client.Client]):
+                The client to use. If not passed, falls back to the
+                ``client`` stored on the current sink.
+
+        Returns:
+            ~logging_v2.client.Client: The client passed in
+                or the currently bound client.
+ """ + if client is None: + client = self._client + return client + + def create(self, *, client=None, unique_writer_identity=False): + """Create the sink via a PUT request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + unique_writer_identity (Optional[bool]): Determines the kind of + IAM identity returned as writer_identity in the new sink. + """ + client = self._require_client(client) + resource = client.sinks_api.sink_create( + self.parent, + self.name, + self.filter_, + self.destination, + unique_writer_identity=unique_writer_identity, + ) + self._update_from_api_repr(resource) + + def exists(self, *, client=None): + """Test for the existence of the sink via a GET request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + + Returns: + bool: Boolean indicating existence of the sink. + """ + client = self._require_client(client) + + try: + client.sinks_api.sink_get(self.full_name) + except NotFound: + return False + else: + return True + + def reload(self, *, client=None): + """Sync local sink configuration via a GET request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + resource = client.sinks_api.sink_get(self.full_name) + self._update_from_api_repr(resource) + + def update(self, *, client=None, unique_writer_identity=False): + """Update sink configuration via a PUT request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + unique_writer_identity (Optional[bool]): Determines the kind of + IAM identity returned as writer_identity in the new sink. + """ + client = self._require_client(client) + resource = client.sinks_api.sink_update( + self.full_name, + self.filter_, + self.destination, + unique_writer_identity=unique_writer_identity, + ) + self._update_from_api_repr(resource) + + def delete(self, *, client=None): + """Delete a sink via a DELETE request + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete + + Args: + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_delete(self.full_name) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types.py b/packages/google-cloud-logging/google/cloud/logging_v2/types.py deleted file mode 100644 index 464edbe709dc..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys - -from google.api import distribution_pb2 -from google.api import http_pb2 -from google.api import label_pb2 -from google.api import metric_pb2 -from google.api import monitored_resource_pb2 -from google.logging.type import http_request_pb2 -from google.protobuf import any_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_pb2 - - -_shared_modules = [ - distribution_pb2, - http_pb2, - label_pb2, - metric_pb2, - monitored_resource_pb2, - http_request_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [log_entry_pb2, logging_config_pb2, logging_metrics_pb2, logging_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.logging_v2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py new file mode 100644 index 000000000000..55161ba5f46c --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
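+#
+# A short usage sketch: the package below re-exports every proto-plus
+# message, so request objects can be built from keyword arguments or
+# coerced from a plain dict (``my-project`` is a placeholder):
+#
+#     from google.cloud.logging_v2.types import ListLogMetricsRequest
+#
+#     request = ListLogMetricsRequest(parent="projects/my-project", page_size=10)
+#     same = ListLogMetricsRequest({"parent": "projects/my-project", "page_size": 10})
+#     assert request.parent == same.parent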
+# + +from .log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, +) +from .logging_config import ( + LogBucket, + LogSink, + BigQueryOptions, + ListBucketsRequest, + ListBucketsResponse, + UpdateBucketRequest, + GetBucketRequest, + ListSinksRequest, + ListSinksResponse, + GetSinkRequest, + CreateSinkRequest, + UpdateSinkRequest, + DeleteSinkRequest, + LogExclusion, + ListExclusionsRequest, + ListExclusionsResponse, + GetExclusionRequest, + CreateExclusionRequest, + UpdateExclusionRequest, + DeleteExclusionRequest, + GetCmekSettingsRequest, + UpdateCmekSettingsRequest, + CmekSettings, + LifecycleState, +) +from .logging_metrics import ( + LogMetric, + ListLogMetricsRequest, + ListLogMetricsResponse, + GetLogMetricRequest, + CreateLogMetricRequest, + UpdateLogMetricRequest, + DeleteLogMetricRequest, +) +from .logging import ( + DeleteLogRequest, + WriteLogEntriesRequest, + WriteLogEntriesResponse, + WriteLogEntriesPartialErrors, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + ListLogsRequest, + ListLogsResponse, +) + +__all__ = ( + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogBucket", + "LogSink", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "UpdateBucketRequest", + "GetBucketRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + "LifecycleState", + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", +) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py new file mode 100644 index 000000000000..a481557fd1fd --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
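+#
+# A brief sketch of the ``payload`` oneof on the ``LogEntry`` message defined
+# below: assigning one payload member clears the others (``my-project`` is a
+# placeholder):
+#
+#     from google.cloud.logging_v2.types import LogEntry
+#
+#     entry = LogEntry(log_name="projects/my-project/logs/app", text_payload="hi")
+#     entry.json_payload = {"message": "hi"}  # switches the oneof
+#     assert entry.text_payload == ""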
+# + +import proto # type: ignore + + +from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.logging.type import http_request_pb2 as glt_http_request # type: ignore +from google.logging.type import log_severity_pb2 as log_severity # type: ignore +from google.protobuf import any_pb2 as gp_any # type: ignore +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as gp_timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.logging.v2", + manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation",}, +) + + +class LogEntry(proto.Message): + r"""An individual entry in a log. + + Attributes: + log_name (str): + Required. The resource name of the log to which this log + entry belongs: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + A project number may be used in place of PROJECT_ID. The + project number is translated to its corresponding PROJECT_ID + internally and the ``log_name`` field will contain + PROJECT_ID in queries and exports. + + ``[LOG_ID]`` must be URL-encoded within ``log_name``. + Example: + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can + only include the following characters: upper and lower case + alphanumeric characters, forward-slash, underscore, hyphen, + and period. + + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. + Listing the log entry will not show the leading slash and + filtering for a log name with a leading slash will never + return any results. + resource (~.monitored_resource.MonitoredResource): + Required. The monitored resource that + produced this log entry. + Example: a log entry that reports a database + error would be associated with the monitored + resource designating the particular database + that reported the error. + proto_payload (~.gp_any.Any): + The log entry payload, represented as a + protocol buffer. Some Google Cloud Platform + services use this field for their log entry + payloads. + The following protocol buffer types are + supported; user-defined types are not supported: + + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog". + text_payload (str): + The log entry payload, represented as a + Unicode string (UTF-8). + json_payload (~.struct.Struct): + The log entry payload, represented as a + structure that is expressed as a JSON object. + timestamp (~.gp_timestamp.Timestamp): + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age + and to enforce the logs retention period. If this field is + omitted in a new log entry, then Logging assigns it the + current time. Timestamps have nanosecond accuracy, but + trailing zeros in the fractional seconds might be omitted + when the timestamp is displayed. + + Incoming log entries must have timestamps that don't exceed + the `logs retention + period `__ + in the past, and that don't exceed 24 hours in the future. + Log entries outside those time boundaries aren't ingested by + Logging. + receive_timestamp (~.gp_timestamp.Timestamp): + Output only. 
The time the log entry was + received by Logging. + severity (~.log_severity.LogSeverity): + Optional. The severity of the log entry. The default value + is ``LogSeverity.DEFAULT``. + insert_id (str): + Optional. A unique identifier for the log entry. If you + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which are removed in a + single query result. However, there are no guarantees of + de-duplication in the export of logs. + + If the ``insert_id`` is omitted when writing a log entry, + the Logging API assigns its own unique identifier in this + field. + + In queries, the ``insert_id`` is also used to order log + entries that have the same ``log_name`` and ``timestamp`` + values. + http_request (~.glt_http_request.HttpRequest): + Optional. Information about the HTTP request + associated with this log entry, if applicable. + labels (Sequence[~.log_entry.LogEntry.LabelsEntry]): + Optional. A set of user-defined (key, value) + data that provides additional information about + the log entry. + operation (~.log_entry.LogEntryOperation): + Optional. Information about an operation + associated with the log entry, if applicable. + trace (str): + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: + ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id (str): + Optional. The span ID within the trace associated with the + log entry. + + For Trace spans, this is the same format that the Trace API + v2 uses: a 16-character hexadecimal encoding of an 8-byte + array, such as ``000000000000004a``. + trace_sampled (bool): + Optional. The sampling decision of the trace associated with + the log entry. + + True means that the trace resource name in the ``trace`` + field was sampled for storage in a trace backend. False + means that the trace was not sampled for storage when this + log entry was written, or the sampling decision was unknown + at the time. A non-sampled ``trace`` value is still useful + as a request correlation identifier. The default is False. + source_location (~.log_entry.LogEntrySourceLocation): + Optional. Source code location information + associated with the log entry, if any. 
+ """ + + log_name = proto.Field(proto.STRING, number=12) + + resource = proto.Field( + proto.MESSAGE, number=8, message=monitored_resource.MonitoredResource, + ) + + proto_payload = proto.Field( + proto.MESSAGE, number=2, oneof="payload", message=gp_any.Any, + ) + + text_payload = proto.Field(proto.STRING, number=3, oneof="payload") + + json_payload = proto.Field( + proto.MESSAGE, number=6, oneof="payload", message=struct.Struct, + ) + + timestamp = proto.Field(proto.MESSAGE, number=9, message=gp_timestamp.Timestamp,) + + receive_timestamp = proto.Field( + proto.MESSAGE, number=24, message=gp_timestamp.Timestamp, + ) + + severity = proto.Field(proto.ENUM, number=10, enum=log_severity.LogSeverity,) + + insert_id = proto.Field(proto.STRING, number=4) + + http_request = proto.Field( + proto.MESSAGE, number=7, message=glt_http_request.HttpRequest, + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=11) + + operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) + + trace = proto.Field(proto.STRING, number=22) + + span_id = proto.Field(proto.STRING, number=27) + + trace_sampled = proto.Field(proto.BOOL, number=30) + + source_location = proto.Field( + proto.MESSAGE, number=23, message="LogEntrySourceLocation", + ) + + +class LogEntryOperation(proto.Message): + r"""Additional information about a potentially long-running + operation with which a log entry is associated. + + Attributes: + id (str): + Optional. An arbitrary operation identifier. + Log entries with the same identifier are assumed + to be part of the same operation. + producer (str): + Optional. An arbitrary producer identifier. The combination + of ``id`` and ``producer`` must be globally unique. Examples + for ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first (bool): + Optional. Set this to True if this is the + first log entry in the operation. + last (bool): + Optional. Set this to True if this is the + last log entry in the operation. + """ + + id = proto.Field(proto.STRING, number=1) + + producer = proto.Field(proto.STRING, number=2) + + first = proto.Field(proto.BOOL, number=3) + + last = proto.Field(proto.BOOL, number=4) + + +class LogEntrySourceLocation(proto.Message): + r"""Additional information about the source code location that + produced the log entry. + + Attributes: + file (str): + Optional. Source file name. Depending on the + runtime environment, this might be a simple name + or a fully-qualified name. + line (int): + Optional. Line within the source file. + 1-based; 0 indicates no line number available. + function (str): + Optional. Human-readable name of the function or method + being invoked, with optional context such as the class or + package name. This information may be used in contexts such + as the logs viewer, where a file and line number are less + meaningful. The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). 
+ """ + + file = proto.Field(proto.STRING, number=1) + + line = proto.Field(proto.INT64, number=2) + + function = proto.Field(proto.STRING, number=3) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py new file mode 100644 index 000000000000..0d44439abff7 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.logging.v2", + manifest={ + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", + }, +) + + +class DeleteLogRequest(proto.Message): + r"""The parameters to DeleteLog. + + Attributes: + log_name (str): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + """ + + log_name = proto.Field(proto.STRING, number=1) + + +class WriteLogEntriesRequest(proto.Message): + r"""The parameters to WriteLogEntries. + + Attributes: + log_name (str): + Optional. A default log resource name that is assigned to + all log entries in ``entries`` that do not specify a value + for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed on + each project, organization, billing account, or folder that + is receiving new log entries, whether the resource is + specified in ``logName`` or in an individual log entry. + resource (~.monitored_resource.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. 
Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + labels (Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]): + Optional. Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries (Sequence[~.log_entry.LogEntry]): + Required. The log entries to send to Logging. The order of + log entries in this list does not matter. Values supplied in + this method's ``log_name``, ``resource``, and ``labels`` + fields are copied into those log entries in this list that + do not include values for their corresponding fields. For + more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing in + log entries, then this method supplies the current time or a + unique identifier, respectively. The supplied values are + chosen so that, among the log entries that did not supply + their own values, the entries earlier in the list will sort + before the entries later in the list. See the + ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those log + entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling this + method for each individual log entry. + partial_success (bool): + Optional. Whether valid entries should be written even if + some other entries fail due to INVALID_ARGUMENT or + PERMISSION_DENIED errors. If any entry is not written, then + the response status is the error associated with one of the + failed entries and the response includes error details keyed + by the entries' zero-based index in the ``entries.write`` + method. + dry_run (bool): + Optional. If true, the request should expect + normal response, but the entries won't be + persisted nor exported. Useful for checking + whether the logging API endpoints are working + properly before sending valuable data. + """ + + log_name = proto.Field(proto.STRING, number=1) + + resource = proto.Field( + proto.MESSAGE, number=2, message=monitored_resource.MonitoredResource, + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) + + partial_success = proto.Field(proto.BOOL, number=5) + + dry_run = proto.Field(proto.BOOL, number=6) + + +class WriteLogEntriesResponse(proto.Message): + r"""Result returned from WriteLogEntries.""" + + +class WriteLogEntriesPartialErrors(proto.Message): + r"""Error details for WriteLogEntries with partial success. + + Attributes: + log_entry_errors (Sequence[~.logging.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written + due to a permanent error, keyed by the entry's zero-based + index in ``WriteLogEntriesRequest.entries``. + + Failed requests for which no entries are written will not + include per-entry errors. 
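+
+            As a hypothetical sketch, a three-entry request whose first
+            and third entries fail might yield a map shaped like::
+
+                {0: Status(code=7, message="permission denied"),
+                 2: Status(code=3, message="invalid resource")}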
+ """ + + log_entry_errors = proto.MapField( + proto.INT32, proto.MESSAGE, number=1, message=status.Status, + ) + + +class ListLogEntriesRequest(proto.Message): + r"""The parameters to ``ListLogEntries``. + + Attributes: + resource_names (Sequence[str]): + Required. Names of one or more parent resources from which + to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Projects listed in the ``project_ids`` field are added to + this list. + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not listed in ``resource_names`` will cause the filter to + return no results. The maximum length of the filter is 20000 + characters. + order_by (str): + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size (int): + Optional. The maximum number of results to return from this + request. Default is 50. If the value is negative or exceeds + 1000, the request is rejected. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``page_token`` must be the value of ``next_page_token`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + resource_names = proto.RepeatedField(proto.STRING, number=8) + + filter = proto.Field(proto.STRING, number=2) + + order_by = proto.Field(proto.STRING, number=3) + + page_size = proto.Field(proto.INT32, number=4) + + page_token = proto.Field(proto.STRING, number=5) + + +class ListLogEntriesResponse(proto.Message): + r"""Result returned from ``ListLogEntries``. + + Attributes: + entries (Sequence[~.log_entry.LogEntry]): + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that + more entries may exist. See ``nextPageToken`` for more + information. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + + If a value for ``next_page_token`` appears and the + ``entries`` field is empty, it means that the search found + no log entries so far but it did not have time to search all + the possible log entries. Retry the method with this value + for ``page_token`` to continue the search. Alternatively, + consider speeding up the search by changing your filter to + specify a single log name or resource type, or to narrow the + time range of the search. 
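+
+    A minimal manual-pagination sketch (assuming a ``client`` object
+    that exposes a ``list_log_entries`` call accepting this request
+    type; the names are illustrative)::
+
+        request = ListLogEntriesRequest(
+            resource_names=["projects/my-project"],
+        )
+        while True:
+            response = client.list_log_entries(request)
+            for entry in response.entries:
+                print(entry.log_name)
+            if not response.next_page_token:
+                break
+            request.page_token = response.next_page_token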
+ """ + + @property + def raw_page(self): + return self + + entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListMonitoredResourceDescriptorsRequest(proto.Message): + r"""The parameters to ListMonitoredResourceDescriptors + + Attributes: + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + page_size = proto.Field(proto.INT32, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + +class ListMonitoredResourceDescriptorsResponse(proto.Message): + r"""Result returned from ListMonitoredResourceDescriptors. + + Attributes: + resource_descriptors (Sequence[~.monitored_resource.MonitoredResourceDescriptor]): + A list of resource descriptors. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + resource_descriptors = proto.RepeatedField( + proto.MESSAGE, number=1, message=monitored_resource.MonitoredResourceDescriptor, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListLogsRequest(proto.Message): + r"""The parameters to ListLogs. + + Attributes: + parent (str): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListLogsResponse(proto.Message): + r"""Result returned from ListLogs. + + Attributes: + log_names (Sequence[str]): + A list of log names. For example, + ``"projects/my-project/logs/syslog"`` or + ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. 
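+
+    For illustration (assuming a hypothetical ``client.list_logs``
+    call), a page of results can be read directly from ``log_names``::
+
+        response = client.list_logs(
+            ListLogsRequest(parent="projects/my-project"),
+        )
+        for name in response.log_names:
+            print(name)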
+ """ + + @property + def raw_page(self): + return self + + log_names = proto.RepeatedField(proto.STRING, number=3) + + next_page_token = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py new file mode 100644 index 000000000000..2161d687232d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -0,0 +1,960 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.logging.v2", + manifest={ + "LifecycleState", + "LogBucket", + "LogSink", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "UpdateBucketRequest", + "GetBucketRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + }, +) + + +class LifecycleState(proto.Enum): + r"""LogBucket lifecycle states (Beta).""" + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + + +class LogBucket(proto.Message): + r"""Describes a repository of logs (Beta). + + Attributes: + name (str): + The resource name of the bucket. For example: + "projects/my-project-id/locations/my-location/buckets/my-bucket-id + The supported locations are: "global" "us-central1" + + For the location of ``global`` it is unspecified where logs + are actually stored. Once a bucket has been created, the + location can not be changed. + description (str): + Describes this bucket. + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of the + bucket. This is not set for any of the default + buckets. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of the + bucket. + retention_days (int): + Logs will be retained by default for this + amount of time, after which they will + automatically be deleted. The minimum retention + period is 1 day. If this value is set to zero at + bucket creation time, the default time of 30 + days will be used. + lifecycle_state (~.logging_config.LifecycleState): + Output only. The bucket lifecycle state. 
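+
+    As an illustrative sketch (the Beta surface may change), a bucket
+    with extended retention could be expressed as::
+
+        bucket = LogBucket(
+            description="audit logs",  # hypothetical description
+            retention_days=90,
+        )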
+ """ + + name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + retention_days = proto.Field(proto.INT32, number=11) + + lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) + + +class LogSink(proto.Message): + r"""Describes a sink used to export log entries to one of the + following destinations in any project: a Cloud Storage bucket, a + BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. The sink must be + created within a project, organization, billing account, or + folder. + + Attributes: + name (str): + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include + only the following characters: upper and lower-case + alphanumeric characters, underscores, hyphens, and periods. + First character has to be alphanumeric. + destination (str): + Required. The export destination: + + :: + + "storage.googleapis.com/[GCS_BUCKET]" + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + + The sink's ``writer_identity``, set when the sink is + created, must have permission to write to the destination or + else the log entries are not exported. For more information, + see `Exporting Logs with + Sinks `__. + filter (str): + Optional. An `advanced logs + filter `__. + The only exported log entries are those that are in the + resource owning the sink and that match the filter. For + example: + + :: + + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + description (str): + Optional. A description of this sink. + The maximum length of the description is 8000 + characters. + disabled (bool): + Optional. If set to True, then this sink is + disabled and it does not export any log entries. + output_version_format (~.logging_config.LogSink.VersionFormat): + Deprecated. This field is unused. + writer_identity (str): + Output only. An IAM identity–a service account or + group—under which Logging writes the exported log entries to + the sink's destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. + + Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a + Resource `__. + Consult the destination service's documentation to determine + the appropriate IAM roles to assign to the identity. + include_children (bool): + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the + default, only the logs owned by the sink's parent resource + are available for export. If the field is true, then logs + from all the projects, folders, and billing accounts + contained in the sink's parent resource are also available + for export. Whether a particular log entry from the children + is exported depends on the sink's filter expression. 
For + example, if this field is true, then the filter + ``resource.type=gce_instance`` would export all Compute + Engine VM instance log entries from all projects in the + sink's parent. To only export entries from certain child + projects, filter on the project part of the log name: + + :: + + logName:("projects/test-project1/" OR "projects/test-project2/") AND + resource.type=gce_instance + bigquery_options (~.logging_config.BigQueryOptions): + Optional. Options that affect sinks exporting + data to BigQuery. + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of the + sink. + This field may not be present for older sinks. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of the + sink. + This field may not be present for older sinks. + """ + + class VersionFormat(proto.Enum): + r"""Deprecated. This is unused.""" + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + name = proto.Field(proto.STRING, number=1) + + destination = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=5) + + description = proto.Field(proto.STRING, number=18) + + disabled = proto.Field(proto.BOOL, number=19) + + output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) + + writer_identity = proto.Field(proto.STRING, number=8) + + include_children = proto.Field(proto.BOOL, number=9) + + bigquery_options = proto.Field( + proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", + ) + + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + + +class BigQueryOptions(proto.Message): + r"""Options that change functionality of a sink exporting data to + BigQuery. + + Attributes: + use_partitioned_tables (bool): + Optional. Whether to use `BigQuery's partition + tables `__. + By default, Logging creates dated tables based on the log + entries' timestamps, e.g. syslog_20170523. With partitioned + tables the date suffix is no longer present and `special + query + syntax `__ + has to be used instead. In both cases, tables are sharded + based on UTC timezone. + uses_timestamp_column_partitioning (bool): + Output only. True if new timestamp column based partitioning + is in use, false if legacy ingestion-time partitioning is in + use. All new sinks will have this field set true and will + use timestamp column based partitioning. If + use_partitioned_tables is false, this value has no meaning + and will be false. Legacy sinks using partitioned tables + will have this field set to false. + """ + + use_partitioned_tables = proto.Field(proto.BOOL, number=1) + + uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3) + + +class ListBucketsRequest(proto.Message): + r"""The parameters to ``ListBuckets`` (Beta). + + Attributes: + parent (str): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. 
+ ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + +class ListBucketsResponse(proto.Message): + r"""The response from ListBuckets (Beta). + + Attributes: + buckets (Sequence[~.logging_config.LogBucket]): + A list of buckets. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class UpdateBucketRequest(proto.Message): + r"""The parameters to ``UpdateBucket`` (Beta). + + Attributes: + name (str): + Required. The full resource name of the bucket to update. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + Also requires permission + "resourcemanager.projects.updateLiens" to set the locked + property + bucket (~.logging_config.LogBucket): + Required. The updated bucket. + update_mask (~.field_mask.FieldMask): + Required. Field mask that specifies the fields in ``bucket`` + that need an update. A bucket field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=retention_days``. + """ + + name = proto.Field(proto.STRING, number=1) + + bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) + + update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + + +class GetBucketRequest(proto.Message): + r"""The parameters to ``GetBucket`` (Beta). + + Attributes: + name (str): + Required. The resource name of the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListSinksRequest(proto.Message): + r"""The parameters to ``ListSinks``. + + Attributes: + parent (str): + Required. The parent resource whose sinks are to be listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". 
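+
+            For illustration, listing the sinks of a single project
+            might use (hypothetical project ID)::
+
+                request = ListSinksRequest(parent="projects/my-project")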
+ page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + +class ListSinksResponse(proto.Message): + r"""Result returned from ``ListSinks``. + + Attributes: + sinks (Sequence[~.logging_config.LogSink]): + A list of sinks. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetSinkRequest(proto.Message): + r"""The parameters to ``GetSink``. + + Attributes: + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + """ + + sink_name = proto.Field(proto.STRING, number=1) + + +class CreateSinkRequest(proto.Message): + r"""The parameters to ``CreateSink``. + + Attributes: + parent (str): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + sink (~.logging_config.LogSink): + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity (bool): + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is + omitted or set to false, and if the sink's parent is a + project, then the value returned as ``writer_identity`` is + the same group or service account used by Logging before the + addition of writer identities to this API. The sink's + destination must be in the same project as the sink itself. + + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, + see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. + """ + + parent = proto.Field(proto.STRING, number=1) + + sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) + + unique_writer_identity = proto.Field(proto.BOOL, number=3) + + +class UpdateSinkRequest(proto.Message): + r"""The parameters to ``UpdateSink``. + + Attributes: + sink_name (str): + Required. 
The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + sink (~.logging_config.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + unique_writer_identity (bool): + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: + + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. + update_mask (~.field_mask.FieldMask): + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren At some point in the + future, behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + sink_name = proto.Field(proto.STRING, number=1) + + sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) + + unique_writer_identity = proto.Field(proto.BOOL, number=3) + + update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + + +class DeleteSinkRequest(proto.Message): + r"""The parameters to ``DeleteSink``. + + Attributes: + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + """ + + sink_name = proto.Field(proto.STRING, number=1) + + +class LogExclusion(proto.Message): + r"""Specifies a set of log entries that are not to be stored in + Logging. If your GCP resource receives a large volume of logs, + you can use exclusions to reduce your chargeable logs. + Exclusions are processed after log sinks, so you can export log + entries before they are excluded. Note that organization-level + and folder-level exclusions don't apply to child resources, and + that you can't exclude audit log entries. + + Attributes: + name (str): + Required. A client-assigned identifier, such as + ``"load-balancer-exclusion"``. Identifiers are limited to + 100 characters and can include only letters, digits, + underscores, hyphens, and periods. First character has to be + alphanumeric. + description (str): + Optional. A description of this exclusion. + filter (str): + Required. 
An `advanced logs
+            filter `__
+            that matches the log entries to be excluded. By using the
+            `sample
+            function `__,
+            you can exclude less than 100% of the matching log entries.
+            For example, the following query matches 99% of low-severity
+            log entries from Google Cloud Storage buckets:
+
+            ``"resource.type=gcs_bucket severity<ERROR sample(insertId, 0.99)"``.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    description = proto.Field(proto.STRING, number=2)
+
+    filter = proto.Field(proto.STRING, number=3)
+
+
+class GetCmekSettingsRequest(proto.Message):
+    r"""The parameters to
+    [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
+
+    See `Enabling CMEK for Logs
+    Router `__
+    for more information.
+
+    Attributes:
+        name (str):
+            Required. The resource for which to retrieve CMEK settings.
+
+            ::
+
+                "projects/[PROJECT_ID]/cmekSettings"
+                "organizations/[ORGANIZATION_ID]/cmekSettings"
+                "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
+                "folders/[FOLDER_ID]/cmekSettings"
+
+            Example: ``"organizations/12345/cmekSettings"``.
+
+            Note: CMEK for the Logs Router can currently only be
+            configured for GCP organizations. Once configured, it
+            applies to all projects and folders in the GCP organization.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+
+class UpdateCmekSettingsRequest(proto.Message):
+    r"""The parameters to
+    [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings].
+
+    See `Enabling CMEK for Logs
+    Router `__
+    for more information.
+
+    Attributes:
+        name (str):
+            Required. The resource name for the CMEK settings to update.
+
+            ::
+
+                "projects/[PROJECT_ID]/cmekSettings"
+                "organizations/[ORGANIZATION_ID]/cmekSettings"
+                "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
+                "folders/[FOLDER_ID]/cmekSettings"
+
+            Example: ``"organizations/12345/cmekSettings"``.
+
+            Note: CMEK for the Logs Router can currently only be
+            configured for GCP organizations. Once configured, it
+            applies to all projects and folders in the GCP organization.
+        cmek_settings (~.logging_config.CmekSettings):
+            Required. The CMEK settings to update.
+
+            See `Enabling CMEK for Logs
+            Router `__
+            for more information.
+        update_mask (~.field_mask.FieldMask):
+            Optional. Field mask identifying which fields from
+            ``cmek_settings`` should be updated. A field will be
+            overwritten if and only if it is in the update mask. Output
+            only fields cannot be updated.
+
+            See [FieldMask][google.protobuf.FieldMask] for more
+            information.
+
+            Example: ``"updateMask=kmsKeyName"``
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",)
+
+    update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,)
+
+
+class CmekSettings(proto.Message):
+    r"""Describes the customer-managed encryption key (CMEK) settings
+    associated with a project, folder, organization, billing account, or
+    flexible resource.
+
+    Note: CMEK for the Logs Router can currently only be configured for
+    GCP organizations. Once configured, it applies to all projects and
+    folders in the GCP organization.
+
+    See `Enabling CMEK for Logs
+    Router `__
+    for more information.
+
+    Attributes:
+        name (str):
+            Output only. The resource name of the CMEK
+            settings.
+        kms_key_name (str):
+            The resource name for the configured Cloud KMS key.
+
+            KMS key name format:
+            "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]"
+
+            For example:
+            ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"``
+
+            To enable CMEK for the Logs Router, set this field to a
+            valid ``kms_key_name`` for which the associated service
+            account has the required
+            ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned
+            for the key.
+
+            The Cloud KMS key used by the Log Router can be updated by
+            changing the ``kms_key_name`` to a new valid key name.
+            Encryption operations that are in progress will be completed
+            with the key that was in use when they started. Decryption
+            operations will be completed using the key that was used at
+            the time of encryption unless access to that key has been
+            revoked.
+
+            To disable CMEK for the Logs Router, set this field to an
+            empty string.
+
+            See `Enabling CMEK for Logs
+            Router `__
+            for more information.
+        service_account_id (str):
+            Output only. The service account that will be used by the
+            Logs Router to access your Cloud KMS key.
+
+            Before enabling CMEK for Logs Router, you must first assign
+            the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to
+            the service account that the Logs Router will use to access
+            your Cloud KMS key. Use
+            [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]
+            to obtain the service account ID.
+
+            See `Enabling CMEK for Logs
+            Router `__
+            for more information.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    kms_key_name = proto.Field(proto.STRING, number=2)
+
+    service_account_id = proto.Field(proto.STRING, number=3)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py
new file mode 100644
index 000000000000..2f7c5b47230e
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto  # type: ignore
+
+
+from google.api import distribution_pb2 as distribution  # type: ignore
+from google.api import metric_pb2 as ga_metric  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.logging.v2",
+    manifest={
+        "LogMetric",
+        "ListLogMetricsRequest",
+        "ListLogMetricsResponse",
+        "GetLogMetricRequest",
+        "CreateLogMetricRequest",
+        "UpdateLogMetricRequest",
+        "DeleteLogMetricRequest",
+    },
+)
+
+
+class LogMetric(proto.Message):
+    r"""Describes a logs-based metric. The value of the metric is the
+    number of log entries that match a logs filter in a given time
+    interval.
+    Logs-based metrics can also be used to extract values from logs
+    and create a distribution of the values. The distribution
+    records the statistics of the extracted values along with an
+    optional histogram of the values as specified by the bucket
+    options.
+
+    Attributes:
+        name (str):
+            Required. The client-assigned metric identifier. Examples:
+            ``"error_count"``, ``"nginx/requests"``.
+
+            Metric identifiers are limited to 100 characters and can
+            include only the following characters: ``A-Z``, ``a-z``,
+            ``0-9``, and the special characters ``_-.,+!*',()%/``. The
+            forward-slash character (``/``) denotes a hierarchy of name
+            pieces, and it cannot be the first character of the name.
+
+            The metric identifier in this field must not be
+            `URL-encoded `__.
+            However, when the metric identifier appears as the
+            ``[METRIC_ID]`` part of a ``metric_name`` API parameter,
+            then the metric identifier must be URL-encoded. Example:
+            ``"projects/my-project/metrics/nginx%2Frequests"``.
+        description (str):
+            Optional. A description of this metric, which
+            is used in documentation. The maximum length of
+            the description is 8000 characters.
+        filter (str):
+            Required. An `advanced logs
+            filter `__
+            which is used to match log entries. Example:
+
+            ::
+
+                "resource.type=gae_app AND severity>=ERROR"
+
+            The maximum length of the filter is 20000 characters.
+        metric_descriptor (~.ga_metric.MetricDescriptor):
+            Optional. The metric descriptor associated with the
+            logs-based metric. If unspecified, it uses a default metric
+            descriptor with a DELTA metric kind, INT64 value type, with
+            no labels and a unit of "1". Such a metric counts the number
+            of log entries matching the ``filter`` expression.
+
+            The ``name``, ``type``, and ``description`` fields in the
+            ``metric_descriptor`` are output only, and are constructed
+            using the ``name`` and ``description`` fields in the
+            LogMetric.
+
+            To create a logs-based metric that records a distribution of
+            log values, a DELTA metric kind with a DISTRIBUTION value
+            type must be used along with a ``value_extractor``
+            expression in the LogMetric.
+
+            Each label in the metric descriptor must have a matching
+            label name as the key and an extractor expression as the
+            value in the ``label_extractors`` map.
+
+            The ``metric_kind`` and ``value_type`` fields in the
+            ``metric_descriptor`` cannot be updated once initially
+            configured. New labels can be added in the
+            ``metric_descriptor``, but existing labels cannot be
+            modified except for their description.
+        value_extractor (str):
+            Optional. A ``value_extractor`` is required when using a
+            distribution logs-based metric to extract the values to
+            record from a log entry. Two functions are supported for
+            value extraction: ``EXTRACT(field)`` or
+            ``REGEXP_EXTRACT(field, regex)``. The arguments are:
+
+            1. field: The name of the log entry field from which the
+               value is to be extracted.
+            2. regex: A regular expression using the Google RE2 syntax
+               (https://github.com/google/re2/wiki/Syntax) with a single
+               capture group to extract data from the specified log
+               entry field. The value of the field is converted to a
+               string before applying the regex. It is an error to
+               specify a regex that does not include exactly one capture
+               group.
+
+            The result of the extraction must be convertible to a double
+            type, as the distribution always records double values. If
+            either the extraction or the conversion to double fails,
+            then those values are not recorded in the distribution.
+
+            Example:
+            ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")``
+        label_extractors (Sequence[~.logging_metrics.LogMetric.LabelExtractorsEntry]):
+            Optional. A map from a label key string to an extractor
+            expression which is used to extract data from a log entry
+            field and assign as the label value. Each label key
+            specified in the LabelDescriptor must have an associated
+            extractor expression in this map. The syntax of the
+            extractor expression is the same as for the
+            ``value_extractor`` field.
+
+            The extracted value is converted to the type defined in the
+            label descriptor. If either the extraction or the type
+            conversion fails, the label will have a default value. The
+            default value for a string label is an empty string, for an
+            integer label it is 0, and for a boolean label it is
+            ``false``.
+
+            Note that there are upper bounds on the maximum number of
+            labels and the number of active time series that are allowed
+            in a project.
+        bucket_options (~.distribution.Distribution.BucketOptions):
+            Optional. The ``bucket_options`` are required when the
+            logs-based metric is using a DISTRIBUTION value type and it
+            describes the bucket boundaries used to create a histogram
+            of the extracted values.
+        create_time (~.timestamp.Timestamp):
+            Output only. The creation timestamp of the
+            metric.
+            This field may not be present for older metrics.
+        update_time (~.timestamp.Timestamp):
+            Output only. The last update timestamp of the
+            metric.
+            This field may not be present for older metrics.
+        version (~.logging_metrics.LogMetric.ApiVersion):
+            Deprecated. The API version that created or
+            updated this metric. The v2 format is used by
+            default and cannot be changed.
+    """
+
+    class ApiVersion(proto.Enum):
+        r"""Logging API version."""
+        V2 = 0
+        V1 = 1
+
+    name = proto.Field(proto.STRING, number=1)
+
+    description = proto.Field(proto.STRING, number=2)
+
+    filter = proto.Field(proto.STRING, number=3)
+
+    metric_descriptor = proto.Field(
+        proto.MESSAGE, number=5, message=ga_metric.MetricDescriptor,
+    )
+
+    value_extractor = proto.Field(proto.STRING, number=6)
+
+    label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7)
+
+    bucket_options = proto.Field(
+        proto.MESSAGE, number=8, message=distribution.Distribution.BucketOptions,
+    )
+
+    create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
+
+    update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
+
+    version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,)
+
+
+class ListLogMetricsRequest(proto.Message):
+    r"""The parameters to ListLogMetrics.
+
+    Attributes:
+        parent (str):
+            Required. The name of the project containing the metrics:
+
+            ::
+
+                "projects/[PROJECT_ID]".
+        page_token (str):
+            Optional. If present, then retrieve the next batch of
+            results from the preceding call to this method.
+            ``pageToken`` must be the value of ``nextPageToken`` from
+            the previous response. The values of other method parameters
+            should be identical to those in the previous call.
+        page_size (int):
+            Optional. The maximum number of results to return from this
+            request. Non-positive values are ignored. The presence of
+            ``nextPageToken`` in the response indicates that more
+            results might be available.
+    """
+
+    parent = proto.Field(proto.STRING, number=1)
+
+    page_token = proto.Field(proto.STRING, number=2)
+
+    page_size = proto.Field(proto.INT32, number=3)
+
+
+class ListLogMetricsResponse(proto.Message):
+    r"""Result returned from ListLogMetrics.
+
+    Attributes:
+        metrics (Sequence[~.logging_metrics.LogMetric]):
+            A list of logs-based metrics.
+        next_page_token (str):
+            If there might be more results than appear in this response,
+            then ``nextPageToken`` is included. To get the next set of
+            results, call this method again using the value of
+            ``nextPageToken`` as ``pageToken``.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",)
+
+    next_page_token = proto.Field(proto.STRING, number=2)
+
+
+class GetLogMetricRequest(proto.Message):
+    r"""The parameters to GetLogMetric.
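+
+    For illustration (hypothetical metric ID)::
+
+        request = GetLogMetricRequest(
+            metric_name="projects/my-project/metrics/error_count",
+        )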
+ + Attributes: + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + """ + + metric_name = proto.Field(proto.STRING, number=1) + + +class CreateLogMetricRequest(proto.Message): + r"""The parameters to CreateLogMetric. + + Attributes: + parent (str): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + metric (~.logging_metrics.LogMetric): + Required. The new logs-based metric, which + must not have an identifier that already exists. + """ + + parent = proto.Field(proto.STRING, number=1) + + metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + + +class UpdateLogMetricRequest(proto.Message): + r"""The parameters to UpdateLogMetric. + + Attributes: + metric_name (str): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and it's + ``name`` field must be the same as ``[METRIC_ID]`` If the + metric does not exist in ``[PROJECT_ID]``, then a new metric + is created. + metric (~.logging_metrics.LogMetric): + Required. The updated metric. + """ + + metric_name = proto.Field(proto.STRING, number=1) + + metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + + +class DeleteLogMetricRequest(proto.Message): + r"""The parameters to DeleteLogMetric. + + Attributes: + metric_name (str): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + """ + + metric_name = proto.Field(proto.STRING, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/mypy.ini b/packages/google-cloud-logging/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/google-cloud-logging/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 9cc3ab77f6c5..ca45b2c40643 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.5", "3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django") session.install("-e", ".") diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index d60cd0a3b997..1531d24ad5eb 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -1,26 +1,24 @@ - .. This file is automatically generated. Do not edit this file directly. -Stackdriver Logging Python Samples +Cloud Logging Python Samples =============================================================================== .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst -This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. +This directory contains samples for Cloud Logging. `Cloud Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. -.. _Stackdriver Logging: https://cloud.google.com/logging/docs +.. _Cloud Logging: https://cloud.google.com/logging/docs Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -32,16 +30,14 @@ credentials for applications. https://cloud.google.com/docs/authentication/getting-started - - Install Dependencies ++++++++++++++++++++ -#. Clone python-docs-samples and change directory to the sample directory you want to use. +#. Clone python-logging and change directory to the sample directory you want to use. .. code-block:: bash - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + $ git clone https://github.com/googleapis/python-logging.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. @@ -65,10 +61,6 @@ Install Dependencies .. _virtualenv: https://virtualenv.pypa.io/ - - - - Samples ------------------------------------------------------------------------------- @@ -90,7 +82,6 @@ To run this sample: - Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -110,7 +101,7 @@ To run this sample: usage: snippets.py [-h] logger_name {list,write,delete} ... This application demonstrates how to perform basic operations on logs and - log entries with Stackdriver Logging. + log entries with Cloud Logging. For more information, see the README.md under /logging and the documentation at https://cloud.google.com/logging/docs. @@ -129,7 +120,6 @@ To run this sample: - Export +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -168,10 +158,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -187,5 +173,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/snippets/README.rst.in b/packages/google-cloud-logging/samples/snippets/README.rst.in index 00fa4b6b83c1..ff243c1ce81f 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst.in +++ b/packages/google-cloud-logging/samples/snippets/README.rst.in @@ -1,11 +1,11 @@ # This file is used to generate README.rst product: - name: Stackdriver Logging - short_name: Stackdriver Logging + name: Cloud Logging + short_name: Cloud Logging url: https://cloud.google.com/logging/docs description: > - `Stackdriver Logging`_ allows you to store, search, analyze, monitor, + `Cloud Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. diff --git a/packages/google-cloud-logging/samples/snippets/export.py b/packages/google-cloud-logging/samples/snippets/export.py index f7606ba6c125..9a0673ee72dc 100644 --- a/packages/google-cloud-logging/samples/snippets/export.py +++ b/packages/google-cloud-logging/samples/snippets/export.py @@ -27,10 +27,12 @@ def list_sinks(): sinks = list(logging_client.list_sinks()) if not sinks: - print('No sinks.') + print("No sinks.") for sink in sinks: - print('{}: {} -> {}'.format(sink.name, sink.filter_, sink.destination)) + print("{}: {} -> {}".format(sink.name, sink.filter_, sink.destination)) + + # [END logging_list_sinks] @@ -50,20 +52,18 @@ def create_sink(sink_name, destination_bucket, filter_): # or a BigQuery dataset. In this case, it is a Cloud Storage Bucket. # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for # information on the destination format. - destination = 'storage.googleapis.com/{bucket}'.format( - bucket=destination_bucket) + destination = "storage.googleapis.com/{bucket}".format(bucket=destination_bucket) - sink = logging_client.sink( - sink_name, - filter_, - destination) + sink = logging_client.sink(sink_name, filter_=filter_, destination=destination) if sink.exists(): - print('Sink {} already exists.'.format(sink.name)) + print("Sink {} already exists.".format(sink.name)) return sink.create() - print('Created sink {}'.format(sink.name)) + print("Created sink {}".format(sink.name)) + + # [END logging_create_sink] @@ -83,8 +83,10 @@ def update_sink(sink_name, filter_): sink.reload() sink.filter_ = filter_ - print('Updated sink {}'.format(sink.name)) + print("Updated sink {}".format(sink.name)) sink.update() + + # [END logging_update_sink] @@ -96,50 +98,41 @@ def delete_sink(sink_name): sink.delete() - print('Deleted sink {}'.format(sink.name)) + print("Deleted sink {}".format(sink.name)) + + # [END logging_delete_sink] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('list', help=list_sinks.__doc__) + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("list", help=list_sinks.__doc__) - create_parser = subparsers.add_parser('create', help=list_sinks.__doc__) - create_parser.add_argument( - 'sink_name', - help='Name of the log export sink.') - create_parser.add_argument( - 'destination_bucket', - help='Cloud Storage bucket where logs will be exported.') + create_parser = subparsers.add_parser("create", 
help=list_sinks.__doc__) + create_parser.add_argument("sink_name", help="Name of the log export sink.") create_parser.add_argument( - 'filter', - help='The filter used to match logs.') - - update_parser = subparsers.add_parser('update', help=update_sink.__doc__) - update_parser.add_argument( - 'sink_name', - help='Name of the log export sink.') - update_parser.add_argument( - 'filter', - help='The filter used to match logs.') - - delete_parser = subparsers.add_parser('delete', help=delete_sink.__doc__) - delete_parser.add_argument( - 'sink_name', - help='Name of the log export sink.') + "destination_bucket", help="Cloud Storage bucket where logs will be exported." + ) + create_parser.add_argument("filter", help="The filter used to match logs.") + + update_parser = subparsers.add_parser("update", help=update_sink.__doc__) + update_parser.add_argument("sink_name", help="Name of the log export sink.") + update_parser.add_argument("filter", help="The filter used to match logs.") + + delete_parser = subparsers.add_parser("delete", help=delete_sink.__doc__) + delete_parser.add_argument("sink_name", help="Name of the log export sink.") args = parser.parse_args() - if args.command == 'list': + if args.command == "list": list_sinks() - elif args.command == 'create': + elif args.command == "create": create_sink(args.sink_name, args.destination_bucket, args.filter) - elif args.command == 'update': + elif args.command == "update": update_sink(args.sink_name, args.filter) - elif args.command == 'delete': + elif args.command == "delete": delete_sink(args.sink_name) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index b787c066af67..b1ecf49230f1 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -23,15 +23,15 @@ import export -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -TEST_SINK_NAME_TMPL = 'example_sink_{}' -TEST_SINK_FILTER = 'severity>=CRITICAL' +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] +TEST_SINK_NAME_TMPL = "example_sink_{}" +TEST_SINK_FILTER = "severity>=CRITICAL" def _random_id(): - return ''.join( - random.choice(string.ascii_uppercase + string.digits) - for _ in range(6)) + return "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(6) + ) @pytest.yield_fixture @@ -40,8 +40,9 @@ def example_sink(): sink = client.sink( TEST_SINK_NAME_TMPL.format(_random_id()), - TEST_SINK_FILTER, - 'storage.googleapis.com/{bucket}'.format(bucket=BUCKET)) + filter_=TEST_SINK_FILTER, + destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET), + ) sink.create() @@ -67,10 +68,7 @@ def test_create(capsys): sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) try: - export.create_sink( - sink_name, - BUCKET, - TEST_SINK_FILTER) + export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. 
finally: try: @@ -83,7 +81,7 @@ def test_create(capsys): def test_update(example_sink, capsys): - updated_filter = 'severity>=INFO' + updated_filter = "severity>=INFO" export.update_sink(example_sink.name, updated_filter) example_sink.reload() diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py index d59458425633..9a63d022f93b 100644 --- a/packages/google-cloud-logging/samples/snippets/handler.py +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -36,14 +36,14 @@ def use_logging_handler(): import logging # The data to log - text = 'Hello, world!' + text = "Hello, world!" # Emits the data using the standard logging module logging.warning(text) # [END logging_handler_usage] - print('Logged: {}'.format(text)) + print("Logged: {}".format(text)) -if __name__ == '__main__': +if __name__ == "__main__": use_logging_handler() diff --git a/packages/google-cloud-logging/samples/snippets/handler_test.py b/packages/google-cloud-logging/samples/snippets/handler_test.py index d48ee2e20d06..9d635806ae15 100644 --- a/packages/google-cloud-logging/samples/snippets/handler_test.py +++ b/packages/google-cloud-logging/samples/snippets/handler_test.py @@ -19,4 +19,4 @@ def test_handler(capsys): handler.use_logging_handler() out, _ = capsys.readouterr() - assert 'Logged' in out + assert "Logged" in out diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 01686e4a0379..ab2c49227c3b 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -37,24 +37,25 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -69,12 +70,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -83,7 +84,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -132,17 +133,33 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -182,9 +199,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/packages/google-cloud-logging/samples/snippets/quickstart.py b/packages/google-cloud-logging/samples/snippets/quickstart.py index 19409c776111..7c38ea6fa82f 100644 --- a/packages/google-cloud-logging/samples/snippets/quickstart.py +++ b/packages/google-cloud-logging/samples/snippets/quickstart.py @@ -24,19 +24,19 @@ def run_quickstart(): logging_client = logging.Client() # The name of the log to write to - log_name = 'my-log' + log_name = "my-log" # Selects the log to write to logger = logging_client.logger(log_name) # The data to log - text = 'Hello, world!' + text = "Hello, world!" # Writes the log entry logger.log_text(text) - print('Logged: {}'.format(text)) + print("Logged: {}".format(text)) # [END logging_quickstart] -if __name__ == '__main__': +if __name__ == "__main__": run_quickstart() diff --git a/packages/google-cloud-logging/samples/snippets/quickstart_test.py b/packages/google-cloud-logging/samples/snippets/quickstart_test.py index 1b49cd1263a9..d8ace2cbcf34 100644 --- a/packages/google-cloud-logging/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-logging/samples/snippets/quickstart_test.py @@ -19,4 +19,4 @@ def test_quickstart(capsys): quickstart.run_quickstart() out, _ = capsys.readouterr() - assert 'Logged' in out + assert "Logged" in out diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py index 78f67e8a983b..39399dcf793b 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -15,7 +15,7 @@ # limitations under the License. """This application demonstrates how to perform basic operations on logs and -log entries with Stackdriver Logging. +log entries with Cloud Logging. For more information, see the README.md under /logging and the documentation at https://cloud.google.com/logging/docs. @@ -35,19 +35,23 @@ def write_entry(logger_name): logger = logging_client.logger(logger_name) # Make a simple text log - logger.log_text('Hello, world!') + logger.log_text("Hello, world!") # Simple text log with severity. - logger.log_text('Goodbye, world!', severity='ERROR') + logger.log_text("Goodbye, world!", severity="ERROR") # Struct log. The struct can be any JSON-serializable dictionary. 
- logger.log_struct({ - 'name': 'King Arthur', - 'quest': 'Find the Holy Grail', - 'favorite_color': 'Blue' - }) + logger.log_struct( + { + "name": "King Arthur", + "quest": "Find the Holy Grail", + "favorite_color": "Blue", + } + ) + + print("Wrote logs to {}.".format(logger.name)) + - print('Wrote logs to {}.'.format(logger.name)) # [END logging_write_log_entry] @@ -57,12 +61,13 @@ def list_entries(logger_name): logging_client = logging.Client() logger = logging_client.logger(logger_name) - print('Listing entries for logger {}:'.format(logger.name)) + print("Listing entries for logger {}:".format(logger.name)) for entry in logger.list_entries(): timestamp = entry.timestamp.isoformat() - print('* {}: {}'.format - (timestamp, entry.payload)) + print("* {}: {}".format(timestamp, entry.payload)) + + # [END logging_list_log_entries] @@ -77,27 +82,27 @@ def delete_logger(logger_name): logger.delete() - print('Deleted all logging entries for {}'.format(logger.name)) + print("Deleted all logging entries for {}".format(logger.name)) + + # [END logging_delete_log] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - parser.add_argument( - 'logger_name', help='Logger name', default='example_log') - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('list', help=list_entries.__doc__) - subparsers.add_parser('write', help=write_entry.__doc__) - subparsers.add_parser('delete', help=delete_logger.__doc__) + parser.add_argument("logger_name", help="Logger name", default="example_log") + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("list", help=list_entries.__doc__) + subparsers.add_parser("write", help=write_entry.__doc__) + subparsers.add_parser("delete", help=delete_logger.__doc__) args = parser.parse_args() - if args.command == 'list': + if args.command == "list": list_entries(args.logger_name) - elif args.command == 'write': + elif args.command == "write": write_entry(args.logger_name) - elif args.command == 'delete': + elif args.command == "delete": delete_logger(args.logger_name) diff --git a/packages/google-cloud-logging/samples/snippets/snippets_test.py b/packages/google-cloud-logging/samples/snippets/snippets_test.py index 1d1d01972efa..479f742ae5ce 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets_test.py +++ b/packages/google-cloud-logging/samples/snippets/snippets_test.py @@ -22,14 +22,14 @@ import snippets -TEST_LOGGER_NAME = 'example_log_{}'.format(uuid.uuid4().hex) +TEST_LOGGER_NAME = "example_log_{}".format(uuid.uuid4().hex) @pytest.fixture def example_log(): client = logging.Client() logger = client.logger(TEST_LOGGER_NAME) - text = 'Hello, world.' + text = "Hello, world." logger.log_text(text) return text diff --git a/packages/google-cloud-logging/scripts/decrypt-secrets.sh b/packages/google-cloud-logging/scripts/decrypt-secrets.sh index ff599eb2af25..21f6d2a26d90 100755 --- a/packages/google-cloud-logging/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-logging/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. 
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-logging/scripts/fixup_logging_v2_keywords.py b/packages/google-cloud-logging/scripts/fixup_logging_v2_keywords.py new file mode 100644 index 000000000000..c570c0883505 --- /dev/null +++ b/packages/google-cloud-logging/scripts/fixup_logging_v2_keywords.py @@ -0,0 +1,203 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class loggingCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_exclusion': ('parent', 'exclusion', ), + 'create_log_metric': ('parent', 'metric', ), + 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), + 'delete_exclusion': ('name', ), + 'delete_log': ('log_name', ), + 'delete_log_metric': ('metric_name', ), + 'delete_sink': ('sink_name', ), + 'get_bucket': ('name', ), + 'get_cmek_settings': ('name', ), + 'get_exclusion': ('name', ), + 'get_log_metric': ('metric_name', ), + 'get_sink': ('sink_name', ), + 'list_buckets': ('parent', 'page_token', 'page_size', ), + 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_log_metrics': ('parent', 'page_token', 'page_size', ), + 'list_logs': ('parent', 'page_size', 'page_token', ), + 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), + 'list_sinks': ('parent', 'page_token', 'page_size', ), + 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), + 'update_exclusion': ('name', 'exclusion', 'update_mask', ), + 'update_log_metric': ('metric_name', 'metric', ), + 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), + 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=loggingCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the logging client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index ebd73c131ba4..87cb8f7f8be9 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,8 +29,9 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.15.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", + "proto-plus >= 1.11.0", ] extras = {} @@ -70,7 +71,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", @@ -82,7 +82,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.5", + python_requires=">=3.6", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 70f91ca29f82..493b1a2b5841 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-logging.git", - "sha": "98029b5a0d997963a7a30758933e0cc8ee8f5127" + "remote": "git@github.com:googleapis/python-logging", + "sha": "8466c62f459af6c2d89b411297df06988e45b522" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fd31b1600fc496d6127665d29f095371d985c637", - "internalRef": "336344634" + "sha": "4b0ad15b0ff483486ae90d73092e7be00f8c1848", + "internalRef": "341842584" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "befc24dcdeb8e57ec1259826fd33120b05137e8f" + "sha": "7db8a6c5ffb12a6e4c2f799c18f00f7f3d60e279" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "befc24dcdeb8e57ec1259826fd33120b05137e8f" + "sha": "7db8a6c5ffb12a6e4c2f799c18f00f7f3d60e279" } } ], diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 9965d9b693e7..5be81736175d 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -30,13 +30,25 @@ include_protos=True, ) -# the structure of the logging directory is a bit different, so manually copy the protos -s.move(library / "google/cloud/logging_v2/proto", 
"google/cloud/logging_v2/proto") +s.move( + library, + excludes=[ + "setup.py", + "README.rst", + "google/cloud/logging/__init__.py", # generated types are hidden from users + "google/cloud/logging_v2/__init__.py", + "docs/index.rst", + "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead + "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer + ], +) -s.move(library / "google/cloud/logging_v2/gapic") -s.move(library / "tests/unit/gapic/v2") -# Don't include gapic library docs. Users should use the hand-written layer instead -# s.move(library / "docs/gapic/v2") +# Fix generated unit tests +s.replace( + "tests/unit/gapic/logging_v2/test_logging_service_v2.py", + "MonitoredResource\(\s*type_", + "MonitoredResource(type" +) # ---------------------------------------------------------------------------- # Add templated files @@ -44,22 +56,23 @@ templated_files = common.py_library( unit_cov_level=95, cov_level=99, - system_test_python_versions = ['3.8'], - unit_test_python_versions = ['3.5', '3.6', '3.7', '3.8'], - system_test_external_dependencies = [ - 'google-cloud-bigquery', - 'google-cloud-pubsub', - 'google-cloud-storage', - 'google-cloud-testutils' - ], - unit_test_external_dependencies = [ - 'flask', - 'webob', - 'django' + microgenerator=True, + system_test_external_dependencies=[ + "google-cloud-bigquery", + "google-cloud-pubsub", + "google-cloud-storage", + "google-cloud-testutils", ], + unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) -s.move(templated_files, excludes=[".coveragerc"]) +s.move(templated_files, excludes=[".coveragerc", "docs/multiprocessing.rst"]) + +# -------------------------------------------------------------------------- +# Samples templates +# -------------------------------------------------------------------------- + +python.py_samples() # -------------------------------------------------------------------------- # Samples templates diff --git a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py index d574de7785f7..dfd81a5e8cf3 100644 --- a/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py +++ b/packages/google-cloud-logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py @@ -18,19 +18,17 @@ import google.auth from google.api import monitored_resource_pb2 from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_pb2 class TestSystemLoggingServiceV2(object): def test_write_log_entries(self): _, project_id = google.auth.default() - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() log_name = client.log_path(project_id, "test-{0}".format(time.time())) resource = {} labels = {} entries = [] response = client.write_log_entries( - entries, log_name=log_name, resource=resource, labels=labels + entries=entries, log_name=log_name, resource=resource, labels=labels ) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index db6dbe95ef70..10896adf72fa 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -23,13 +23,12 @@ from google.api_core.exceptions import 
ResourceExhausted from google.api_core.exceptions import RetryError from google.api_core.exceptions import ServiceUnavailable -from google.cloud._helpers import UTC import google.cloud.logging -import google.cloud.logging.handlers.handlers -from google.cloud.logging.handlers.handlers import CloudLoggingHandler -from google.cloud.logging.handlers.transports import SyncTransport -from google.cloud.logging import client -from google.cloud.logging.resource import Resource +from google.cloud._helpers import UTC +from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.transports import SyncTransport +from google.cloud.logging_v2 import client +from google.cloud.logging_v2.resource import Resource from test_utils.retry import RetryErrors from test_utils.retry import RetryResult @@ -124,7 +123,7 @@ def _logger_name(prefix): def test_list_entry_with_unregistered(self): from google.protobuf import any_pb2 from google.protobuf import descriptor_pool - from google.cloud.logging import entries + from google.cloud.logging_v2 import entries pool = descriptor_pool.Default() type_name = "google.cloud.audit.AuditLog" @@ -319,7 +318,9 @@ def test_log_root_handler(self): def test_create_metric(self): METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + metric = Config.CLIENT.metric( + METRIC_NAME, filter_=DEFAULT_FILTER, description=DEFAULT_DESCRIPTION + ) self.assertFalse(metric.exists()) retry = RetryErrors(Conflict) @@ -330,7 +331,9 @@ def test_create_metric(self): def test_list_metrics(self): METRIC_NAME = "test-list-metrics%s" % (_RESOURCE_ID,) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + metric = Config.CLIENT.metric( + METRIC_NAME, filter_=DEFAULT_FILTER, description=DEFAULT_DESCRIPTION + ) self.assertFalse(metric.exists()) before_metrics = list(Config.CLIENT.list_metrics()) before_names = set(before.name for before in before_metrics) @@ -348,7 +351,9 @@ def test_list_metrics(self): def test_reload_metric(self): METRIC_NAME = "test-reload-metric%s" % (_RESOURCE_ID,) retry = RetryErrors(Conflict) - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + metric = Config.CLIENT.metric( + METRIC_NAME, filter_=DEFAULT_FILTER, description=DEFAULT_DESCRIPTION + ) self.assertFalse(metric.exists()) retry(metric.create)() self.to_delete.append(metric) @@ -365,7 +370,9 @@ def test_update_metric(self): retry = RetryErrors(Conflict) NEW_FILTER = "logName:other" NEW_DESCRIPTION = "updated" - metric = Config.CLIENT.metric(METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + metric = Config.CLIENT.metric( + METRIC_NAME, filter_=DEFAULT_FILTER, description=DEFAULT_DESCRIPTION + ) self.assertFalse(metric.exists()) retry(metric.create)() self.to_delete.append(metric) @@ -406,7 +413,7 @@ def test_create_sink_storage_bucket(self): SINK_NAME = "test-create-sink-bucket%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + sink = Config.CLIENT.sink(SINK_NAME, filter_=DEFAULT_FILTER, destination=uri) self.assertFalse(sink.exists()) retry(sink.create)() @@ -434,7 +441,9 @@ def test_create_sink_pubsub_topic(self): TOPIC_URI = "pubsub.googleapis.com/%s" % (topic_path,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, TOPIC_URI) + sink = Config.CLIENT.sink( + 
SINK_NAME, filter_=DEFAULT_FILTER, destination=TOPIC_URI + ) self.assertFalse(sink.exists()) retry(sink.create)() @@ -469,7 +478,7 @@ def test_create_sink_bigquery_dataset(self): SINK_NAME = "test-create-sink-dataset%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + sink = Config.CLIENT.sink(SINK_NAME, filter_=DEFAULT_FILTER, destination=uri) self.assertFalse(sink.exists()) retry(sink.create)() @@ -481,7 +490,7 @@ def test_list_sinks(self): SINK_NAME = "test-list-sinks%s" % (_RESOURCE_ID,) uri = self._init_storage_bucket() retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + sink = Config.CLIENT.sink(SINK_NAME, filter_=DEFAULT_FILTER, destination=uri) self.assertFalse(sink.exists()) before_sinks = list(Config.CLIENT.list_sinks()) before_names = set(before.name for before in before_sinks) @@ -499,7 +508,7 @@ def test_reload_sink(self): SINK_NAME = "test-reload-sink%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) uri = self._init_bigquery_dataset() - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri) + sink = Config.CLIENT.sink(SINK_NAME, filter_=DEFAULT_FILTER, destination=uri) self.assertFalse(sink.exists()) retry(sink.create)() self.to_delete.append(sink) @@ -517,7 +526,9 @@ def test_update_sink(self): bucket_uri = self._init_storage_bucket() dataset_uri = self._init_bigquery_dataset() UPDATED_FILTER = "logName:syslog" - sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, bucket_uri) + sink = Config.CLIENT.sink( + SINK_NAME, filter_=DEFAULT_FILTER, destination=bucket_uri + ) self.assertFalse(sink.exists()) retry(sink.create)() self.to_delete.append(sink) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py new file mode 100644 index 000000000000..469684436dae --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -0,0 +1,4382 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
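The generated tests in this file all follow one mocking pattern: build a client with anonymous credentials, patch ``__call__`` on the relevant transport method, and assert on the request proto and the response type, so no network call is ever made. A condensed sketch of that pattern, using only names that appear in the tests below:

    import mock

    from google.auth import credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
        # Fake the gRPC stub's return value instead of hitting the API.
        call.return_value = logging_config.ListBucketsResponse()
        client.list_buckets(logging_config.ListBucketsRequest())
        # The stub was invoked exactly once, with the request proto.
        assert len(call.mock_calls) == 1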
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.config_service_v2 import ( + ConfigServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.types import logging_config +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConfigServiceV2Client._get_default_mtls_endpoint(None) is None + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +def test_config_service_v2_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + +def test_config_service_v2_client_get_transport_class(): + transport = ConfigServiceV2Client.get_transport_class() + assert transport == transports.ConfigServiceV2GrpcTransport + + transport = ConfigServiceV2Client.get_transport_class("grpc") + assert transport == transports.ConfigServiceV2GrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigServiceV2Client, 
transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2AsyncClient), +) +def test_config_service_v2_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2AsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_config_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_config_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_config_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_config_service_v2_client_client_options_from_dict(): + with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConfigServiceV2Client( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_buckets( + transport: str = "grpc", request_type=logging_config.ListBucketsRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListBucketsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListBucketsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_buckets_from_dict(): + test_list_buckets(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_buckets_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListBucketsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBucketsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_buckets_async_from_dict(): + await test_list_buckets_async(request_type=dict) + + +def test_list_buckets_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListBucketsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = logging_config.ListBucketsResponse() + + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_buckets_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListBucketsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) + + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_buckets_flattened(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_buckets(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_buckets_flattened_error(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_buckets( + logging_config.ListBucketsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_buckets_flattened_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_buckets(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_buckets_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_buckets( + logging_config.ListBucketsRequest(), parent="parent_value", + ) + + +def test_list_buckets_pager(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token="abc", + ), + logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), + logging_config.ListBucketsResponse( + buckets=[logging_config.LogBucket(),], next_page_token="ghi", + ), + logging_config.ListBucketsResponse( + buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_buckets(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogBucket) for i in results) + + +def test_list_buckets_pages(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token="abc", + ), + logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), + logging_config.ListBucketsResponse( + buckets=[logging_config.LogBucket(),], next_page_token="ghi", + ), + logging_config.ListBucketsResponse( + buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + ), + RuntimeError, + ) + pages = list(client.list_buckets(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_buckets_async_pager(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListBucketsResponse(buckets=[], next_page_token="def",),
+            logging_config.ListBucketsResponse(
+                buckets=[logging_config.LogBucket(),], next_page_token="ghi",
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[logging_config.LogBucket(), logging_config.LogBucket(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_buckets(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogBucket) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_pages():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListBucketsResponse(buckets=[], next_page_token="def",),
+            logging_config.ListBucketsResponse(
+                buckets=[logging_config.LogBucket(),], next_page_token="ghi",
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[logging_config.LogBucket(), logging_config.LogBucket(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_buckets(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_bucket(
+    transport: str = "grpc", request_type=logging_config.GetBucketRequest
+):
+    client = ConfigServiceV2Client(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogBucket(
+            name="name_value",
+            description="description_value",
+            retention_days=1512,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        )
+
+        response = client.get_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == logging_config.GetBucketRequest()
+
+    # Establish that the response is the type that we expect.
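+    # Each field below should round-trip unchanged from the return value
+    # designated above.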
+ + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_get_bucket_from_dict(): + test_get_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_bucket_async_from_dict(): + await test_get_bucket_async(request_type=dict) + + +def test_get_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_bucket( + transport: str = "grpc", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + response = client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_update_bucket_from_dict(): + test_update_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) + + +def test_update_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
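+    # The client is expected to surface this as request metadata of the
+    # form ("x-goog-request-params", "name=name/value"), asserted below.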
+ request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_sinks( + transport: str = "grpc", request_type=logging_config.ListSinksRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListSinksRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListSinksPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_sinks_from_dict(): + test_list_sinks(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_sinks_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListSinksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSinksAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_sinks_async_from_dict(): + await test_list_sinks_async(request_type=dict) + + +def test_list_sinks_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListSinksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = logging_config.ListSinksResponse() + + client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sinks_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListSinksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse() + ) + + await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_sinks_flattened(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sinks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_sinks_flattened_error(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
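+    # The generated surface accepts either a populated request object or
+    # flattened keyword arguments, never both in the same call.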
+    with pytest.raises(ValueError):
+        client.list_sinks(
+            logging_config.ListSinksRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.ListSinksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_sinks(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_sinks(
+            logging_config.ListSinksRequest(), parent="parent_value",
+        )
+
+
+def test_list_sinks_pager():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListSinksResponse(sinks=[], next_page_token="def",),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(),], next_page_token="ghi",
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(), logging_config.LogSink(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_sinks(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogSink) for i in results)
+
+
+def test_list_sinks_pages():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+        # Set the response to a series of pages.
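+        # side_effect yields one response per invocation; the trailing
+        # RuntimeError guards against reading past the last page, whose
+        # empty next_page_token stops the pager.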
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListSinksResponse(sinks=[], next_page_token="def",),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(),], next_page_token="ghi",
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(), logging_config.LogSink(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_sinks(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_pager():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListSinksResponse(sinks=[], next_page_token="def",),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(),], next_page_token="ghi",
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(), logging_config.LogSink(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_sinks(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogSink) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_pages():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_config.ListSinksResponse(sinks=[], next_page_token="def",),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(),], next_page_token="ghi",
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[logging_config.LogSink(), logging_config.LogSink(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_sinks(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest):
+    client = ConfigServiceV2Client(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
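+        # bigquery_options is populated only to exercise message
+        # construction; it is not asserted on below.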
+ call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + bigquery_options=logging_config.BigQueryOptions( + use_partitioned_tables=True + ), + ) + + response = client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetSinkRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +def test_get_sink_from_dict(): + test_get_sink(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + + response = await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_get_sink_async_from_dict(): + await test_get_sink_async(request_type=dict) + + +def test_get_sink_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
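+    # Patching "__call__" on type(...) works because Python looks up
+    # special methods on the type, so invocations of the transport's
+    # multicallable instance are intercepted.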
+    with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+        call.return_value = logging_config.LogSink()
+
+        client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_sink_field_headers_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetSinkRequest()
+    request.sink_name = "sink_name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.LogSink()
+        )
+
+        await client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+def test_get_sink_flattened():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_sink(sink_name="sink_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].sink_name == "sink_name_value"
+
+
+def test_get_sink_flattened_error():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_sink(
+            logging_config.GetSinkRequest(), sink_name="sink_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.LogSink()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_sink(sink_name="sink_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].sink_name == "sink_name_value" + + +@pytest.mark.asyncio +async def test_get_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sink( + logging_config.GetSinkRequest(), sink_name="sink_name_value", + ) + + +def test_create_sink( + transport: str = "grpc", request_type=logging_config.CreateSinkRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + bigquery_options=logging_config.BigQueryOptions( + use_partitioned_tables=True + ), + ) + + response = client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateSinkRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +def test_create_sink_from_dict(): + test_create_sink(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + + response = await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_create_sink_async_from_dict(): + await test_create_sink_async(request_type=dict) + + +def test_create_sink_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_sink_flattened(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_sink( + parent="parent_value", sink=logging_config.LogSink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].sink == logging_config.LogSink(name="name_value")
+
+
+def test_create_sink_flattened_error():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_sink(
+            logging_config.CreateSinkRequest(),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.LogSink()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_sink(
+            parent="parent_value", sink=logging_config.LogSink(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].sink == logging_config.LogSink(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_create_sink_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_sink(
+            logging_config.CreateSinkRequest(),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
+        )
+
+
+def test_update_sink(
+    transport: str = "grpc", request_type=logging_config.UpdateSinkRequest
+):
+    client = ConfigServiceV2Client(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink(
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity="writer_identity_value",
+            include_children=True,
+            bigquery_options=logging_config.BigQueryOptions(
+                use_partitioned_tables=True
+            ),
+        )
+
+        response = client.update_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == logging_config.UpdateSinkRequest()
+
+    # Establish that the response is the type that we expect.
+ + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +def test_update_sink_from_dict(): + test_update_sink(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + + response = await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + + assert response.name == "name_value" + + assert response.destination == "destination_value" + + assert response.filter == "filter_value" + + assert response.description == "description_value" + + assert response.disabled is True + + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + + assert response.writer_identity == "writer_identity_value" + + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_update_sink_async_from_dict(): + await test_update_sink_async(request_type=dict) + + +def test_update_sink_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
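+    # Each mock_calls entry unpacks as (name, args, kwargs); the routing
+    # header travels in the "metadata" keyword argument.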
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_sink_field_headers_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateSinkRequest()
+    request.sink_name = "sink_name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.LogSink()
+        )
+
+        await client.update_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+def test_update_sink_flattened():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_sink(
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].sink_name == "sink_name_value"
+
+        assert args[0].sink == logging_config.LogSink(name="name_value")
+
+        assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
+def test_update_sink_flattened_error():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_sink(
+            logging_config.UpdateSinkRequest(),
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.LogSink()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_sink(
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].sink_name == "sink_name_value" + + assert args[0].sink == logging_config.LogSink(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_sink( + transport: str = "grpc", request_type=logging_config.DeleteSinkRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteSinkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_sink_from_dict(): + test_delete_sink(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteSinkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_sink_async_from_dict(): + await test_delete_sink_async(request_type=dict) + + +def test_delete_sink_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
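+    # DeleteSink returns google.protobuf.Empty, which the client surfaces
+    # as None; hence the mocked return value of None below.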
+    with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+        call.return_value = None
+
+        client.delete_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_field_headers_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.DeleteSinkRequest()
+    request.sink_name = "sink_name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+        await client.delete_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"]
+
+
+def test_delete_sink_flattened():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_sink(sink_name="sink_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].sink_name == "sink_name_value"
+
+
+def test_delete_sink_flattened_error():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_sink(
+            logging_config.DeleteSinkRequest(), sink_name="sink_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_sink(sink_name="sink_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].sink_name == "sink_name_value" + + +@pytest.mark.asyncio +async def test_delete_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_sink( + logging_config.DeleteSinkRequest(), sink_name="sink_name_value", + ) + + +def test_list_exclusions( + transport: str = "grpc", request_type=logging_config.ListExclusionsRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListExclusionsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListExclusionsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_exclusions_from_dict(): + test_list_exclusions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListExclusionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExclusionsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) + + +def test_list_exclusions_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ call.return_value = logging_config.ListExclusionsResponse()
+
+ client.list_exclusions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_field_headers_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = logging_config.ListExclusionsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListExclusionsResponse()
+ )
+
+ await client.list_exclusions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_exclusions_flattened():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.ListExclusionsResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_exclusions(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+def test_list_exclusions_flattened_error():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_exclusions(
+ logging_config.ListExclusionsRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_flattened_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListExclusionsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_exclusions(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
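+ # The flattened keyword arguments are copied into a freshly constructed
+ # request message before the RPC fires, which is why the assertions below
+ # inspect fields on args[0] rather than separate call parameters.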
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_flattened_error_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_exclusions(
+ logging_config.ListExclusionsRequest(), parent="parent_value",
+ )
+
+
+def test_list_exclusions_pager():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ next_page_token="abc",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[], next_page_token="def",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[logging_config.LogExclusion(),], next_page_token="ghi",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_exclusions(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, logging_config.LogExclusion) for i in results)
+
+
+def test_list_exclusions_pages():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ next_page_token="abc",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[], next_page_token="def",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[logging_config.LogExclusion(),], next_page_token="ghi",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_exclusions(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_pager():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
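+ # side_effect consumes one element per invocation, so the tuple below
+ # simulates four pages keyed by next_page_token ("abc" -> "def" -> "ghi" ->
+ # ""); the trailing RuntimeError would surface if the pager over-fetched.
+ # Roughly (illustrative sketch):
+ #
+ #     call.side_effect = (page_1, page_2, RuntimeError)
+ #     call()  # -> page_1
+ #     call()  # -> page_2
+ #     call()  # raises RuntimeError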
+ call.side_effect = (
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ next_page_token="abc",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[], next_page_token="def",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[logging_config.LogExclusion(),], next_page_token="ghi",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_exclusions(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, logging_config.LogExclusion) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_pages():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ next_page_token="abc",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[], next_page_token="def",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[logging_config.LogExclusion(),], next_page_token="ghi",
+ ),
+ logging_config.ListExclusionsResponse(
+ exclusions=[
+ logging_config.LogExclusion(),
+ logging_config.LogExclusion(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_exclusions(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_get_exclusion(
+ transport: str = "grpc", request_type=logging_config.GetExclusionRequest
+):
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.LogExclusion(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ disabled=True,
+ )
+
+ response = client.get_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetExclusionRequest()
+
+ # Establish that the response is the type that we expect.
+ + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +def test_get_exclusion_from_dict(): + test_get_exclusion(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + + response = await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_get_exclusion_async_from_dict(): + await test_get_exclusion_async(request_type=dict) + + +def test_get_exclusion_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_exclusion_flattened():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.LogExclusion()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_exclusion(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_get_exclusion_flattened_error():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_exclusion(
+ logging_config.GetExclusionRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_flattened_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_exclusion(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_flattened_error_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_exclusion(
+ logging_config.GetExclusionRequest(), name="name_value",
+ )
+
+
+def test_create_exclusion(
+ transport: str = "grpc", request_type=logging_config.CreateExclusionRequest
+):
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.LogExclusion(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ disabled=True,
+ )
+
+ response = client.create_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +def test_create_exclusion_from_dict(): + test_create_exclusion(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + + response = await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) + + +def test_create_exclusion_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion()
+ )
+
+ await client.create_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_exclusion_flattened():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.LogExclusion()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_exclusion(
+ parent="parent_value",
+ exclusion=logging_config.LogExclusion(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].exclusion == logging_config.LogExclusion(name="name_value")
+
+
+def test_create_exclusion_flattened_error():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_exclusion(
+ logging_config.CreateExclusionRequest(),
+ parent="parent_value",
+ exclusion=logging_config.LogExclusion(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_flattened_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_exclusion(
+ parent="parent_value",
+ exclusion=logging_config.LogExclusion(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].exclusion == logging_config.LogExclusion(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_flattened_error_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
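+ # The generated clients treat the positional request object and the
+ # flattened keyword arguments as mutually exclusive, so a mixed call such
+ # as the one below raises ValueError before any RPC is attempted.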
+ with pytest.raises(ValueError): + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + +def test_update_exclusion( + transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + + response = client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +def test_update_exclusion_from_dict(): + test_update_exclusion(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + + response = await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) + + +def test_update_exclusion_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
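+ # The "x-goog-request-params" metadata asserted in these field-header tests
+ # is derived from the request's routing fields; conceptually (illustrative
+ # sketch, not the exact generated code):
+ #
+ #     gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+ #     # -> ("x-goog-request-params", "name=name/value")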
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_exclusion_flattened(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_exclusion( + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].exclusion == logging_config.LogExclusion(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_exclusion_flattened_error(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_exclusion(
+ name="name_value",
+ exclusion=logging_config.LogExclusion(name="name_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert args[0].exclusion == logging_config.LogExclusion(name="name_value")
+
+ assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_flattened_error_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_exclusion(
+ logging_config.UpdateExclusionRequest(),
+ name="name_value",
+ exclusion=logging_config.LogExclusion(name="name_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+
+def test_delete_exclusion(
+ transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest
+):
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ response = client.delete_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteExclusionRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_exclusion_from_dict():
+ test_delete_exclusion(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ response = await client.delete_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.DeleteExclusionRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) + + +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = None + + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_exclusion_flattened(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_exclusion(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_exclusion_flattened_error(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_exclusion(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_flattened_error_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_exclusion(
+ logging_config.DeleteExclusionRequest(), name="name_value",
+ )
+
+
+def test_get_cmek_settings(
+ transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest
+):
+ client = ConfigServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_cmek_settings), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.CmekSettings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ service_account_id="service_account_id_value",
+ )
+
+ response = client.get_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetCmekSettingsRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, logging_config.CmekSettings)
+
+ assert response.name == "name_value"
+
+ assert response.kms_key_name == "kms_key_name_value"
+
+ assert response.service_account_id == "service_account_id_value"
+
+
+def test_get_cmek_settings_from_dict():
+ test_get_cmek_settings(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_cmek_settings_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_cmek_settings), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.CmekSettings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ service_account_id="service_account_id_value",
+ )
+ )
+
+ response = await client.get_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == logging_config.GetCmekSettingsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.CmekSettings) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + assert response.service_account_id == "service_account_id_value" + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) + + +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_cmek_settings( + transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + service_account_id="service_account_id_value", + ) + + response = client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. 
+ + assert isinstance(response, logging_config.CmekSettings) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + assert response.service_account_id == "service_account_id_value" + + +def test_update_cmek_settings_from_dict(): + test_update_cmek_settings(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async( + transport: str = "grpc_asyncio", + request_type=logging_config.UpdateCmekSettingsRequest, +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + service_account_id="service_account_id_value", + ) + ) + + response = await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + assert response.service_account_id == "service_account_id_value" + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) + + +def test_update_cmek_settings_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) + + +def test_config_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.ConfigServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_config_service_v2_base_transport(): + # Instantiate the base transport. 
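+ # ConfigServiceV2Transport is an abstract base: every RPC attribute raises
+ # NotImplementedError until a concrete transport (gRPC or gRPC-asyncio)
+ # overrides it, which is exactly what the loop below verifies.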
+ with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConfigServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_buckets", + "get_bucket", + "update_bucket", + "list_sinks", + "get_sink", + "create_sink", + "update_sink", + "delete_sink", + "list_exclusions", + "get_exclusion", + "create_exclusion", + "update_exclusion", + "delete_exclusion", + "get_cmek_settings", + "update_cmek_settings", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_config_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", + ) + + +def test_config_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport() + adc.assert_called_once() + + +def test_config_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id=None, + ) + + +def test_config_service_v2_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
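+ # "ADC" is Application Default Credentials. Outside of tests the lookup is
+ # roughly (illustrative sketch; the transport supplies its own scope set):
+ #
+ #     import google.auth
+ #     creds, project = google.auth.default(
+ #         scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ #     )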
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.ConfigServiceV2GrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", + ) + + +def test_config_service_v2_host_no_port(): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com" + ), + ) + assert client.transport._host == "logging.googleapis.com:443" + + +def test_config_service_v2_host_with_port(): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), + ) + assert client.transport._host == "logging.googleapis.com:8000" + + +def test_config_service_v2_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.ConfigServiceV2GrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_config_service_v2_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == 
mock_ssl_cred + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cmek_settings_path(): + project = "squid" + + expected = "projects/{project}/cmekSettings".format(project=project,) + actual = ConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + + +def test_log_bucket_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, location=location, bucket=bucket, + ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "bucket": "mussel", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + + +def test_log_exclusion_path(): + project = "winkle" + exclusion = "nautilus" + + expected = "projects/{project}/exclusions/{exclusion}".format( + project=project, exclusion=exclusion, + ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "scallop", + "exclusion": "abalone", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. 
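+ # For example, the kwargs above render as
+ # "projects/scallop/exclusions/abalone"; parsing should recover the dict.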
+ actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + + +def test_log_sink_path(): + project = "squid" + sink = "clam" + + expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "whelk", + "sink": "octopus", + } + path = ConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + + expected = "folders/{folder}".format(folder=folder,) + actual = ConfigServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ConfigServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + + expected = "organizations/{organization}".format(organization=organization,) + actual = ConfigServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ConfigServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + + expected = "projects/{project}".format(project=project,) + actual = ConfigServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ConfigServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ConfigServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ConfigServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
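+ # For example, the kwargs above render as "projects/whelk/locations/octopus";
+ # parsing should recover the dict.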
+ actual = ConfigServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py new file mode 100644 index 000000000000..2c08f63b296a --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -0,0 +1,2166 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import ( + LoggingServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from google.logging.type import http_request_pb2 as http_request # type: ignore +from google.logging.type import log_severity_pb2 as log_severity # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 as gp_any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
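+# For illustration: any DEFAULT_ENDPOINT containing "localhost" is mapped to
+# "foo.googleapis.com", while a production endpoint such as
+# "logging.googleapis.com" is returned unchanged.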
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +def test_logging_service_v2_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + +def test_logging_service_v2_client_get_transport_class(): + transport = LoggingServiceV2Client.get_transport_class() + assert transport == transports.LoggingServiceV2GrpcTransport + + transport = LoggingServiceV2Client.get_transport_class("grpc") + assert transport == transports.LoggingServiceV2GrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
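+ # "squid.clam.whelk" is an arbitrary placeholder; the expectation is simply
+ # that a user-supplied api_endpoint reaches the transport verbatim as host.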
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
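+ # Only "true" and "false" are meaningful for this variable; anything else
+ # should abort client construction with a ValueError rather than being
+ # silently ignored.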
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_logging_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
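+ # SslCredentials is patched below so that is_mtls toggles the outcome:
+ # True should select DEFAULT_MTLS_ENDPOINT plus the mocked ssl credentials,
+ # False the plain endpoint with no client certificate.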
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_logging_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_logging_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
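+ # The path is forwarded untouched; nothing tries to open the file because
+ # the transport constructor itself is mocked out.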
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_logging_service_v2_client_client_options_from_dict(): + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LoggingServiceV2Client( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_from_dict(): + test_delete_log(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_log_async( + transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_async_from_dict(): + await test_delete_log_async(request_type=dict) + + +def test_delete_log_field_headers(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
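+ # Concretely, the client should attach the routing metadata
+ # ("x-goog-request-params", "log_name=log_name/value") to the call.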
+ request = logging.DeleteLogRequest()
+ request.log_name = "log_name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ call.return_value = None
+
+ client.delete_log(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_log_field_headers_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = logging.DeleteLogRequest()
+ request.log_name = "log_name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ await client.delete_log(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"]
+
+
+def test_delete_log_flattened():
+ client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_log(log_name="log_name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].log_name == "log_name_value"
+
+
+def test_delete_log_flattened_error():
+ client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_log(
+ logging.DeleteLogRequest(), log_name="log_name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_log_flattened_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_log(log_name="log_name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].log_name == "log_name_value" + + +@pytest.mark.asyncio +async def test_delete_log_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_log( + logging.DeleteLogRequest(), log_name="log_name_value", + ) + + +def test_write_log_entries( + transport: str = "grpc", request_type=logging.WriteLogEntriesRequest +): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + + response = client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.WriteLogEntriesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging.WriteLogEntriesResponse) + + +def test_write_log_entries_from_dict(): + test_write_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_write_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) + + response = await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.WriteLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_write_log_entries_async_from_dict(): + await test_write_log_entries_async(request_type=dict) + + +def test_write_log_entries_flattened(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
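+ # The literals below are placeholders; each keyword should be copied onto
+ # the matching field of the WriteLogEntriesRequest the client assembles.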
+ client.write_log_entries(
+ log_name="log_name_value",
+ resource=monitored_resource.MonitoredResource(type="type__value"),
+ labels={"key_value": "value_value"},
+ entries=[log_entry.LogEntry(log_name="log_name_value")],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].log_name == "log_name_value"
+
+ assert args[0].resource == monitored_resource.MonitoredResource(
+ type="type__value"
+ )
+
+ assert args[0].labels == {"key_value": "value_value"}
+
+ assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")]
+
+
+def test_write_log_entries_flattened_error():
+ client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.write_log_entries(
+ logging.WriteLogEntriesRequest(),
+ log_name="log_name_value",
+ resource=monitored_resource.MonitoredResource(type="type__value"),
+ labels={"key_value": "value_value"},
+ entries=[log_entry.LogEntry(log_name="log_name_value")],
+ )
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_flattened_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.write_log_entries), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.WriteLogEntriesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.write_log_entries(
+ log_name="log_name_value",
+ resource=monitored_resource.MonitoredResource(type="type__value"),
+ labels={"key_value": "value_value"},
+ entries=[log_entry.LogEntry(log_name="log_name_value")],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].log_name == "log_name_value"
+
+ assert args[0].resource == monitored_resource.MonitoredResource(
+ type="type__value"
+ )
+
+ assert args[0].labels == {"key_value": "value_value"}
+
+ assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")]
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_flattened_error_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.write_log_entries(
+ logging.WriteLogEntriesRequest(),
+ log_name="log_name_value",
+ resource=monitored_resource.MonitoredResource(type="type__value"),
+ labels={"key_value": "value_value"},
+ entries=[log_entry.LogEntry(log_name="log_name_value")],
+ )
+
+
+def test_list_log_entries(
+ transport: str = "grpc", request_type=logging.ListLogEntriesRequest
+):
+ client = LoggingServiceV2Client(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogEntriesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListLogEntriesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_log_entries_from_dict(): + test_list_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_log_entries_async_from_dict(): + await test_list_log_entries_async(request_type=dict) + + +def test_list_log_entries_flattened(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_log_entries( + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource_names == ["resource_names_value"] + + assert args[0].filter == "filter_value" + + assert args[0].order_by == "order_by_value" + + +def test_list_log_entries_flattened_error(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_log_entries(
+ logging.ListLogEntriesRequest(),
+ resource_names=["resource_names_value"],
+ filter="filter_value",
+ order_by="order_by_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_flattened_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.ListLogEntriesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_log_entries(
+ resource_names=["resource_names_value"],
+ filter="filter_value",
+ order_by="order_by_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].resource_names == ["resource_names_value"]
+
+ assert args[0].filter == "filter_value"
+
+ assert args[0].order_by == "order_by_value"
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_flattened_error_async():
+ client = LoggingServiceV2AsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_log_entries(
+ logging.ListLogEntriesRequest(),
+ resource_names=["resource_names_value"],
+ filter="filter_value",
+ order_by="order_by_value",
+ )
+
+
+def test_list_log_entries_pager():
+ client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ logging.ListLogEntriesResponse(
+ entries=[
+ log_entry.LogEntry(),
+ log_entry.LogEntry(),
+ log_entry.LogEntry(),
+ ],
+ next_page_token="abc",
+ ),
+ logging.ListLogEntriesResponse(entries=[], next_page_token="def",),
+ logging.ListLogEntriesResponse(
+ entries=[log_entry.LogEntry(),], next_page_token="ghi",
+ ),
+ logging.ListLogEntriesResponse(
+ entries=[log_entry.LogEntry(), log_entry.LogEntry(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ pager = client.list_log_entries(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, log_entry.LogEntry) for i in results)
+
+
+def test_list_log_entries_pages():
+ client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ # Set the response to a series of pages.
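+ # The fixture shared by these paging tests: four pages whose tokens are
+ # "abc", "def", "ghi" and finally empty; the trailing RuntimeError only
+ # surfaces if a fifth page were (incorrectly) requested.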
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token="abc", + ), + logging.ListLogEntriesResponse(entries=[], next_page_token="def",), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(),], next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + ), + RuntimeError, + ) + pages = list(client.list_log_entries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_log_entries_async_pager(): + client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token="abc", + ), + logging.ListLogEntriesResponse(entries=[], next_page_token="def",), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(),], next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + ), + RuntimeError, + ) + async_pager = await client.list_log_entries(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, log_entry.LogEntry) for i in responses) + + +@pytest.mark.asyncio +async def test_list_log_entries_async_pages(): + client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token="abc", + ), + logging.ListLogEntriesResponse(entries=[], next_page_token="def",), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(),], next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_log_entries(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_monitored_resource_descriptors( + transport: str = "grpc", + request_type=logging.ListMonitoredResourceDescriptorsRequest, +): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_monitored_resource_descriptors_from_dict(): + test_list_monitored_resource_descriptors(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async( + transport: str = "grpc_asyncio", + request_type=logging.ListMonitoredResourceDescriptorsRequest, +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_from_dict(): + await test_list_monitored_resource_descriptors_async(request_type=dict) + + +def test_list_monitored_resource_descriptors_pager(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Set the response to a series of pages. 
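+ # Same four-page layout as the log-entry pager above, but yielding
+ # MonitoredResourceDescriptor messages.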
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="abc", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], next_page_token="def", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="ghi", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_monitored_resource_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all( + isinstance(i, monitored_resource.MonitoredResourceDescriptor) + for i in results + ) + + +def test_list_monitored_resource_descriptors_pages(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="abc", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], next_page_token="def", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="ghi", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_monitored_resource_descriptors(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pager(): + client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="abc", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], next_page_token="def", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="ghi", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_monitored_resource_descriptors(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, monitored_resource.MonitoredResourceDescriptor) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pages(): + client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="abc", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], next_page_token="def", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + ], + next_page_token="ghi", + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource.MonitoredResourceDescriptor(), + monitored_resource.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_monitored_resource_descriptors(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse( + log_names=["log_names_value"], next_page_token="next_page_token_value", + ) + + response = client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListLogsPager) + + assert response.log_names == ["log_names_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_logs_from_dict(): + test_list_logs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_logs_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], next_page_token="next_page_token_value", + ) + ) + + response = await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogsAsyncPager) + + assert response.log_names == ["log_names_value"] + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_logs_async_from_dict(): + await test_list_logs_async(request_type=dict) + + +def test_list_logs_field_headers(): + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.ListLogsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = logging.ListLogsResponse() + + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_logs_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.ListLogsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse() + ) + + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_logs_flattened():
+    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging.ListLogsResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_logs(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+def test_list_logs_flattened_error():
+    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_logs(
+            logging.ListLogsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging.ListLogsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_logs(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_error_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_logs(
+            logging.ListLogsRequest(), parent="parent_value",
+        )
+
+
+def test_list_logs_pager():
+    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+        # Set the response to a series of pages.
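+        # side_effect hands out one response per invocation, so each page
+        # fetch consumes the next element; the trailing RuntimeError fails
+        # the test loudly if the pager ever fetches past the final page.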
+        call.side_effect = (
+            logging.ListLogsResponse(
+                log_names=[str(), str(), str(),], next_page_token="abc",
+            ),
+            logging.ListLogsResponse(log_names=[], next_page_token="def",),
+            logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",),
+            logging.ListLogsResponse(log_names=[str(), str(),],),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_logs(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, str) for i in results)
+
+
+def test_list_logs_pages():
+    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging.ListLogsResponse(
+                log_names=[str(), str(), str(),], next_page_token="abc",
+            ),
+            logging.ListLogsResponse(log_names=[], next_page_token="def",),
+            logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",),
+            logging.ListLogsResponse(log_names=[str(), str(),],),
+            RuntimeError,
+        )
+        pages = list(client.list_logs(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async_pager():
+    client = LoggingServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging.ListLogsResponse(
+                log_names=[str(), str(), str(),], next_page_token="abc",
+            ),
+            logging.ListLogsResponse(log_names=[], next_page_token="def",),
+            logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",),
+            logging.ListLogsResponse(log_names=[str(), str(),],),
+            RuntimeError,
+        )
+        async_pager = await client.list_logs(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, str) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async_pages():
+    client = LoggingServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging.ListLogsResponse(
+                log_names=[str(), str(), str(),], next_page_token="abc",
+            ),
+            logging.ListLogsResponse(log_names=[], next_page_token="def",),
+            logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",),
+            logging.ListLogsResponse(log_names=[str(), str(),],),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_logs(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+ transport = transports.LoggingServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = LoggingServiceV2Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,) + + +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
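+    # The base class only defines the method surface; the concrete gRPC
+    # transports (sync and asyncio) each override these callables.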
+ methods = ( + "delete_log", + "write_log_entries", + "list_log_entries", + "list_monitored_resource_descriptors", + "list_logs", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_logging_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +def test_logging_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport() + adc.assert_called_once() + + +def test_logging_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +def test_logging_service_v2_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+    with mock.patch.object(auth, "default") as adc:
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transports.LoggingServiceV2GrpcTransport(
+            host="squid.clam.whelk", quota_project_id="octopus"
+        )
+        adc.assert_called_once_with(
+            scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_logging_service_v2_host_no_port():
+    client = LoggingServiceV2Client(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="logging.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "logging.googleapis.com:443"
+
+
+def test_logging_service_v2_host_with_port():
+    client = LoggingServiceV2Client(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="logging.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "logging.googleapis.com:8000"
+
+
+def test_logging_service_v2_grpc_transport_channel():
+    channel = grpc.insecure_channel("http://localhost/")
+
+    # Check that channel is used if provided.
+    transport = transports.LoggingServiceV2GrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_logging_service_v2_grpc_asyncio_transport_channel():
+    channel = aio.insecure_channel("http://localhost/")
+
+    # Check that channel is used if provided.
+    transport = transports.LoggingServiceV2GrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LoggingServiceV2GrpcTransport,
+        transports.LoggingServiceV2GrpcAsyncIOTransport,
+    ],
+)
+def test_logging_service_v2_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-platform",
+                    "https://www.googleapis.com/auth/cloud-platform.read-only",
+                    "https://www.googleapis.com/auth/logging.admin",
+                    "https://www.googleapis.com/auth/logging.read",
+                    "https://www.googleapis.com/auth/logging.write",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LoggingServiceV2GrpcTransport,
+        transports.LoggingServiceV2GrpcAsyncIOTransport,
+    ],
+)
+def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-platform",
+                    "https://www.googleapis.com/auth/cloud-platform.read-only",
+                    "https://www.googleapis.com/auth/logging.admin",
+                    "https://www.googleapis.com/auth/logging.read",
+                    "https://www.googleapis.com/auth/logging.write",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_log_path():
+    project = "squid"
+    log = "clam"
+
+    expected = "projects/{project}/logs/{log}".format(project=project, log=log,)
+    actual = LoggingServiceV2Client.log_path(project, log)
+    assert expected == actual
+
+
+def test_parse_log_path():
+    expected = {
+        "project": "whelk",
+        "log": "octopus",
+    }
+    path = LoggingServiceV2Client.log_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = LoggingServiceV2Client.parse_log_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "oyster"
+
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = LoggingServiceV2Client.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "nudibranch",
+    }
+    path = LoggingServiceV2Client.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = LoggingServiceV2Client.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "cuttlefish"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = LoggingServiceV2Client.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "mussel",
+    }
+    path = LoggingServiceV2Client.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
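+    # parse_* applies the same path template in reverse and returns the
+    # captured segments as a dict.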
+ actual = LoggingServiceV2Client.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + + expected = "organizations/{organization}".format(organization=organization,) + actual = LoggingServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LoggingServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + + expected = "projects/{project}".format(project=project,) + actual = LoggingServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LoggingServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = LoggingServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LoggingServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.LoggingServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LoggingServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + transport_class = LoggingServiceV2Client.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py new file mode 100644 index 000000000000..0cf2e894407b --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -0,0 +1,2189 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api import distribution_pb2 as distribution # type: ignore +from google.api import label_pb2 as label # type: ignore +from google.api import launch_stage_pb2 as launch_stage # type: ignore +from google.api import metric_pb2 as ga_metric # type: ignore +from google.api import metric_pb2 as metric # type: ignore +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.metrics_service_v2 import ( + MetricsServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.services.metrics_service_v2 import transports +from google.cloud.logging_v2.types import logging_metrics +from google.oauth2 import service_account +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetricsServiceV2Client._get_default_mtls_endpoint(None) is None + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +def test_metrics_service_v2_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + +def test_metrics_service_v2_client_get_transport_class(): + transport = 
MetricsServiceV2Client.get_transport_class() + assert transport == transports.MetricsServiceV2GrpcTransport + + transport = MetricsServiceV2Client.get_transport_class("grpc") + assert transport == transports.MetricsServiceV2GrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + MetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + MetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2AsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metrics_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
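+    # SslCredentials.is_mtls reports whether ADC supplied a client
+    # certificate; the property mocks below simulate both outcomes.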
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_metrics_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_metrics_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_metrics_service_v2_client_client_options_from_dict(): + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MetricsServiceV2Client( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_log_metrics( + transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest +): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.ListLogMetricsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListLogMetricsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_log_metrics_from_dict(): + test_list_log_metrics(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_log_metrics_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest +): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.ListLogMetricsRequest() + + # Establish that the response is the type that we expect. 
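+    # The raw ListLogMetricsResponse is wrapped in a pager that re-issues
+    # the RPC lazily as further pages are iterated.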
+ assert isinstance(response, pagers.ListLogMetricsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_log_metrics_async_from_dict(): + await test_list_log_metrics_async(request_type=dict) + + +def test_list_log_metrics_field_headers(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_log_metrics_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse() + ) + + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_log_metrics_flattened(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_log_metrics(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_log_metrics_flattened_error(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_log_metrics(
+            logging_metrics.ListLogMetricsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_metrics.ListLogMetricsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_log_metrics(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_log_metrics(
+            logging_metrics.ListLogMetricsRequest(), parent="parent_value",
+        )
+
+
+def test_list_log_metrics_pager():
+    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(),], next_page_token="ghi",
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_log_metrics(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_metrics.LogMetric) for i in results)
+
+
+def test_list_log_metrics_pages():
+    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(),], next_page_token="ghi",
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_log_metrics(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_pager():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(),], next_page_token="ghi",
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_log_metrics(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_metrics.LogMetric) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_pages():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token="abc",
+            ),
+            logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(),], next_page_token="ghi",
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_log_metrics(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_log_metric(
+    transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest
+):
+    client = MetricsServiceV2Client(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
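+    # __call__ is patched on the *type* of the bound stub: Python looks up
+    # special methods on the type, so patching the instance would not
+    # intercept the invocation.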
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + response = client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.GetLogMetricRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_get_log_metric_from_dict(): + test_get_log_metric(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest +): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + + response = await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.GetLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_get_log_metric_async_from_dict(): + await test_get_log_metric_async(request_type=dict) + + +def test_get_log_metric_field_headers(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + request.metric_name = "metric_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value = logging_metrics.LogMetric() + + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_field_headers_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_metrics.GetLogMetricRequest()
+    request.metric_name = "metric_name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_metrics.LogMetric()
+        )
+
+        await client.get_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"]
+
+
+def test_get_log_metric_flattened():
+    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.LogMetric()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_log_metric(metric_name="metric_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].metric_name == "metric_name_value"
+
+
+def test_get_log_metric_flattened_error():
+    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_log_metric(
+            logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_metrics.LogMetric()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_log_metric(metric_name="metric_name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
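+        # Unlike the sync variant, only a truthy call count is asserted here;
+        # the first recorded call still carries the flattened request.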
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].metric_name == "metric_name_value" + + +@pytest.mark.asyncio +async def test_get_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_log_metric( + logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value", + ) + + +def test_create_log_metric( + transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest +): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + response = client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.CreateLogMetricRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_create_log_metric_from_dict(): + test_create_log_metric(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest +): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + + response = await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.CreateLogMetricRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_create_log_metric_async_from_dict(): + await test_create_log_metric_async(request_type=dict) + + +def test_create_log_metric_field_headers(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) + + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_log_metric_flattened(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_log_metric( + parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].metric == logging_metrics.LogMetric(name="name_value") + + +def test_create_log_metric_flattened_error(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_log_metric( + parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].metric == logging_metrics.LogMetric(name="name_value") + + +@pytest.mark.asyncio +async def test_create_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + +def test_update_log_metric( + transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest +): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + response = client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + # Establish that the response is the type that we expect. 
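(Aside before the type assertions resume.) The async tests wrap every canned response in grpc_helpers_async.FakeUnaryUnaryCall because the client awaits whatever the mocked stub returns. Conceptually the fake is just an awaitable that resolves immediately to the canned message; an editor's sketch of the idea, not api_core's actual implementation:

    class FakeUnaryUnaryCallSketch:
        """Awaitable stand-in for a grpc.aio unary-unary call."""

        def __init__(self, response=None):
            self._response = response

        def __await__(self):
            async def _resolve():
                # Resolve immediately to the canned response.
                return self._response
            return _resolve().__await__()

With call.return_value set to such an object, the client's internal await of stub(request) yields the designated LogMetric (or None, for delete).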
+ + assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_update_log_metric_from_dict(): + test_update_log_metric(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest +): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + + response = await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + assert response.value_extractor == "value_extractor_value" + + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_update_log_metric_async_from_dict(): + await test_update_log_metric_async(request_type=dict) + + +def test_update_log_metric_field_headers(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) + + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + + +def test_update_log_metric_flattened(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_log_metric( + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].metric_name == "metric_name_value" + + assert args[0].metric == logging_metrics.LogMetric(name="name_value") + + +def test_update_log_metric_flattened_error(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_log_metric( + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].metric_name == "metric_name_value" + + assert args[0].metric == logging_metrics.LogMetric(name="name_value") + + +@pytest.mark.asyncio +async def test_update_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), + ) + + +def test_delete_log_metric( + transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest +): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_metric_from_dict(): + test_delete_log_metric(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest +): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_metric_async_from_dict(): + await test_delete_log_metric_async(request_type=dict) + + +def test_delete_log_metric_field_headers(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value = None + + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
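(Aside on the convention all of these field-header tests pin down.) Any request field that would have appeared in the HTTP/1.1 URI is mirrored into gRPC metadata under x-goog-request-params. A sketch of how such a tuple can be built; routing_header here is a hypothetical helper for illustration, not the client's internal one:

    from urllib.parse import quote

    def routing_header(**params):
        # URL-encode each value but keep "/" literal, since resource names
        # such as "metric_name/value" travel as path segments.
        value = "&".join(
            "{}={}".format(field, quote(str(val), safe="/"))
            for field, val in params.items()
        )
        return ("x-goog-request-params", value)

    # routing_header(metric_name="metric_name/value") returns
    # ("x-goog-request-params", "metric_name=metric_name/value"),
    # the exact tuple asserted to be present in kw["metadata"].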
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + + +def test_delete_log_metric_flattened(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_log_metric(metric_name="metric_name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].metric_name == "metric_name_value" + + +def test_delete_log_metric_flattened_error(): + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_log_metric(metric_name="metric_name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].metric_name == "metric_name_value" + + +@pytest.mark.asyncio +async def test_delete_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = MetricsServiceV2Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) + + +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
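(Aside before the loop enumerates the methods.) What "blindly raise NotImplementedError" means in practice: the abstract transport exposes one attribute per RPC and implements none of them, leaving the gRPC and gRPC-asyncio subclasses to override each one. A hypothetical class mirroring that shape:

    class BaseTransportSketch:
        """Abstract surface: one attribute per RPC, nothing implemented."""

        def list_log_metrics(self, request):
            raise NotImplementedError()

        def get_log_metric(self, request):
            raise NotImplementedError()

        # create_log_metric, update_log_metric and delete_log_metric follow
        # the same pattern, so the loop below can getattr() each name and
        # expect NotImplementedError.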
+ methods = ( + "list_log_metrics", + "get_log_metric", + "create_log_metric", + "update_log_metric", + "delete_log_metric", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_metrics_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport() + adc.assert_called_once() + + +def test_metrics_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +def test_metrics_service_v2_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
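(Aside before the body of this test.) Taken together, the ADC tests lock in a resolution order: explicit credentials win, then a credentials file, then google.auth.default(). A hedged sketch of that cascade, assembled only from the calls these assertions exercise; resolve_credentials is an illustrative helper, not the transport's actual method:

    from google import auth
    from google.api_core import exceptions

    def resolve_credentials(credentials=None, credentials_file=None,
                            scopes=(), quota_project_id=None):
        if credentials and credentials_file:
            # Mirrors the DuplicateCredentialArgs check asserted above.
            raise exceptions.DuplicateCredentialArgs()
        if credentials is not None:
            return credentials
        if credentials_file is not None:
            creds, _ = auth.load_credentials_from_file(
                credentials_file, scopes=scopes,
                quota_project_id=quota_project_id)
            return creds
        creds, _ = auth.default(scopes=scopes,
                                quota_project_id=quota_project_id)
        return creds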
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.MetricsServiceV2GrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_host_no_port(): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com" + ), + ) + assert client.transport._host == "logging.googleapis.com:443" + + +def test_metrics_service_v2_host_with_port(): + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), + ) + assert client.transport._host == "logging.googleapis.com:8000" + + +def test_metrics_service_v2_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.MetricsServiceV2GrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_metrics_service_v2_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=mock_ssl_cred, + 
quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_metric_path(): + project = "squid" + metric = "clam" + + expected = "projects/{project}/metrics/{metric}".format( + project=project, metric=metric, + ) + actual = MetricsServiceV2Client.log_metric_path(project, metric) + assert expected == actual + + +def test_parse_log_metric_path(): + expected = { + "project": "whelk", + "metric": "octopus", + } + path = MetricsServiceV2Client.log_metric_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_log_metric_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MetricsServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MetricsServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + + expected = "folders/{folder}".format(folder=folder,) + actual = MetricsServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MetricsServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. 
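(Aside on the path helpers.) Every *_path/parse_* pair tested here is the same two-way mapping: the forward direction is str.format on a resource template, the reverse is a regex with named groups. A self-contained sketch for the log-metric template, matching the reversibility checks in these tests:

    import re

    def log_metric_path(project, metric):
        return "projects/{project}/metrics/{metric}".format(
            project=project, metric=metric)

    def parse_log_metric_path(path):
        m = re.match(r"^projects/(?P<project>.+?)/metrics/(?P<metric>.+?)$", path)
        return m.groupdict() if m else {}

    # Round trip: construction followed by parsing recovers the inputs.
    assert parse_log_metric_path(log_metric_path("squid", "clam")) == {
        "project": "squid", "metric": "clam"}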
+ actual = MetricsServiceV2Client.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + + expected = "organizations/{organization}".format(organization=organization,) + actual = MetricsServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MetricsServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + + expected = "projects/{project}".format(project=project,) + actual = MetricsServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MetricsServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = MetricsServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MetricsServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + transport_class = MetricsServiceV2Client.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py deleted file mode 100644 index 0483a458296a..000000000000 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py +++ /dev/null @@ -1,604 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestConfigServiceV2Client(object): - def test_list_sinks(self): - # Setup Expected Response - next_page_token = "" - sinks_element = {} - sinks = [sinks_element] - expected_response = {"next_page_token": next_page_token, "sinks": sinks} - expected_response = logging_config_pb2.ListSinksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_sinks(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.sinks[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.ListSinksRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_sinks_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_sinks(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - description = "description-1724546052" - disabled = True - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "description": description, - "disabled": disabled, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - response = 
client.get_sink(sink_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - with pytest.raises(CustomException): - client.get_sink(sink_name) - - def test_create_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - description = "description-1724546052" - disabled = True - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "description": description, - "disabled": disabled, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - sink = {} - - response = client.create_sink(parent, sink) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - sink = {} - - with pytest.raises(CustomException): - client.create_sink(parent, sink) - - def test_update_sink(self): - # Setup Expected Response - name = "name3373707" - destination = "destination-1429847026" - filter_ = "filter-1274492040" - description = "description-1724546052" - disabled = True - writer_identity = "writerIdentity775638794" - include_children = True - expected_response = { - "name": name, - "destination": destination, - "filter": filter_, - "description": description, - "disabled": disabled, - "writer_identity": writer_identity, - "include_children": include_children, - } - expected_response = logging_config_pb2.LogSink(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - sink = {} - - response = client.update_sink(sink_name, sink) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.UpdateSinkRequest( - 
sink_name=sink_name, sink=sink - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - sink = {} - - with pytest.raises(CustomException): - client.update_sink(sink_name, sink) - - def test_delete_sink(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - client.delete_sink(sink_name) - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_sink_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - sink_name = client.sink_path("[PROJECT]", "[SINK]") - - with pytest.raises(CustomException): - client.delete_sink(sink_name) - - def test_list_exclusions(self): - # Setup Expected Response - next_page_token = "" - exclusions_element = {} - exclusions = [exclusions_element] - expected_response = { - "next_page_token": next_page_token, - "exclusions": exclusions, - } - expected_response = logging_config_pb2.ListExclusionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_exclusions(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.exclusions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.ListExclusionsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_exclusions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_exclusions(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_exclusion(self): - # Setup Expected Response - name_2 = "name2-1052831874" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name_2, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - response = client.get_exclusion(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetExclusionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - with pytest.raises(CustomException): - client.get_exclusion(name) - - def test_create_exclusion(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - exclusion = {} - - response = client.create_exclusion(parent, exclusion) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - exclusion = {} - - with pytest.raises(CustomException): - client.create_exclusion(parent, exclusion) - - def test_update_exclusion(self): - # Setup Expected Response - name_2 = "name2-1052831874" - description = "description-1724546052" - filter_ = "filter-1274492040" - disabled = True - expected_response = { - "name": name_2, - "description": description, - "filter": filter_, - "disabled": disabled, - } - expected_response = logging_config_pb2.LogExclusion(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - exclusion = {} - update_mask = {} - - response = client.update_exclusion(name, exclusion, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.UpdateExclusionRequest( - 
name=name, exclusion=exclusion, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - exclusion = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_exclusion(name, exclusion, update_mask) - - def test_delete_exclusion(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup Request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - client.delete_exclusion(name) - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.DeleteExclusionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_exclusion_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - # Setup request - name = client.exclusion_path("[PROJECT]", "[EXCLUSION]") - - with pytest.raises(CustomException): - client.delete_exclusion(name) - - def test_get_cmek_settings(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - service_account_id = "serviceAccountId-111486921" - expected_response = { - "name": name, - "kms_key_name": kms_key_name, - "service_account_id": service_account_id, - } - expected_response = logging_config_pb2.CmekSettings(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - response = client.get_cmek_settings() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.GetCmekSettingsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_cmek_settings_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - with pytest.raises(CustomException): - client.get_cmek_settings() - - def test_update_cmek_settings(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - service_account_id = "serviceAccountId-111486921" - expected_response = { - "name": name, - "kms_key_name": kms_key_name, - "service_account_id": service_account_id, - } - expected_response = logging_config_pb2.CmekSettings(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as 
create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - response = client.update_cmek_settings() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_config_pb2.UpdateCmekSettingsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_cmek_settings_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.ConfigServiceV2Client() - - with pytest.raises(CustomException): - client.update_cmek_settings() diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py deleted file mode 100644 index ef2abc733bc4..000000000000 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.api import monitored_resource_pb2 -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestLoggingServiceV2Client(object): - def test_delete_log(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - log_name = client.log_path("[PROJECT]", "[LOG]") - - client.delete_log(log_name) - - assert len(channel.requests) == 1 - expected_request = logging_pb2.DeleteLogRequest(log_name=log_name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_log_exception(self): - # Mock 
the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - log_name = client.log_path("[PROJECT]", "[LOG]") - - with pytest.raises(CustomException): - client.delete_log(log_name) - - def test_write_log_entries(self): - # Setup Expected Response - expected_response = {} - expected_response = logging_pb2.WriteLogEntriesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - entries = [] - - response = client.write_log_entries(entries) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_pb2.WriteLogEntriesRequest(entries=entries) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_write_log_entries_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - entries = [] - - with pytest.raises(CustomException): - client.write_log_entries(entries) - - def test_list_log_entries(self): - # Setup Expected Response - next_page_token = "" - entries_element = {} - entries = [entries_element] - expected_response = {"next_page_token": next_page_token, "entries": entries} - expected_response = logging_pb2.ListLogEntriesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - resource_names = [] - - paged_list_response = client.list_log_entries(resource_names) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.entries[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListLogEntriesRequest( - resource_names=resource_names - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_log_entries_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - resource_names = [] - - paged_list_response = client.list_log_entries(resource_names) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_monitored_resource_descriptors(self): - # Setup Expected Response - next_page_token = "" - resource_descriptors_element = {} - resource_descriptors = [resource_descriptors_element] - expected_response = { - "next_page_token": next_page_token, - "resource_descriptors": resource_descriptors, - } - expected_response = logging_pb2.ListMonitoredResourceDescriptorsResponse( - **expected_response - ) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - paged_list_response = client.list_monitored_resource_descriptors() - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.resource_descriptors[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_monitored_resource_descriptors_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - paged_list_response = client.list_monitored_resource_descriptors() - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_logs(self): - # Setup Expected Response - next_page_token = "" - log_names_element = "logNamesElement-1079688374" - log_names = [log_names_element] - expected_response = {"next_page_token": next_page_token, "log_names": log_names} - expected_response = logging_pb2.ListLogsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_logs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.log_names[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_pb2.ListLogsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_logs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.LoggingServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_logs(parent) - with pytest.raises(CustomException): - list(paged_list_response) diff --git a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py deleted file mode 100644 index 35201f790cd8..000000000000 --- a/packages/google-cloud-logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
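The two GAPIC test modules deleted in this patch share a single harness: each test patches google.api_core.grpc_helpers.create_channel to return a ChannelStub, so every RPC the client issues is recorded as a (method, request) pair and answered from a canned responses list, with a queued exception raised instead of returned. A dependency-free sketch of that pattern follows; the trailing usage lines are added here purely for illustration.

    class MultiCallableStub:
        """Stub for the grpc.UnaryUnaryMultiCallable interface."""

        def __init__(self, method, channel_stub):
            self.method = method
            self.channel_stub = channel_stub

        def __call__(self, request, timeout=None, metadata=None, credentials=None):
            # Record the outgoing request so the test can assert on it later.
            self.channel_stub.requests.append((self.method, request))
            response = None
            if self.channel_stub.responses:
                response = self.channel_stub.responses.pop()
            # A queued exception simulates an RPC failure.
            if isinstance(response, Exception):
                raise response
            return response


    class ChannelStub:
        """Stub for the grpc.Channel interface."""

        def __init__(self, responses=None):
            self.responses = responses if responses is not None else []
            self.requests = []

        def unary_unary(self, method, request_serializer=None, response_deserializer=None):
            return MultiCallableStub(method, self)


    # Illustrative usage, mirroring the assertions in the deleted tests:
    # the canned response comes out, the recorded request goes in.
    channel = ChannelStub(responses=["pong"])
    rpc = channel.unary_unary("/Service/Ping")
    assert rpc("ping-request") == "pong"
    assert channel.requests == [("/Service/Ping", "ping-request")]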
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import logging_v2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestMetricsServiceV2Client(object): - def test_list_log_metrics(self): - # Setup Expected Response - next_page_token = "" - metrics_element = {} - metrics = [metrics_element] - expected_response = {"next_page_token": next_page_token, "metrics": metrics} - expected_response = logging_metrics_pb2.ListLogMetricsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_log_metrics(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.metrics[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.ListLogMetricsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_log_metrics_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_log_metrics(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - response = client.get_log_metric(metric_name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
logging_metrics_pb2.GetLogMetricRequest( - metric_name=metric_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - with pytest.raises(CustomException): - client.get_log_metric(metric_name) - - def test_create_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - parent = client.project_path("[PROJECT]") - metric = {} - - response = client.create_log_metric(parent, metric) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - parent = client.project_path("[PROJECT]") - metric = {} - - with pytest.raises(CustomException): - client.create_log_metric(parent, metric) - - def test_update_log_metric(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - filter_ = "filter-1274492040" - value_extractor = "valueExtractor2047672534" - expected_response = { - "name": name, - "description": description, - "filter": filter_, - "value_extractor": value_extractor, - } - expected_response = logging_metrics_pb2.LogMetric(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - metric = {} - - response = client.update_log_metric(metric_name, metric) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - metric = {} - - with pytest.raises(CustomException): - client.update_log_metric(metric_name, metric) - - def test_delete_log_metric(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup Request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - client.delete_log_metric(metric_name) - - assert len(channel.requests) == 1 - expected_request = logging_metrics_pb2.DeleteLogMetricRequest( - metric_name=metric_name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_log_metric_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = logging_v2.MetricsServiceV2Client() - - # Setup request - metric_name = client.metric_path("[PROJECT]", "[METRIC]") - - with pytest.raises(CustomException): - client.delete_log_metric(metric_name) diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py index f606da573cec..16d3f9ba2ebd 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py +++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py @@ -36,7 +36,7 @@ def tearDownClass(cls): class TestRequestMiddleware(DjangoBase): def _get_target_class(self): - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request return request.RequestMiddleware @@ -45,7 +45,7 @@ def _make_one(self, *args, **kw): def test_process_request(self): from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request middleware = self._make_one() mock_request = RequestFactory().get("/") @@ -58,14 +58,14 @@ def test_process_request(self): class Test__get_django_request(DjangoBase): @staticmethod def _call_fut(): - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request return request._get_django_request() @staticmethod def _make_patch(new_locals): return mock.patch( - "google.cloud.logging.handlers.middleware.request._thread_locals", + "google.cloud.logging_v2.handlers.middleware.request._thread_locals", new=new_locals, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 972e3db392d1..0cd3b30d819b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -20,7 +20,7 @@ class Test_get_trace_id_from_flask(unittest.TestCase): @staticmethod def _call_fut(): - from google.cloud.logging.handlers import _helpers + from google.cloud.logging_v2.handlers import _helpers return _helpers.get_trace_id_from_flask() @@ -62,7 +62,7 @@ def test_valid_context_header(self): class Test_get_trace_id_from_django(unittest.TestCase): @staticmethod def _call_fut(): 
- from google.cloud.logging.handlers import _helpers + from google.cloud.logging_v2.handlers import _helpers return _helpers.get_trace_id_from_django() @@ -76,14 +76,14 @@ def setUp(self): def tearDown(self): from django.test.utils import teardown_test_environment - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request teardown_test_environment() request._thread_locals.__dict__.clear() def test_no_context_header(self): from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request django_request = RequestFactory().get("/") @@ -94,7 +94,7 @@ def test_no_context_header(self): def test_valid_context_header(self): from django.test import RequestFactory - from google.cloud.logging.handlers.middleware import request + from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" expected_trace_id = "testtraceiddjango" @@ -114,17 +114,17 @@ def test_valid_context_header(self): class Test_get_trace_id(unittest.TestCase): @staticmethod def _call_fut(): - from google.cloud.logging.handlers import _helpers + from google.cloud.logging_v2.handlers import _helpers return _helpers.get_trace_id() def _helper(self, django_return, flask_return): django_patch = mock.patch( - "google.cloud.logging.handlers._helpers.get_trace_id_from_django", + "google.cloud.logging_v2.handlers._helpers.get_trace_id_from_django", return_value=django_return, ) flask_patch = mock.patch( - "google.cloud.logging.handlers._helpers.get_trace_id_from_flask", + "google.cloud.logging_v2.handlers._helpers.get_trace_id_from_flask", return_value=flask_return, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index eef4ac7410e3..2a80e79b1ee4 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -22,7 +22,7 @@ class TestAppEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging.handlers.app_engine import AppEngineHandler + from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler return AppEngineHandler @@ -31,7 +31,7 @@ def _make_one(self, *args, **kw): def test_constructor_w_gae_standard_env(self): import sys - from google.cloud.logging.handlers import app_engine + from google.cloud.logging_v2.handlers import app_engine client = mock.Mock(project=self.PROJECT, spec=["project"]) @@ -57,7 +57,7 @@ def test_constructor_w_gae_standard_env(self): def test_constructor_w_gae_flex_env(self): import io - from google.cloud.logging.handlers import app_engine + from google.cloud.logging_v2.handlers import app_engine client = mock.Mock(project=self.PROJECT, spec=["project"]) name = "test-logger" @@ -106,7 +106,7 @@ def test_emit(self): def _get_gae_labels_helper(self, trace_id): get_trace_patch = mock.patch( - "google.cloud.logging.handlers.app_engine.get_trace_id", + "google.cloud.logging_v2.handlers.app_engine.get_trace_id", return_value=trace_id, ) @@ -121,7 +121,7 @@ def _get_gae_labels_helper(self, trace_id): return gae_labels def test_get_gae_labels_with_label(self): - from google.cloud.logging.handlers import app_engine + from google.cloud.logging_v2.handlers import app_engine trace_id = "test-gae-trace-id" gae_labels = 
self._get_gae_labels_helper(trace_id) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py index 09ee329ba3f2..c5d6df65f0fe 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py @@ -19,7 +19,7 @@ class TestContainerEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging.handlers.container_engine import ( + from google.cloud.logging_v2.handlers.container_engine import ( ContainerEngineHandler, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 5559791bc2fa..1c5492e1aa0e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -22,7 +22,7 @@ class TestCloudLoggingHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.handlers.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler return CloudLoggingHandler @@ -31,8 +31,8 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging.logger import _GLOBAL_RESOURCE - from google.cloud.logging.handlers.handlers import DEFAULT_LOGGER_NAME + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME client = _Client(self.PROJECT) handler = self._make_one(client, transport=_Transport) @@ -47,7 +47,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import io - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource resource = Resource("resource_type", {"resource_label": "value"}) labels = {"handler_lable": "value"} @@ -72,7 +72,7 @@ def test_ctor_explicit(self): self.assertIs(handler.stream, stream) def test_emit(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE client = _Client(self.PROJECT) handler = self._make_one( @@ -91,7 +91,7 @@ def test_emit(self): class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): - from google.cloud.logging.handlers.handlers import setup_logging + from google.cloud.logging_v2.handlers.handlers import setup_logging if excludes: return setup_logging(handler, excluded_loggers=excludes) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 7edae8a7bfa0..71d868d8690c 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -25,13 +25,15 @@ class TestBackgroundThreadHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.handlers.transports import BackgroundThreadTransport + from google.cloud.logging_v2.handlers.transports import ( + BackgroundThreadTransport, + ) return BackgroundThreadTransport def _make_one(self, *args, **kw): worker_patch = mock.patch( - "google.cloud.logging.handlers.transports." 
"background_thread._Worker", + "google.cloud.logging_v2.handlers.transports." "background_thread._Worker", autospec=True, ) with worker_patch as worker_mock: @@ -47,7 +49,7 @@ def test_constructor(self): self.assertEqual(logger.name, name) def test_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE client = _Client(self.PROJECT) name = "python_logger" @@ -61,14 +63,19 @@ def test_send(self): python_logger_name, logging.INFO, None, None, message, None, None ) - transport.send(record, message, _GLOBAL_RESOURCE) + transport.send(record, message, resource=_GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=None + record, + message, + resource=_GLOBAL_RESOURCE, + labels=None, + trace=None, + span_id=None, ) def test_trace_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE client = _Client(self.PROJECT) name = "python_logger" @@ -83,14 +90,19 @@ def test_trace_send(self): python_logger_name, logging.INFO, None, None, message, None, None ) - transport.send(record, message, _GLOBAL_RESOURCE, trace=trace) + transport.send(record, message, resource=_GLOBAL_RESOURCE, trace=trace) transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=trace, span_id=None + record, + message, + resource=_GLOBAL_RESOURCE, + labels=None, + trace=trace, + span_id=None, ) def test_span_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE client = _Client(self.PROJECT) name = "python_logger" @@ -105,10 +117,15 @@ def test_span_send(self): python_logger_name, logging.INFO, None, None, message, None, None ) - transport.send(record, message, _GLOBAL_RESOURCE, span_id=span_id) + transport.send(record, message, resource=_GLOBAL_RESOURCE, span_id=span_id) transport.worker.enqueue.assert_called_once_with( - record, message, _GLOBAL_RESOURCE, None, trace=None, span_id=span_id + record, + message, + resource=_GLOBAL_RESOURCE, + labels=None, + trace=None, + span_id=span_id, ) def test_flush(self): @@ -147,7 +164,7 @@ class Test_Worker(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread return background_thread._Worker @@ -181,7 +198,7 @@ def test_constructor(self): self.assertIsNone(worker._thread) def test_start(self): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread worker = self._make_one(_Logger(self.NAME)) @@ -200,7 +217,7 @@ def test_start(self): self.assertIs(current_thread, worker._thread) def test_stop(self): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread grace_period = 5.0 worker = self._make_one(_Logger(self.NAME)) @@ -208,7 +225,7 @@ def test_stop(self): self._start_with_thread_patch(worker) thread = worker._thread - worker.stop(grace_period) + worker.stop(grace_period=grace_period) self.assertEqual(worker._queue.qsize(), 1) self.assertEqual(worker._queue.get(), background_thread._WORKER_TERMINATOR) @@ -270,7 +287,7 @@ def _enqueue_record(worker, message, levelno=logging.INFO, **kw): def 
test_enqueue_defaults(self): import datetime - from google.cloud.logging._helpers import LogSeverity + from google.cloud.logging_v2._helpers import LogSeverity worker = self._make_one(_Logger(self.NAME)) self.assertTrue(worker._queue.empty()) @@ -290,7 +307,7 @@ def test_enqueue_defaults(self): def test_enqueue_explicit(self): import datetime - from google.cloud.logging._helpers import LogSeverity + from google.cloud.logging_v2._helpers import LogSeverity worker = self._make_one(_Logger(self.NAME)) self.assertTrue(worker._queue.empty()) @@ -322,7 +339,7 @@ def test_enqueue_explicit(self): self.assertIsInstance(entry["timestamp"], datetime.datetime) def test__thread_main(self): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread worker = self._make_one(_Logger(self.NAME)) @@ -338,7 +355,7 @@ def test__thread_main(self): self.assertEqual(worker._queue.qsize(), 0) def test__thread_main_error(self): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread worker = self._make_one(_Logger(self.NAME)) worker._cloud_logger._batch_cls = _RaisingBatch @@ -353,7 +370,7 @@ def test__thread_main_error(self): self.assertEqual(worker._queue.qsize(), 0) def test__thread_main_batches(self): - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread worker = self._make_one(_Logger(self.NAME), max_batch_size=2) @@ -379,7 +396,7 @@ def test__thread_main_max_latency(self, time): # the "change detector" test in that way. However, this is still a # useful test to verify the queue timeout is appropriately calculated. from six.moves import queue - from google.cloud.logging.handlers.transports import background_thread + from google.cloud.logging_v2.handlers.transports import background_thread # Use monotonically increasing time. 
time.side_effect = range(1, 6) @@ -489,7 +506,7 @@ def log_struct( span_id=None, timestamp=None, ): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE assert resource is None resource = _GLOBAL_RESOURCE diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index 03612e115a98..bff253f94eb9 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -21,7 +21,7 @@ class TestBaseHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.handlers.transports import Transport + from google.cloud.logging_v2.handlers.transports import Transport return Transport @@ -31,7 +31,7 @@ def _make_one(self, *args, **kw): def test_send_is_abstract(self): target = self._make_one() with self.assertRaises(NotImplementedError): - target.send(None, None, None) + target.send(None, None, resource=None) def test_flush_is_abstract_and_optional(self): target = self._make_one() diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index f2ff67d59d82..7bc2cd46fe75 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -22,7 +22,7 @@ class TestSyncHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.handlers.transports import SyncTransport + from google.cloud.logging_v2.handlers.transports import SyncTransport return SyncTransport @@ -36,8 +36,8 @@ def test_ctor(self): self.assertEqual(transport.logger.name, "python_logger") def test_send(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE - from google.cloud.logging._helpers import LogSeverity + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2._helpers import LogSeverity client = _Client(self.PROJECT) @@ -49,7 +49,7 @@ def test_send(self): python_logger_name, logging.INFO, None, None, message, None, None ) - transport.send(record, message, _GLOBAL_RESOURCE) + transport.send(record, message, resource=_GLOBAL_RESOURCE) EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} EXPECTED_SENT = ( EXPECTED_STRUCT, @@ -63,7 +63,7 @@ def test_send(self): class _Logger(object): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE def __init__(self, name): self.name = name diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index ad6ded2bd1f7..75aa20d46757 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -14,112 +14,116 @@ import unittest -from google.api_core import grpc_helpers import google.auth.credentials -from google.protobuf import empty_pb2 import mock -import google.cloud.logging -from google.cloud.logging import _gapic -from google.cloud.logging_v2.gapic import config_service_v2_client -from google.cloud.logging_v2.gapic import logging_service_v2_client -from google.cloud.logging_v2.gapic import metrics_service_v2_client -from google.cloud.logging_v2.proto import log_entry_pb2 -from 
google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 +import google.cloud.logging_v2 +from google.cloud import logging_v2 +from google.cloud.logging_v2 import _gapic +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client +from google.cloud.logging_v2.types import LogSink +from google.cloud.logging_v2.types import LogEntry as LogEntryPB PROJECT = "PROJECT" -PROJECT_PATH = "projects/%s" % (PROJECT,) +PROJECT_PATH = f"projects/{PROJECT}" FILTER = "logName:syslog AND severity>=ERROR" class Test_LoggingAPI(object): LOG_NAME = "log_name" - LOG_PATH = "projects/%s/logs/%s" % (PROJECT, LOG_NAME) + LOG_PATH = f"projects/{PROJECT}/logs/{LOG_NAME}" @staticmethod def make_logging_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel) + gapic_client = LoggingServiceV2Client() handwritten_client = mock.Mock() api = _gapic._LoggingAPI(gapic_client, handwritten_client) - return channel, api + return api def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = logging_service_v2_client.LoggingServiceV2Client(channel=channel) + gapic_client = LoggingServiceV2Client() api = _gapic._LoggingAPI(gapic_client, mock.sentinel.client) assert api._gapic_api is gapic_client assert api._client is mock.sentinel.client def test_list_entries(self): - channel, api = self.make_logging_api() + client = self.make_logging_api() - log_entry_msg = log_entry_pb2.LogEntry( - log_name=self.LOG_PATH, text_payload="text" - ) - channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( - entries=[log_entry_msg] - ) - result = api.list_entries([PROJECT], FILTER, google.cloud.logging.DESCENDING) + log_entry_msg = LogEntryPB(log_name=self.LOG_PATH, text_payload="text") + + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse( + entries=[log_entry_msg] + ) + result = client.list_entries( + [PROJECT_PATH], filter_=FILTER, order_by=logging_v2.DESCENDING + ) entries = list(result) # Check the response assert len(entries) == 1 entry = entries[0] - assert isinstance(entry, google.cloud.logging.entries.TextEntry) + + assert isinstance(entry, logging_v2.entries.TextEntry) assert entry.payload == "text" # Check the request - assert len(channel.ListLogEntries.requests) == 1 - request = channel.ListLogEntries.requests[0] - assert request.project_ids == [PROJECT] + call.assert_called_once() + request = call.call_args.args[0] + assert request.resource_names == [PROJECT_PATH] assert request.filter == FILTER - assert request.order_by == google.cloud.logging.DESCENDING + assert request.order_by == logging_v2.DESCENDING def test_list_entries_with_options(self): - channel, api = self.make_logging_api() + client = self.make_logging_api() - channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse(entries=[]) + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse(entries=[]) - result = api.list_entries( - [PROJECT], - FILTER, - google.cloud.logging.ASCENDING, - page_size=42, - page_token="token", - ) + 
result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging_v2.ASCENDING, + page_size=42, + page_token="token", + ) list(result) # Check the request - assert len(channel.ListLogEntries.requests) == 1 - request = channel.ListLogEntries.requests[0] - assert request.project_ids == [PROJECT] + call.assert_called_once() + request = call.call_args.args[0] + assert request.resource_names == [PROJECT_PATH] assert request.filter == FILTER - assert request.order_by == google.cloud.logging.ASCENDING + assert request.order_by == google.cloud.logging_v2.ASCENDING assert request.page_size == 42 assert request.page_token == "token" def test_write_entries_single(self): - channel, api = self.make_logging_api() - - channel.WriteLogEntries.response = empty_pb2.Empty() - - entry = { - "logName": self.LOG_PATH, - "resource": {"type": "global"}, - "textPayload": "text", - } - - api.write_entries([entry]) + client = self.make_logging_api() + + with mock.patch.object( + type(client._gapic_api.transport.write_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.WriteLogEntriesResponse() + entry = { + "logName": self.LOG_PATH, + "resource": {"type": "global"}, + "textPayload": "text", + } + client.write_entries([entry]) # Check the request - assert len(channel.WriteLogEntries.requests) == 1 - request = channel.WriteLogEntries.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.partial_success is False assert len(request.entries) == 1 assert request.entries[0].log_name == entry["logName"] @@ -127,152 +131,160 @@ def test_write_entries_single(self): assert request.entries[0].text_payload == "text" def test_logger_delete(self): - channel, api = self.make_logging_api() - - channel.DeleteLog.response = empty_pb2.Empty() + client = self.make_logging_api() - api.logger_delete(PROJECT, self.LOG_NAME) - - assert len(channel.DeleteLog.requests) == 1 - request = channel.DeleteLog.requests[0] - assert request.log_name == self.LOG_PATH + with mock.patch.object( + type(client._gapic_api.transport.delete_log), "__call__" + ) as call: + client.logger_delete(self.LOG_PATH) + call.assert_called_once() + assert call.call_args.args[0].log_name == self.LOG_PATH class Test_SinksAPI(object): SINK_NAME = "sink_name" - SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME) + PARENT_PATH = f"projects/{PROJECT}" + SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" DESTINATION_URI = "faux.googleapis.com/destination" SINK_WRITER_IDENTITY = "serviceAccount:project-123@example.com" @staticmethod def make_sinks_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel) + gapic_client = ConfigServiceV2Client() handwritten_client = mock.Mock() api = _gapic._SinksAPI(gapic_client, handwritten_client) - return channel, api + return api def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = config_service_v2_client.ConfigServiceV2Client(channel=channel) + gapic_client = ConfigServiceV2Client() api = _gapic._SinksAPI(gapic_client, mock.sentinel.client) assert api._gapic_api is gapic_client assert api._client is mock.sentinel.client def test_list_sinks(self): - channel, api = self.make_sinks_api() + client = self.make_sinks_api() - sink_msg = logging_config_pb2.LogSink( - name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER - ) - channel.ListSinks.response = logging_config_pb2.ListSinksResponse( - sinks=[sink_msg] + sink_msg = LogSink( + 
name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER ) + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[sink_msg]) + + result = client.list_sinks(self.PARENT_PATH,) - result = api.list_sinks(PROJECT) sinks = list(result) # Check the response assert len(sinks) == 1 sink = sinks[0] - assert isinstance(sink, google.cloud.logging.sink.Sink) - assert sink.name == self.SINK_PATH + assert isinstance(sink, google.cloud.logging_v2.sink.Sink) + assert sink.name == self.SINK_NAME assert sink.destination == self.DESTINATION_URI assert sink.filter_ == FILTER # Check the request - assert len(channel.ListSinks.requests) == 1 - request = channel.ListSinks.requests[0] - assert request.parent == PROJECT_PATH + call.assert_called_once() + request = call.call_args.args[0] + assert request.parent == self.PARENT_PATH def test_list_sinks_with_options(self): - channel, api = self.make_sinks_api() - - channel.ListSinks.response = logging_config_pb2.ListSinksResponse(sinks=[]) - - result = api.list_sinks(PROJECT, page_size=42, page_token="token") + client = self.make_sinks_api() + + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[]) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token" + ) list(result) # Check the request - assert len(channel.ListSinks.requests) == 1 - request = channel.ListSinks.requests[0] - assert request.parent == "projects/%s" % PROJECT + call.assert_called_once() + request = call.call_args.args[0] + assert request.parent == self.PARENT_PATH assert request.page_size == 42 assert request.page_token == "token" def test_sink_create(self): - channel, api = self.make_sinks_api() - - channel.CreateSink.response = logging_config_pb2.LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - - result = api.sink_create( - PROJECT, - self.SINK_NAME, - FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) + client = self.make_sinks_api() + with mock.patch.object( + type(client._gapic_api.transport.create_sink), "__call__" + ) as call: + call.return_value = logging_v2.types.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = client.sink_create( + self.PARENT_PATH, + self.SINK_NAME, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True, + ) # Check response - assert result == { - "name": self.SINK_NAME, - "filter": FILTER, - "destination": self.DESTINATION_URI, - "writerIdentity": self.SINK_WRITER_IDENTITY, - } + # TODO: response has extra fields (blank fields) is this OK? 
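The TODO above is likely explained by how these responses are rendered as dicts: google.protobuf.json_format.MessageToDict emits only explicitly set fields unless including_default_value_fields=True is in play (that flag appears verbatim later in this patch, in the _parse_log_entry expectations), so depending on how the conversion layer invokes it, a response proto can surface extra, default-valued keys. That is why the assertions below check individual keys rather than comparing the whole dict. A small sketch against the protobuf releases contemporary with this change, using the well-known type_pb2.Field message purely as a stand-in:

    from google.protobuf import type_pb2
    from google.protobuf.json_format import MessageToDict

    field = type_pb2.Field(name="foo")

    sparse = MessageToDict(field)
    full = MessageToDict(field, including_default_value_fields=True)

    assert sparse == {"name": "foo"}            # only the explicitly set field
    assert full["name"] == "foo"                # same data ...
    assert "kind" in full and "number" in full  # ... plus materialized defaults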
+ assert result["name"] == self.SINK_NAME + assert result["filter"] == FILTER + assert result["destination"] == self.DESTINATION_URI + assert result["writerIdentity"] == self.SINK_WRITER_IDENTITY # Check request - assert len(channel.CreateSink.requests) == 1 - request = channel.CreateSink.requests[0] - assert request.parent == PROJECT_PATH + call.assert_called_once() + request = call.call_args.args[0] + assert request.parent == self.PARENT_PATH assert request.unique_writer_identity is True assert request.sink.name == self.SINK_NAME assert request.sink.filter == FILTER assert request.sink.destination == self.DESTINATION_URI def test_sink_get(self): - channel, api = self.make_sinks_api() + client = self.make_sinks_api() + with mock.patch.object( + type(client._gapic_api.transport.get_sink), "__call__" + ) as call: + call.return_value = logging_v2.types.LogSink( + name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER + ) - channel.GetSink.response = logging_config_pb2.LogSink( - name=self.SINK_PATH, destination=self.DESTINATION_URI, filter=FILTER - ) - - response = api.sink_get(PROJECT, self.SINK_NAME) + response = client.sink_get(self.SINK_PATH) # Check response assert response == { - "name": self.SINK_PATH, + "name": self.SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI, } # Check request - assert len(channel.GetSink.requests) == 1 - request = channel.GetSink.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.sink_name == self.SINK_PATH def test_sink_update(self): - channel, api = self.make_sinks_api() - - channel.UpdateSink.response = logging_config_pb2.LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - - result = api.sink_update( - PROJECT, - self.SINK_NAME, - FILTER, - self.DESTINATION_URI, - unique_writer_identity=True, - ) + client = self.make_sinks_api() + with mock.patch.object( + type(client._gapic_api.transport.update_sink), "__call__" + ) as call: + call.return_value = logging_v2.types.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = client.sink_update( + self.SINK_PATH, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True, + ) # Check response assert result == { @@ -283,112 +295,116 @@ def test_sink_update(self): } # Check request - assert len(channel.UpdateSink.requests) == 1 - request = channel.UpdateSink.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.sink_name == self.SINK_PATH assert request.unique_writer_identity is True - assert request.sink.name == self.SINK_PATH + assert request.sink.name == self.SINK_NAME assert request.sink.filter == FILTER assert request.sink.destination == self.DESTINATION_URI def test_sink_delete(self): - channel, api = self.make_sinks_api() - - channel.DeleteSink.response = empty_pb2.Empty() - - api.sink_delete(PROJECT, self.SINK_NAME) - - assert len(channel.DeleteSink.requests) == 1 - request = channel.DeleteSink.requests[0] + client = self.make_sinks_api() + with mock.patch.object( + type(client._gapic_api.transport.get_sink), "__call__" + ) as call: + client.sink_delete(self.SINK_PATH) + + call.assert_called_once() + request = call.call_args.args[0] assert request.sink_name == self.SINK_PATH class Test_MetricsAPI(object): METRIC_NAME = "metric_name" - METRIC_PATH = "projects/%s/metrics/%s" % (PROJECT, METRIC_NAME) + METRIC_PATH = 
f"projects/{PROJECT}/metrics/{METRIC_NAME}" DESCRIPTION = "Description" @staticmethod def make_metrics_api(): - channel = grpc_helpers.ChannelStub() - gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel) + gapic_client = MetricsServiceV2Client() handwritten_client = mock.Mock() api = _gapic._MetricsAPI(gapic_client, handwritten_client) - return channel, api + return api def test_ctor(self): - channel = grpc_helpers.ChannelStub() - gapic_client = metrics_service_v2_client.MetricsServiceV2Client(channel=channel) + gapic_client = MetricsServiceV2Client() api = _gapic._MetricsAPI(gapic_client, mock.sentinel.client) assert api._gapic_api is gapic_client assert api._client is mock.sentinel.client def test_list_metrics(self): - channel, api = self.make_metrics_api() + client = self.make_metrics_api() - sink_msg = logging_metrics_pb2.LogMetric( + metric = logging_v2.types.LogMetric( name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER ) - channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( - metrics=[sink_msg] - ) - - result = api.list_metrics(PROJECT) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse( + metrics=[metric] + ) + result = client.list_metrics(PROJECT) metrics = list(result) # Check the response assert len(metrics) == 1 metric = metrics[0] - assert isinstance(metric, google.cloud.logging.metric.Metric) + assert isinstance(metric, google.cloud.logging_v2.metric.Metric) assert metric.name == self.METRIC_PATH assert metric.description == self.DESCRIPTION assert metric.filter_ == FILTER # Check the request - assert len(channel.ListLogMetrics.requests) == 1 - request = channel.ListLogMetrics.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.parent == PROJECT_PATH def test_list_metrics_options(self): - channel, api = self.make_metrics_api() + client = self.make_metrics_api() - channel.ListLogMetrics.response = logging_metrics_pb2.ListLogMetricsResponse( - metrics=[] - ) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse(metrics=[]) - result = api.list_metrics(PROJECT, page_size=42, page_token="token") + result = client.list_metrics(PROJECT, page_size=42, page_token="token") list(result) # Check the request - assert len(channel.ListLogMetrics.requests) == 1 - request = channel.ListLogMetrics.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.parent == PROJECT_PATH assert request.page_size == 42 assert request.page_token == "token" def test_metric_create(self): - channel, api = self.make_metrics_api() + client = self.make_metrics_api() - channel.CreateLogMetric.response = empty_pb2.Empty() - - api.metric_create(PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + with mock.patch.object( + type(client._gapic_api.transport.create_log_metric), "__call__" + ) as call: + client.metric_create(PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) # Check the request - assert len(channel.CreateLogMetric.requests) == 1 - request = channel.CreateLogMetric.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.parent == PROJECT_PATH assert request.metric.name == self.METRIC_NAME assert request.metric.filter == FILTER assert request.metric.description == self.DESCRIPTION def test_metric_get(self): - 
channel, api = self.make_metrics_api() - - channel.GetLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER - ) + client = self.make_metrics_api() - response = api.metric_get(PROJECT, self.METRIC_NAME) + with mock.patch.object( + type(client._gapic_api.transport.get_log_metric), "__call__" + ) as call: + call.return_value = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + response = client.metric_get(PROJECT, self.METRIC_NAME) # Check the response assert response == { @@ -398,20 +414,23 @@ def test_metric_get(self): } # Check the request - assert len(channel.GetLogMetric.requests) == 1 - request = channel.GetLogMetric.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.metric_name == self.METRIC_PATH def test_metric_update(self): - channel, api = self.make_metrics_api() + client = self.make_metrics_api() - channel.UpdateLogMetric.response = logging_metrics_pb2.LogMetric( - name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER - ) + with mock.patch.object( + type(client._gapic_api.transport.update_log_metric), "__call__" + ) as call: + call.return_value = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) - response = api.metric_update( - PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION - ) + response = client.metric_update( + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION + ) # Check the response assert response == { @@ -421,41 +440,39 @@ def test_metric_update(self): } # Check the request - assert len(channel.UpdateLogMetric.requests) == 1 - request = channel.UpdateLogMetric.requests[0] + call.assert_called_once() + request = call.call_args.args[0] assert request.metric_name == self.METRIC_PATH assert request.metric.name == self.METRIC_PATH assert request.metric.filter == FILTER assert request.metric.description == self.DESCRIPTION def test_metric_delete(self): - channel, api = self.make_metrics_api() - - channel.DeleteLogMetric.response = empty_pb2.Empty() - - api.metric_delete(PROJECT, self.METRIC_NAME) - - assert len(channel.DeleteLogMetric.requests) == 1 - request = channel.DeleteLogMetric.requests[0] + client = self.make_metrics_api() + with mock.patch.object( + type(client._gapic_api.transport.delete_log_metric), "__call__" + ) as call: + client.metric_delete(PROJECT, self.METRIC_NAME) + + call.assert_called_once() + request = call.call_args.args[0] assert request.metric_name == self.METRIC_PATH class Test__parse_log_entry(unittest.TestCase): @staticmethod def _call_fut(*args, **kwargs): - from google.cloud.logging._gapic import _parse_log_entry + from google.cloud.logging_v2._gapic import _parse_log_entry return _parse_log_entry(*args, **kwargs) def test_simple(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - - entry_pb = LogEntry(log_name=u"lol-jk", text_payload=u"bah humbug") - result = self._call_fut(entry_pb) + entry_pb = LogEntryPB(log_name="lol-jk", text_payload="bah humbug") + result = self._call_fut(LogEntryPB.pb(entry_pb)) expected = {"logName": entry_pb.log_name, "textPayload": entry_pb.text_payload} self.assertEqual(result, expected) - @mock.patch("google.cloud.logging._gapic.MessageToDict", side_effect=TypeError) + @mock.patch("google.cloud.logging_v2._gapic.MessageToDict", side_effect=TypeError) def test_non_registry_failure(self, msg_to_dict_mock): entry_pb = mock.Mock(spec=["HasField"]) 
entry_pb.HasField.return_value = False @@ -463,10 +480,13 @@ def test_non_registry_failure(self, msg_to_dict_mock): self._call_fut(entry_pb) entry_pb.HasField.assert_called_once_with("proto_payload") - msg_to_dict_mock.assert_called_once_with(entry_pb) + msg_to_dict_mock.assert_called_once_with( + entry_pb, + preserving_proto_field_name=False, + including_default_value_fields=False, + ) def test_unregistered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry from google.protobuf import any_pb2 from google.protobuf import descriptor_pool from google.protobuf.timestamp_pb2 import Timestamp @@ -482,8 +502,8 @@ def test_unregistered_type(self): any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) timestamp = Timestamp(seconds=61, nanos=1234000) - entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) - result = self._call_fut(entry_pb) + entry_pb = LogEntryPB(proto_payload=any_pb, timestamp=timestamp) + result = self._call_fut(LogEntryPB.pb(entry_pb)) self.assertEqual(len(result), 2) self.assertEqual(result["timestamp"], "1970-01-01T00:01:01.001234Z") # NOTE: This "hack" is needed on Windows, where the equality check @@ -492,7 +512,6 @@ def test_unregistered_type(self): self.assertEqual(result["protoPayload"].value, metadata_bytes) def test_registered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry from google.protobuf import any_pb2 from google.protobuf import descriptor_pool from google.protobuf.struct_pb2 import Struct @@ -506,12 +525,12 @@ def test_registered_type(self): type_url = "type.googleapis.com/" + type_name field_name = "foo" - field_value = u"Bar" + field_value = "Bar" struct_pb = Struct(fields={field_name: Value(string_value=field_value)}) any_pb = any_pb2.Any(type_url=type_url, value=struct_pb.SerializeToString()) - entry_pb = LogEntry(proto_payload=any_pb, log_name=u"all-good") - result = self._call_fut(entry_pb) + entry_pb = LogEntryPB(proto_payload=any_pb, log_name="all-good") + result = self._call_fut(LogEntryPB.pb(entry_pb)) expected_proto = { "logName": entry_pb.log_name, "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, @@ -522,15 +541,13 @@ def test_registered_type(self): class Test__log_entry_mapping_to_pb(unittest.TestCase): @staticmethod def _call_fut(*args, **kwargs): - from google.cloud.logging._gapic import _log_entry_mapping_to_pb + from google.cloud.logging_v2._gapic import _log_entry_mapping_to_pb return _log_entry_mapping_to_pb(*args, **kwargs) def test_simple(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry - result = self._call_fut({}) - self.assertEqual(result, LogEntry()) + self.assertEqual(result, LogEntryPB()) def test_unregistered_type(self): from google.protobuf import descriptor_pool @@ -554,7 +571,6 @@ def test_unregistered_type(self): self._call_fut(json_mapping) def test_registered_type(self): - from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry from google.protobuf import any_pb2 from google.protobuf import descriptor_pool @@ -566,14 +582,14 @@ def test_registered_type(self): type_url = "type.googleapis.com/" + type_name field_name = "foo" - field_value = u"Bar" + field_value = "Bar" json_mapping = { - "logName": u"hi-everybody", + "logName": "hi-everybody", "protoPayload": {"@type": type_url, "value": {field_name: field_value}}, } # Convert to a valid LogEntry. 
result = self._call_fut(json_mapping) - entry_pb = LogEntry( + entry_pb = LogEntryPB( log_name=json_mapping["logName"], proto_payload=any_pb2.Any( type_url=type_url, value=b"\n\014\n\003foo\022\005\032\003Bar" @@ -582,34 +598,40 @@ def test_registered_type(self): self.assertEqual(result, entry_pb) -@mock.patch("google.cloud.logging._gapic.LoggingServiceV2Client", autospec=True) +@mock.patch("google.cloud.logging_v2._gapic.LoggingServiceV2Client", autospec=True) def test_make_logging_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) api = _gapic.make_logging_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) -@mock.patch("google.cloud.logging._gapic.MetricsServiceV2Client", autospec=True) +@mock.patch("google.cloud.logging_v2._gapic.MetricsServiceV2Client", autospec=True) def test_make_metrics_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) api = _gapic.make_metrics_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) -@mock.patch("google.cloud.logging._gapic.ConfigServiceV2Client", autospec=True) +@mock.patch("google.cloud.logging_v2._gapic.ConfigServiceV2Client", autospec=True) def test_make_sinks_api(gapic_client): - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) api = _gapic.make_sinks_api(client) assert api._client == client assert api._gapic_api == gapic_client.return_value gapic_client.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) diff --git a/packages/google-cloud-logging/tests/unit/test__helpers.py b/packages/google-cloud-logging/tests/unit/test__helpers.py index c924567944ec..fb3e09f04446 100644 --- a/packages/google-cloud-logging/tests/unit/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/test__helpers.py @@ -25,7 +25,7 @@ class Test_entry_from_resource(unittest.TestCase): @staticmethod def _call_fut(resource, client, loggers): - from google.cloud.logging._helpers import entry_from_resource + from google.cloud.logging_v2._helpers import entry_from_resource return entry_from_resource(resource, client, loggers) @@ -39,7 +39,7 @@ def _payload_helper(self, key, class_name): loggers = {} mock_class = EntryMock() - name = "google.cloud.logging._helpers." + class_name + name = "google.cloud.logging_v2._helpers." 
+ class_name with mock.patch(name, new=mock_class): result = self._call_fut(resource, client, loggers) @@ -62,7 +62,7 @@ def test_proto_payload(self): class Test_retrieve_metadata_server(unittest.TestCase): @staticmethod def _call_fut(metadata_key): - from google.cloud.logging._helpers import retrieve_metadata_server + from google.cloud.logging_v2._helpers import retrieve_metadata_server return retrieve_metadata_server(metadata_key) @@ -78,7 +78,7 @@ def test_metadata_exists(self): requests_mock.get.return_value = response_mock requests_mock.codes.ok = status_code_ok - patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) + patch = mock.patch("google.cloud.logging_v2._helpers.requests", requests_mock) with patch: metadata = self._call_fut(metadata_key) @@ -96,7 +96,7 @@ def test_metadata_does_not_exist(self): requests_mock.get.return_value = response_mock requests_mock.codes.ok = status_code_ok - patch = mock.patch("google.cloud.logging._helpers.requests", requests_mock) + patch = mock.patch("google.cloud.logging_v2._helpers.requests", requests_mock) with patch: metadata = self._call_fut(metadata_key) @@ -115,7 +115,7 @@ def test_request_exception(self): requests_get_patch = mock.patch("requests.get", requests_get_mock) url_patch = mock.patch( - "google.cloud.logging._helpers.METADATA_URL", new=metadata_url + "google.cloud.logging_v2._helpers.METADATA_URL", new=metadata_url ) with requests_get_patch: @@ -128,12 +128,12 @@ def test_request_exception(self): class Test__normalize_severity(unittest.TestCase): @staticmethod def _stackdriver_severity(): - from google.cloud.logging._helpers import LogSeverity + from google.cloud.logging_v2._helpers import LogSeverity return LogSeverity def _normalize_severity_helper(self, stdlib_level, enum_level): - from google.cloud.logging._helpers import _normalize_severity + from google.cloud.logging_v2._helpers import _normalize_severity self.assertEqual(_normalize_severity(stdlib_level), enum_level) @@ -173,7 +173,7 @@ def _time_format(): @staticmethod def _add_defaults_to_filter(filter_): - from google.cloud.logging._helpers import _add_defaults_to_filter + from google.cloud.logging_v2._helpers import _add_defaults_to_filter return _add_defaults_to_filter(filter_) diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 7ad247ca7866..0cf8dcfddeaf 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -30,13 +30,13 @@ class TestConnection(unittest.TestCase): @staticmethod def _get_default_timeout(): - from google.cloud.logging._http import _http + from google.cloud.logging_v2._http import _http return _http._DEFAULT_TIMEOUT @staticmethod def _get_target_class(): - from google.cloud.logging._http import Connection + from google.cloud.logging_v2._http import Connection return Connection @@ -49,8 +49,8 @@ def test_default_url(self): self.assertIs(conn._client, client) def test_build_api_url_w_custom_endpoint(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit + from urllib.parse import parse_qsl + from urllib.parse import urlsplit custom_endpoint = "https://foo-logging.googleapis.com" conn = self._make_one(object(), api_endpoint=custom_endpoint) @@ -98,14 +98,16 @@ def test_extra_headers(self): class Test_LoggingAPI(unittest.TestCase): PROJECT = "project" + PROJECT_PATH = "projects/project" LIST_ENTRIES_PATH = "entries:list" 
WRITE_ENTRIES_PATH = "entries:write" LOGGER_NAME = "LOGGER_NAME" + LOGGER_PATH = "projects/project/logs/LOGGER_NAME" FILTER = "logName:syslog AND severity>=ERROR" @staticmethod def _get_target_class(): - from google.cloud.logging._http import _LoggingAPI + from google.cloud.logging_v2._http import _LoggingAPI return _LoggingAPI @@ -128,15 +130,14 @@ def _make_timestamp(): return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_no_paging(self): - import six - from google.cloud.logging.client import Client - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.logger import Logger + from google.cloud.logging_v2.client import Client + from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging_v2.logger import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" TEXT = "TEXT" - SENT = {"projectIds": [self.PROJECT]} + SENT = {"resourceNames": [self.PROJECT_PATH]} TOKEN = "TOKEN" RETURNED = { "entries": [ @@ -145,7 +146,7 @@ def test_list_entries_no_paging(self): "insertId": IID, "resource": {"type": "global"}, "timestamp": TIMESTAMP, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", } ], "nextPageToken": TOKEN, @@ -156,8 +157,8 @@ def test_list_entries_no_paging(self): client._connection = _Connection(RETURNED) api = self._make_one(client) - iterator = api.list_entries([self.PROJECT]) - page = six.next(iterator.pages) + iterator = api.list_entries([self.PROJECT_PATH]) + page = next(iterator.pages) entries = list(page) token = iterator.next_page_token @@ -183,14 +184,16 @@ def test_list_entries_no_paging(self): ) def test_list_entries_w_paging(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.entries import StructEntry + from google.cloud.logging_v2 import DESCENDING + from google.cloud.logging_v2.client import Client + from google.cloud.logging_v2.logger import Logger + from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging_v2.entries import StructEntry PROJECT1 = "PROJECT1" + PROJECT1_PATH = f"projects/{PROJECT1}" PROJECT2 = "PROJECT2" + PROJECT2_PATH = f"projects/{PROJECT2}" NOW, TIMESTAMP = self._make_timestamp() IID1 = "IID1" IID2 = "IID2" @@ -200,7 +203,7 @@ def test_list_entries_w_paging(self): TOKEN = "TOKEN" PAGE_SIZE = 42 SENT = { - "projectIds": [PROJECT1, PROJECT2], + "resourceNames": [PROJECT1_PATH, PROJECT2_PATH], "filter": self.FILTER, "orderBy": DESCENDING, "pageSize": PAGE_SIZE, @@ -231,7 +234,7 @@ def test_list_entries_w_paging(self): api = self._make_one(client) iterator = api.list_entries( - projects=[PROJECT1, PROJECT2], + resource_names=[PROJECT1_PATH, PROJECT2_PATH], filter_=self.FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, @@ -277,9 +280,9 @@ def test_write_entries_single(self): ENTRY = { "textPayload": TEXT, "resource": {"type": "global"}, - "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", } - SENT = {"entries": [ENTRY]} + SENT = {"entries": [ENTRY], "partialSuccess": False, "dry_run": False} conn = _Connection({}) client = _Client(conn) api = self._make_one(client) @@ -287,13 +290,13 @@ def test_write_entries_single(self): api.write_entries([ENTRY]) self.assertEqual(conn._called_with["method"], "POST") - path = "/%s" %
self.WRITE_ENTRIES_PATH + path = f"/{self.WRITE_ENTRIES_PATH}" self.assertEqual(conn._called_with["path"], path) self.assertEqual(conn._called_with["data"], SENT) def test_write_entries_multiple(self): TEXT = "TEXT" - LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LOG_NAME = f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}" RESOURCE = {"type": "global"} LABELS = {"baz": "qux", "spam": "eggs"} ENTRY1 = {"textPayload": TEXT} @@ -303,25 +306,29 @@ def test_write_entries_multiple(self): "resource": RESOURCE, "labels": LABELS, "entries": [ENTRY1, ENTRY2], + "partialSuccess": False, + "dry_run": False, } conn = _Connection({}) client = _Client(conn) api = self._make_one(client) - api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS) + api.write_entries( + [ENTRY1, ENTRY2], logger_name=LOG_NAME, resource=RESOURCE, labels=LABELS + ) self.assertEqual(conn._called_with["method"], "POST") - path = "/%s" % self.WRITE_ENTRIES_PATH + path = f"/{self.WRITE_ENTRIES_PATH}" self.assertEqual(conn._called_with["path"], path) self.assertEqual(conn._called_with["data"], SENT) def test_logger_delete(self): - path = "/projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + path = f"/projects/{self.PROJECT}/logs/{self.LOGGER_NAME}" conn = _Connection({}) client = _Client(conn) api = self._make_one(client) - api.logger_delete(self.PROJECT, self.LOGGER_NAME) + api.logger_delete(self.LOGGER_PATH) self.assertEqual(conn._called_with["method"], "DELETE") self.assertEqual(conn._called_with["path"], path) @@ -330,16 +337,17 @@ def test_logger_delete(self): class Test_SinksAPI(unittest.TestCase): PROJECT = "project" + PROJECT_PATH = "projects/project" FILTER = "logName:syslog AND severity>=ERROR" - LIST_SINKS_PATH = "projects/%s/sinks" % (PROJECT,) + LIST_SINKS_PATH = f"projects/{PROJECT}/sinks" SINK_NAME = "sink_name" - SINK_PATH = "projects/%s/sinks/%s" % (PROJECT, SINK_NAME) + SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" DESTINATION_URI = "faux.googleapis.com/destination" WRITER_IDENTITY = "serviceAccount:project-123@example.com" @staticmethod def _get_target_class(): - from google.cloud.logging._http import _SinksAPI + from google.cloud.logging_v2._http import _SinksAPI return _SinksAPI @@ -354,8 +362,7 @@ def test_ctor(self): self.assertEqual(api.api_request, connection.api_request) def test_list_sinks_no_paging(self): - import six - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink TOKEN = "TOKEN" RETURNED = { @@ -372,8 +379,8 @@ def test_list_sinks_no_paging(self): client = _Client(conn) api = self._make_one(client) - iterator = api.list_sinks(self.PROJECT) - page = six.next(iterator.pages) + iterator = api.list_sinks(self.PROJECT_PATH) + page = next(iterator.pages) sinks = list(page) token = iterator.next_page_token @@ -389,13 +396,13 @@ def test_list_sinks_no_paging(self): self.assertIs(sink.client, client) called_with = conn._called_with - path = "/%s" % (self.LIST_SINKS_PATH,) + path = f"/{self.LIST_SINKS_PATH}" self.assertEqual( called_with, {"method": "GET", "path": path, "query_params": {}} ) def test_list_sinks_w_paging(self): - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink TOKEN = "TOKEN" PAGE_SIZE = 42 @@ -412,7 +419,9 @@ def test_list_sinks_w_paging(self): client = _Client(conn) api = self._make_one(client) - iterator = api.list_sinks(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + iterator = api.list_sinks( + self.PROJECT_PATH, page_size=PAGE_SIZE, page_token=TOKEN + ) 
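[Editor's sketch — illustrative, not part of the patch. A side-by-side of the v1 -> v2 HTTP request shapes that the rewritten assertions above encode; names and values are placeholders taken from the tests.]

# entries:list — v1 addressed bare project IDs, v2 takes full resource names
# (which is also why list_entries() grew a resource_names keyword).
V1_LIST_BODY = {"projectIds": ["my-project"]}
V2_LIST_BODY = {"resourceNames": ["projects/my-project"]}

# entries:write — v2 always sends explicit flags alongside the entries, and
# write_entries() now takes logger_name/resource/labels as keyword-only.
V2_WRITE_BODY = {
    "entries": [{"textPayload": "TEXT", "resource": {"type": "global"}}],
    "partialSuccess": False,
    "dry_run": False,
}

# logger_delete() and the sink helpers likewise accept one fully qualified
# path instead of separate (project, name) arguments.
LOGGER_PATH = "projects/my-project/logs/my-log"
SINK_PATH = "projects/my-project/sinks/my-sink"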
sinks = list(iterator) token = iterator.next_page_token @@ -428,7 +437,7 @@ def test_list_sinks_w_paging(self): self.assertIs(sink.client, client) called_with = conn._called_with - path = "/%s" % (self.LIST_SINKS_PATH,) + path = f"/{self.LIST_SINKS_PATH}" self.assertEqual( called_with, { @@ -453,10 +462,10 @@ def test_sink_create_conflict(self): with self.assertRaises(Conflict): api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI + self.PROJECT_PATH, self.SINK_NAME, self.FILTER, self.DESTINATION_URI ) - path = "/projects/%s/sinks" % (self.PROJECT,) + path = f"/projects/{self.PROJECT}/sinks" expected = { "method": "POST", "path": path, @@ -478,7 +487,7 @@ def test_sink_create_ok(self): api = self._make_one(client) returned = api.sink_create( - self.PROJECT, + self.PROJECT_PATH, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, @@ -486,7 +495,7 @@ def test_sink_create_ok(self): ) self.assertEqual(returned, after_create) - path = "/projects/%s/sinks" % (self.PROJECT,) + path = f"/projects/{self.PROJECT}/sinks" expected = { "method": "POST", "path": path, @@ -503,10 +512,10 @@ def test_sink_get_miss(self): api = self._make_one(client) with self.assertRaises(NotFound): - api.sink_get(self.PROJECT, self.SINK_NAME) + api.sink_get(self.SINK_PATH) self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" self.assertEqual(conn._called_with["path"], path) def test_sink_get_hit(self): @@ -519,11 +528,11 @@ def test_sink_get_hit(self): client = _Client(conn) api = self._make_one(client) - response = api.sink_get(self.PROJECT, self.SINK_NAME) + response = api.sink_get(self.SINK_PATH) self.assertEqual(response, RESPONSE) self.assertEqual(conn._called_with["method"], "GET") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" self.assertEqual(conn._called_with["path"], path) def test_sink_update_miss(self): @@ -539,11 +548,9 @@ def test_sink_update_miss(self): api = self._make_one(client) with self.assertRaises(NotFound): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI - ) + api.sink_update(self.SINK_PATH, self.FILTER, self.DESTINATION_URI) - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" expected = { "method": "PUT", "path": path, @@ -565,15 +572,14 @@ def test_sink_update_hit(self): api = self._make_one(client) returned = api.sink_update( - self.PROJECT, - self.SINK_NAME, + self.SINK_PATH, self.FILTER, self.DESTINATION_URI, unique_writer_identity=True, ) self.assertEqual(returned, after_update) - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" expected = { "method": "PUT", "path": path, @@ -590,10 +596,10 @@ def test_sink_delete_miss(self): api = self._make_one(client) with self.assertRaises(NotFound): - api.sink_delete(self.PROJECT, self.SINK_NAME) + api.sink_delete(self.SINK_PATH) self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" self.assertEqual(conn._called_with["path"], path) def test_sink_delete_hit(self): @@ -601,10 +607,10 @@ def test_sink_delete_hit(self): client = _Client(conn) api = self._make_one(client) - api.sink_delete(self.PROJECT, self.SINK_NAME) + 
api.sink_delete(self.SINK_PATH) self.assertEqual(conn._called_with["method"], "DELETE") - path = "/projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + path = f"/projects/{self.PROJECT}/sinks/{self.SINK_NAME}" self.assertEqual(conn._called_with["path"], path) @@ -619,7 +625,7 @@ class Test_MetricsAPI(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging._http import _MetricsAPI + from google.cloud.logging_v2._http import _MetricsAPI return _MetricsAPI @@ -627,8 +633,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_list_metrics_no_paging(self): - import six - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric TOKEN = "TOKEN" RETURNED = { @@ -640,7 +645,7 @@ def test_list_metrics_no_paging(self): api = self._make_one(client) iterator = api.list_metrics(self.PROJECT) - page = six.next(iterator.pages) + page = next(iterator.pages) metrics = list(page) token = iterator.next_page_token @@ -662,7 +667,7 @@ def test_list_metrics_no_paging(self): ) def test_list_metrics_w_paging(self): - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric TOKEN = "TOKEN" PAGE_SIZE = 42 diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 9e2a15bb4696..29934c389a21 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -31,6 +31,7 @@ def _make_credentials(): class TestClient(unittest.TestCase): PROJECT = "PROJECT" + PROJECT_PATH = f"projects/{PROJECT}" LOGGER_NAME = "LOGGER_NAME" SINK_NAME = "SINK_NAME" FILTER = "logName:syslog AND severity>=ERROR" @@ -42,7 +43,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.client import Client + from google.cloud.logging_v2.client import Client return Client @@ -51,7 +52,7 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): from google.cloud._http import ClientInfo - from google.cloud.logging._http import Connection + from google.cloud.logging_v2._http import Connection creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -61,7 +62,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): from google.cloud._http import ClientInfo - from google.cloud.logging._http import Connection + from google.cloud.logging_v2._http import Connection creds = _make_credentials() client_info = ClientInfo() @@ -110,10 +111,10 @@ def test_ctor_w_client_options_dict(self): ) def test_logging_api_wo_gapic(self): - from google.cloud.logging._http import _LoggingAPI + from google.cloud.logging_v2._http import _LoggingAPI client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) conn = client._connection = _Connection() @@ -136,7 +137,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch("google.cloud.logging.client._gapic") + patch = mock.patch("google.cloud.logging_v2.client._gapic") with patch as gapic_module: gapic_module.make_logging_api.side_effect = make_api api = client.logging_api @@ -148,10 +149,10 @@ def make_api(client_obj): self.assertIs(again, api) def test_no_gapic_ctor(self): - from google.cloud.logging._http import _LoggingAPI 
+ from google.cloud.logging_v2._http import _LoggingAPI creds = _make_credentials() - patch = mock.patch("google.cloud.logging.client._USE_GRPC", new=True) + patch = mock.patch("google.cloud.logging_v2.client._USE_GRPC", new=True) with patch: client = self._make_one( project=self.PROJECT, credentials=creds, _use_grpc=False @@ -161,10 +162,10 @@ def test_no_gapic_ctor(self): self.assertIsInstance(api, _LoggingAPI) def test_sinks_api_wo_gapic(self): - from google.cloud.logging._http import _SinksAPI + from google.cloud.logging_v2._http import _SinksAPI client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) conn = client._connection = _Connection() @@ -187,7 +188,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch("google.cloud.logging.client._gapic") + patch = mock.patch("google.cloud.logging_v2.client._gapic") with patch as gapic_module: gapic_module.make_sinks_api.side_effect = make_api api = client.sinks_api @@ -199,10 +200,10 @@ def make_api(client_obj): self.assertIs(again, api) def test_metrics_api_wo_gapic(self): - from google.cloud.logging._http import _MetricsAPI + from google.cloud.logging_v2._http import _MetricsAPI client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) conn = client._connection = _Connection() @@ -225,7 +226,7 @@ def make_api(client_obj): creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch("google.cloud.logging.client._gapic") + patch = mock.patch("google.cloud.logging_v2.client._gapic") with patch as gapic_module: gapic_module.make_metrics_api.side_effect = make_api api = client.metrics_api @@ -237,7 +238,7 @@ def make_api(client_obj): self.assertIs(again, api) def test_logger(self): - from google.cloud.logging.logger import Logger + from google.cloud.logging_v2.logger import Logger creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -248,8 +249,7 @@ def test_logger(self): self.assertEqual(logger.project, self.PROJECT) def test_list_entries_defaults(self): - import six - from google.cloud.logging.entries import TextEntry + from google.cloud.logging_v2.entries import TextEntry IID = "IID" TEXT = "TEXT" @@ -270,7 +270,7 @@ def test_list_entries_defaults(self): client._connection = _Connection(returned) iterator = client.list_entries() - page = six.next(iterator.pages) + page = next(iterator.pages) entries = list(page) token = iterator.next_page_token @@ -293,7 +293,10 @@ def test_list_entries_defaults(self): { "path": "/entries:list", "method": "POST", - "data": {"filter": "removed", "projectIds": [self.PROJECT]}, + "data": { + "filter": "removed", + "resourceNames": [f"projects/{self.PROJECT}"], + }, }, ) # verify that default filter is 24 hours @@ -305,10 +308,10 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.logger import Logger + from google.cloud.logging_v2 import DESCENDING + from google.cloud.logging_v2.entries import ProtobufEntry + from 
google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging_v2.logger import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -335,13 +338,13 @@ def test_list_entries_explicit(self): }, ] client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries( - projects=[PROJECT1, PROJECT2], + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, @@ -388,7 +391,7 @@ def test_list_entries_explicit(self): "orderBy": DESCENDING, "pageSize": PAGE_SIZE, "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, ) @@ -401,10 +404,10 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.entries import ProtobufEntry - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.logger import Logger + from google.cloud.logging_v2 import DESCENDING + from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging_v2.logger import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -431,13 +434,13 @@ def test_list_entries_explicit_timestamp(self): }, ] client = self._make_one( - self.PROJECT, credentials=_make_credentials(), _use_grpc=False + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries( - projects=[PROJECT1, PROJECT2], + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, @@ -483,13 +486,13 @@ def test_list_entries_explicit_timestamp(self): "orderBy": DESCENDING, "pageSize": PAGE_SIZE, "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, ) def test_sink_defaults(self): - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -499,24 +502,25 @@ def test_sink_defaults(self): self.assertIsNone(sink.filter_) self.assertIsNone(sink.destination) self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.parent, self.PROJECT_PATH) def test_sink_explicit(self): - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + sink = client.sink( + self.SINK_NAME, filter_=self.FILTER, destination=self.DESTINATION_URI + ) self.assertIsInstance(sink, Sink) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.parent, self.PROJECT_PATH) def test_list_sinks_no_paging(self): - import six - from google.cloud.logging.sink import Sink + 
from google.cloud.logging_v2.sink import Sink PROJECT = "PROJECT" TOKEN = "TOKEN" @@ -532,7 +536,7 @@ def test_list_sinks_no_paging(self): client._connection = _Connection(returned) iterator = client.list_sinks() - page = six.next(iterator.pages) + page = next(iterator.pages) sinks = list(page) token = iterator.next_page_token @@ -555,7 +559,7 @@ def test_list_sinks_no_paging(self): ) def test_list_sinks_with_paging(self): - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink PROJECT = "PROJECT" SINK_NAME = "sink_name" @@ -571,7 +575,7 @@ def test_list_sinks_with_paging(self): returned = {"sinks": SINKS} client._connection = _Connection(returned) - iterator = client.list_sinks(PAGE_SIZE, TOKEN) + iterator = client.list_sinks(page_size=PAGE_SIZE, page_token=TOKEN) sinks = list(iterator) token = iterator.next_page_token @@ -599,7 +603,7 @@ def test_list_sinks_with_paging(self): ) def test_metric_defaults(self): - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric creds = _make_credentials() @@ -613,13 +617,13 @@ def test_metric_defaults(self): self.assertEqual(metric.project, self.PROJECT) def test_metric_explicit(self): - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric creds = _make_credentials() client_obj = self._make_one(project=self.PROJECT, credentials=creds) metric = client_obj.metric( - self.METRIC_NAME, self.FILTER, description=self.DESCRIPTION + self.METRIC_NAME, filter_=self.FILTER, description=self.DESCRIPTION ) self.assertIsInstance(metric, Metric) self.assertEqual(metric.name, self.METRIC_NAME) @@ -629,7 +633,7 @@ def test_metric_explicit(self): self.assertEqual(metric.project, self.PROJECT) def test_list_metrics_no_paging(self): - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric metrics = [ { @@ -665,8 +669,7 @@ def test_list_metrics_no_paging(self): ) def test_list_metrics_with_paging(self): - import six - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric token = "TOKEN" next_token = "T00KEN" @@ -685,8 +688,8 @@ def test_list_metrics_with_paging(self): client._connection = _Connection(returned) # Execute request. - iterator = client.list_metrics(page_size, token) - page = six.next(iterator.pages) + iterator = client.list_metrics(page_size=page_size, page_token=token) + page = next(iterator.pages) metrics = list(page) # First check the token. 
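[Editor's sketch — illustrative usage, not part of the patch. The client-surface changes asserted above, seen from a caller's perspective: everything past the name is keyword-only on the sink/metric factories, paging arguments are keyword-only, and Sink exposes a parent path rather than a bare project. Assumes application-default credentials; all names are placeholders.]

from google.cloud import logging_v2

client = logging_v2.Client(project="my-project")

sink = client.sink(
    "my-sink",
    filter_="severity>=ERROR",
    destination="storage.googleapis.com/my-bucket",
)
assert sink.parent == "projects/my-project"

metric = client.metric("my-metric", filter_="severity>=ERROR", description="errors")

for existing in client.list_sinks(page_size=42):  # positional paging args are gone
    print(existing.name)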
@@ -715,8 +718,8 @@ def test_list_metrics_with_paging(self): def test_get_default_handler_app_engine(self): import os from google.cloud._testing import _Monkey - from google.cloud.logging.client import _APPENGINE_FLEXIBLE_ENV_VM - from google.cloud.logging.handlers import AppEngineHandler + from google.cloud.logging_v2.client import _APPENGINE_FLEXIBLE_ENV_VM + from google.cloud.logging_v2.handlers import AppEngineHandler credentials = _make_credentials() client = self._make_one( @@ -731,7 +734,7 @@ def test_get_default_handler_app_engine(self): self.assertIsInstance(handler, AppEngineHandler) def test_get_default_handler_container_engine(self): - from google.cloud.logging.handlers import ContainerEngineHandler + from google.cloud.logging_v2.handlers import ContainerEngineHandler credentials = _make_credentials() client = self._make_one( @@ -739,7 +742,7 @@ def test_get_default_handler_container_engine(self): ) patch = mock.patch( - "google.cloud.logging.client.retrieve_metadata_server", + "google.cloud.logging_v2.client.retrieve_metadata_server", return_value="test-gke-cluster", ) @@ -750,8 +753,8 @@ def test_get_default_handler_container_engine(self): def test_get_default_handler_general(self): import io - from google.cloud.logging.handlers import CloudLoggingHandler - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.handlers import CloudLoggingHandler + from google.cloud.logging_v2.resource import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) @@ -775,14 +778,14 @@ def test_get_default_handler_general(self): self.assertEqual(handler.labels, labels) def test_setup_logging(self): - from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import CloudLoggingHandler credentials = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=credentials, _use_grpc=False ) - with mock.patch("google.cloud.logging.client.setup_logging") as mocked: + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: client.setup_logging() self.assertEqual(len(mocked.mock_calls), 1) @@ -801,8 +804,8 @@ def test_setup_logging(self): def test_setup_logging_w_extra_kwargs(self): import io - from google.cloud.logging.handlers import CloudLoggingHandler - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.handlers import CloudLoggingHandler + from google.cloud.logging_v2.resource import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) @@ -814,7 +817,7 @@ def test_setup_logging_w_extra_kwargs(self): project=self.PROJECT, credentials=credentials, _use_grpc=False ) - with mock.patch("google.cloud.logging.client.setup_logging") as mocked: + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: client.setup_logging( name=name, resource=resource, labels=labels, stream=stream ) diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 3aad7fbb130c..5b7763f45e40 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -19,7 +19,7 @@ class Test_logger_name_from_path(unittest.TestCase): def _call_fut(self, path): - from google.cloud.logging.entries import logger_name_from_path + from google.cloud.logging_v2.entries import logger_name_from_path return logger_name_from_path(path) @@ -40,7 +40,7 @@ def 
test_w_name_w_all_extras(self): class Test__int_or_none(unittest.TestCase): def _call_fut(self, value): - from google.cloud.logging.entries import _int_or_none + from google.cloud.logging_v2.entries import _int_or_none return _int_or_none(value) @@ -61,7 +61,7 @@ class TestLogEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.entries import LogEntry + from google.cloud.logging_v2.entries import LogEntry return LogEntry @@ -69,7 +69,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE entry = self._make_one() @@ -90,7 +90,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import datetime - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) IID = "IID" @@ -178,7 +178,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource klass = self._get_target_class() client = _Client(self.PROJECT) @@ -316,7 +316,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertIsNone(entry.payload) def test_to_api_repr_w_source_location_no_line(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE LOG_NAME = "test.log" FILE = "my_file.py" @@ -332,7 +332,7 @@ def test_to_api_repr_w_source_location_no_line(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -395,7 +395,7 @@ class TestTextEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.entries import TextEntry + from google.cloud.logging_v2.entries import TextEntry return TextEntry @@ -403,7 +403,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_to_api_repr_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE LOG_NAME = "test.log" TEXT = "TESTING" @@ -417,7 +417,7 @@ def test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -483,7 +483,7 @@ class TestStructEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.entries import StructEntry + from google.cloud.logging_v2.entries import StructEntry return StructEntry @@ -491,7 +491,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_to_api_repr_defaults(self): - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE LOG_NAME = "test.log" JSON_PAYLOAD = {"key": "value"} @@ -505,7 +505,7 @@ def test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging.resource import 
Resource + from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -571,7 +571,7 @@ class TestProtobufEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging_v2.entries import ProtobufEntry return ProtobufEntry @@ -634,7 +634,7 @@ def test_parse_message(self): def test_to_api_repr_proto_defaults(self): from google.protobuf.json_format import MessageToDict - from google.cloud.logging.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -652,7 +652,7 @@ def test_to_api_repr_proto_defaults(self): def test_to_api_repr_proto_explicit(self): import datetime from google.protobuf.json_format import MessageToDict - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _datetime_to_rfc3339 from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 966a515e0af5..853bcce22c44 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -36,7 +36,7 @@ class TestLogger(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.logger import Logger + from google.cloud.logging_v2.logger import Logger return Logger @@ -75,7 +75,7 @@ def test_ctor_explicit(self): self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): - from google.cloud.logging.logger import Batch + from google.cloud.logging_v2.logger import Batch conn = object() client = _Client(self.PROJECT, conn) @@ -86,14 +86,14 @@ def test_batch_w_bound_client(self): self.assertIs(batch.client, client) def test_batch_w_alternate_client(self): - from google.cloud.logging.logger import Batch + from google.cloud.logging_v2.logger import Batch conn1 = object() conn2 = object() client1 = _Client(self.PROJECT, conn1) client2 = _Client(self.PROJECT, conn2) logger = self._make_one(self.LOGGER_NAME, client=client1) - batch = logger.batch(client2) + batch = logger.batch(client=client2) self.assertIsInstance(batch, Batch) self.assertIs(batch.logger, logger) self.assertIs(batch.client, client2) @@ -117,7 +117,7 @@ def test_log_empty_defaults_w_default_labels(self): def test_log_empty_w_explicit(self): import datetime - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" DEFAULT_LABELS = {"foo": "spam"} @@ -187,7 +187,7 @@ def test_log_text_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): - TEXT = u"TEXT" + TEXT = "TEXT" DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -207,7 +207,7 @@ def test_log_text_w_unicode_and_default_labels(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" TEXT = "TEXT" @@ -300,7 +300,7 @@ def test_log_struct_w_default_labels(self): def test_log_struct_w_explicit(self): import datetime - from 
google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" STRUCT = {"message": "MESSAGE", "weather": "cloudy"} @@ -405,7 +405,7 @@ def test_log_proto_w_explicit(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.resource import Resource message = Struct(fields={"foo": Value(bool_value=True)}) ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -468,7 +468,8 @@ def test_delete_w_bound_client(self): logger.delete() self.assertEqual( - api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) + api._logger_delete_called_with, + (f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}"), ) def test_delete_w_alternate_client(self): @@ -480,12 +481,12 @@ def test_delete_w_alternate_client(self): logger.delete(client=client2) self.assertEqual( - api._logger_delete_called_with, (self.PROJECT, self.LOGGER_NAME) + api._logger_delete_called_with, + (f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}"), ) def test_list_entries_defaults(self): - import six - from google.cloud.logging.client import Client + from google.cloud.logging_v2.client import Client TOKEN = "TOKEN" @@ -498,7 +499,7 @@ def test_list_entries_defaults(self): logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries() - page = six.next(iterator.pages) + page = next(iterator.pages) entries = list(page) token = iterator.next_page_token @@ -514,7 +515,10 @@ def test_list_entries_defaults(self): { "path": "/entries:list", "method": "POST", - "data": {"filter": "removed", "projectIds": [self.PROJECT]}, + "data": { + "filter": "removed", + "resourceNames": [f"projects/{self.PROJECT}"], + }, }, ) # verify that default filter is 24 hours @@ -526,8 +530,8 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client + from google.cloud.logging_v2 import DESCENDING + from google.cloud.logging_v2.client import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -540,7 +544,7 @@ def test_list_entries_explicit(self): client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( - projects=[PROJECT1, PROJECT2], + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, @@ -565,7 +569,7 @@ def test_list_entries_explicit(self): "orderBy": DESCENDING, "pageSize": PAGE_SIZE, "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, ) @@ -586,8 +590,8 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client + from google.cloud.logging_v2 import DESCENDING + from google.cloud.logging_v2.client import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -600,7 +604,7 @@ def test_list_entries_explicit_timestamp(self): client._connection = _Connection({}) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries( - projects=[PROJECT1, PROJECT2], + 
resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, @@ -625,7 +629,7 @@ def test_list_entries_explicit_timestamp(self): "orderBy": DESCENDING, "pageSize": PAGE_SIZE, "pageToken": TOKEN, - "projectIds": [PROJECT1, PROJECT2], + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, ) @@ -637,7 +641,7 @@ class TestBatch(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging.logger import Batch + from google.cloud.logging_v2.logger import Batch return Batch @@ -653,7 +657,7 @@ def test_ctor_defaults(self): self.assertEqual(len(batch.entries), 0) def test_log_empty_defaults(self): - from google.cloud.logging.entries import LogEntry + from google.cloud.logging_v2.entries import LogEntry ENTRY = LogEntry() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -664,8 +668,8 @@ def test_log_empty_defaults(self): def test_log_empty_explicit(self): import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import LogEntry + from google.cloud.logging_v2.resource import Resource + from google.cloud.logging_v2.entries import LogEntry LABELS = {"foo": "bar", "baz": "qux"} IID = "IID" @@ -709,8 +713,8 @@ def test_log_empty_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_text_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import TextEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import TextEntry TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) @@ -722,8 +726,8 @@ def test_log_text_defaults(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import TextEntry + from google.cloud.logging_v2.resource import Resource + from google.cloud.logging_v2.entries import TextEntry TEXT = "This is the entry text" LABELS = {"foo": "bar", "baz": "qux"} @@ -770,8 +774,8 @@ def test_log_text_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_struct_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import StructEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) @@ -783,8 +787,8 @@ def test_log_struct_defaults(self): def test_log_struct_explicit(self): import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import StructEntry + from google.cloud.logging_v2.resource import Resource + from google.cloud.logging_v2.entries import StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} LABELS = {"foo": "bar", "baz": "qux"} @@ -831,8 +835,8 @@ def test_log_struct_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_proto_defaults(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -846,8 +850,8 @@ def 
test_log_proto_defaults(self): def test_log_proto_explicit(self): import datetime - from google.cloud.logging.resource import Resource - from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging_v2.resource import Resource + from google.cloud.logging_v2.entries import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -895,8 +899,8 @@ def test_log_proto_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_commit_w_unknown_entry_type(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.entries import LogEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import LogEntry logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -913,8 +917,8 @@ def test_commit_w_unknown_entry_type(self): ) def test_commit_w_resource_specified(self): - from google.cloud.logging.entries import _GLOBAL_RESOURCE - from google.cloud.logging.resource import Resource + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.resource import Resource logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -944,7 +948,7 @@ def test_commit_w_bound_client(self): from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1031,8 +1035,8 @@ def test_commit_w_alternate_client(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import Logger + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1083,8 +1087,8 @@ def test_context_mgr_success(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging.logger import Logger - from google.cloud.logging.entries import _GLOBAL_RESOURCE + from google.cloud.logging_v2.logger import Logger + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1133,9 +1137,9 @@ def test_context_mgr_failure(self): import datetime from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.entries import ProtobufEntry + from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging_v2.entries import ProtobufEntry TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1183,11 +1187,11 @@ class _DummyLoggingAPI(object): _write_entries_called_with = None - def write_entries(self, entries, logger_name=None, resource=None, labels=None): + def write_entries(self, entries, *, 
logger_name=None, resource=None, labels=None): self._write_entries_called_with = (entries, logger_name, resource, labels) - def logger_delete(self, project, logger_name): - self._logger_delete_called_with = (project, logger_name) + def logger_delete(self, logger_name): + self._logger_delete_called_with = logger_name class _Client(object): diff --git a/packages/google-cloud-logging/tests/unit/test_logging_shim.py b/packages/google-cloud-logging/tests/unit/test_logging_shim.py new file mode 100644 index 000000000000..507b7c635900 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/test_logging_shim.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestLoggingShim(unittest.TestCase): + def test_shim_matches_logging_v2(self): + from google.cloud import logging + from google.cloud import logging_v2 + + self.assertEqual(logging.__all__, logging_v2.__all__) + + for name in logging.__all__: + found = getattr(logging, name) + expected = getattr(logging_v2, name) + self.assertIs(found, expected) diff --git a/packages/google-cloud-logging/tests/unit/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py index 93ee90b87470..a71fd763f9ed 100644 --- a/packages/google-cloud-logging/tests/unit/test_metric.py +++ b/packages/google-cloud-logging/tests/unit/test_metric.py @@ -19,12 +19,13 @@ class TestMetric(unittest.TestCase): PROJECT = "test-project" METRIC_NAME = "metric-name" + FULL_METRIC_NAME = f"projects/{PROJECT}/metrics/{METRIC_NAME}" FILTER = "logName:syslog AND severity>=ERROR" DESCRIPTION = "DESCRIPTION" @staticmethod def _get_target_class(): - from google.cloud.logging.metric import Metric + from google.cloud.logging_v2.metric import Metric return Metric @@ -32,7 +33,6 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) metric = self._make_one(self.METRIC_NAME, client=client) self.assertEqual(metric.name, self.METRIC_NAME) @@ -40,26 +40,27 @@ def test_ctor_defaults(self): self.assertEqual(metric.description, "") self.assertIs(metric.client, client) self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, "/%s" % (FULL,)) + self.assertEqual(metric.full_name, self.FULL_METRIC_NAME) + self.assertEqual(metric.path, f"/{self.FULL_METRIC_NAME}") def test_ctor_explicit(self): - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) client = _Client(self.PROJECT) metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION + self.METRIC_NAME, + filter_=self.FILTER, + client=client, + description=self.DESCRIPTION, ) self.assertEqual(metric.name, self.METRIC_NAME) self.assertEqual(metric.filter_, self.FILTER) self.assertEqual(metric.description, self.DESCRIPTION) 
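[Editor's sketch — illustrative, not part of the patch. The new shim test above only demands that google.cloud.logging re-export logging_v2's public surface as the identical objects; a minimal module satisfying it could look like this (hypothetical, not necessarily the shipped shim).]

# google/cloud/logging/__init__.py — hypothetical minimal shim
from google.cloud.logging_v2 import *          # noqa: F401,F403 — binds the same objects, so assertIs passes
from google.cloud.logging_v2 import __all__    # noqa: F401 — a star-import does not copy __all__ itself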
self.assertIs(metric.client, client) self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) - self.assertEqual(metric.path, "/%s" % (FULL,)) + self.assertEqual(metric.full_name, self.FULL_METRIC_NAME) + self.assertEqual(metric.path, f"/{self.FULL_METRIC_NAME}") def test_from_api_repr_minimal(self): client = _Client(project=self.PROJECT) - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) RESOURCE = {"name": self.METRIC_NAME, "filter": self.FILTER} klass = self._get_target_class() metric = klass.from_api_repr(RESOURCE, client=client) @@ -68,11 +69,10 @@ def test_from_api_repr_minimal(self): self.assertEqual(metric.description, "") self.assertIs(metric._client, client) self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.full_name, self.FULL_METRIC_NAME) def test_from_api_repr_w_description(self): client = _Client(project=self.PROJECT) - FULL = "projects/%s/metrics/%s" % (self.PROJECT, self.METRIC_NAME) DESCRIPTION = "DESCRIPTION" RESOURCE = { "name": self.METRIC_NAME, @@ -86,12 +86,12 @@ def test_from_api_repr_w_description(self): self.assertEqual(metric.description, DESCRIPTION) self.assertIs(metric._client, client) self.assertEqual(metric.project, self.PROJECT) - self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.full_name, self.FULL_METRIC_NAME) def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client) metric.create() @@ -105,7 +105,10 @@ def test_create_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION + self.METRIC_NAME, + filter_=self.FILTER, + client=client1, + description=self.DESCRIPTION, ) metric.create(client=client2) @@ -118,7 +121,7 @@ def test_create_w_alternate_client(self): def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client) self.assertFalse(metric.exists()) @@ -130,7 +133,7 @@ def test_exists_hit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client1) self.assertTrue(metric.exists(client=client2)) @@ -143,7 +146,10 @@ def test_reload_w_bound_client(self): api = client.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client, description=self.DESCRIPTION + self.METRIC_NAME, + filter_=self.FILTER, + client=client, + description=self.DESCRIPTION, ) metric.reload() @@ -163,7 +169,7 @@ def test_reload_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() api._metric_get_response = RESOURCE - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client1) 
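[Editor's sketch — illustrative usage, not part of the patch. The metric lifecycle these tests walk through, with filter_ now keyword-only throughout; assumes an authenticated client, placeholder names, and that description remains a plain mutable attribute.]

from google.cloud import logging_v2

client = logging_v2.Client(project="my-project")
metric = client.metric("my-metric", filter_="severity>=ERROR")
if not metric.exists():
    metric.create()
metric.reload()                     # re-fetch filter_/description from the API
metric.description = "error count"
metric.update()
metric.delete()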
metric.reload(client=client2) @@ -174,7 +180,7 @@ def test_reload_w_alternate_client(self): def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client) metric.update() @@ -188,7 +194,10 @@ def test_update_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() metric = self._make_one( - self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION + self.METRIC_NAME, + filter_=self.FILTER, + client=client1, + description=self.DESCRIPTION, ) metric.update(client=client2) @@ -201,7 +210,7 @@ def test_update_w_alternate_client(self): def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client) metric.delete() @@ -213,7 +222,7 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.metrics_api = _DummyMetricsAPI() - metric = self._make_one(self.METRIC_NAME, self.FILTER, client=client1) + metric = self._make_one(self.METRIC_NAME, filter_=self.FILTER, client=client1) metric.delete(client=client2) diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index dc1ff9563f9c..cac6040589d4 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -18,14 +18,16 @@ class TestSink(unittest.TestCase): PROJECT = "test-project" + PROJECT_PATH = f"projects/{PROJECT}" SINK_NAME = "sink-name" + FULL_NAME = f"projects/{PROJECT}/sinks/{SINK_NAME}" FILTER = "logName:syslog AND severity>=INFO" DESTINATION_URI = "faux.googleapis.com/destination" WRITER_IDENTITY = "serviceAccount:project-123@example.com" @staticmethod def _get_target_class(): - from google.cloud.logging.sink import Sink + from google.cloud.logging_v2.sink import Sink return Sink @@ -33,34 +35,37 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) sink = self._make_one(self.SINK_NAME, client=client) self.assertEqual(sink.name, self.SINK_NAME) self.assertIsNone(sink.filter_) self.assertIsNone(sink.destination) self.assertIs(sink.client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, "/%s" % (FULL,)) + self.assertEqual(sink.parent, self.PROJECT_PATH) + self.assertEqual(sink.full_name, self.FULL_NAME) + self.assertEqual(sink.path, f"/{self.FULL_NAME}") def test_ctor_explicit(self): - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) client = _Client(self.PROJECT) + parent = "folders/testFolder" sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + self.SINK_NAME, + filter_=self.FILTER, + parent=parent, + destination=self.DESTINATION_URI, + client=client, ) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertIs(sink.client, client) - self.assertEqual(sink.project, 
self.PROJECT) - self.assertEqual(sink.full_name, FULL) - self.assertEqual(sink.path, "/%s" % (FULL,)) + self.assertEqual(sink.parent, parent) + self.assertEqual(sink.full_name, f"{parent}/sinks/{self.SINK_NAME}") + self.assertEqual(sink.path, f"/{parent}/sinks/{self.SINK_NAME}") def test_from_api_repr_minimal(self): client = _Client(project=self.PROJECT) - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + RESOURCE = {"name": self.SINK_NAME, "destination": self.DESTINATION_URI} klass = self._get_target_class() sink = klass.from_api_repr(RESOURCE, client=client) @@ -69,12 +74,12 @@ def test_from_api_repr_minimal(self): self.assertIsNone(sink.filter_) self.assertIsNone(sink.writer_identity) self.assertIs(sink._client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.parent, self.PROJECT_PATH) + self.assertEqual(sink.full_name, self.FULL_NAME) def test_from_api_repr_full(self): client = _Client(project=self.PROJECT) - FULL = "projects/%s/sinks/%s" % (self.PROJECT, self.SINK_NAME) + parent = "organizations/my_organization" RESOURCE = { "name": self.SINK_NAME, "destination": self.DESTINATION_URI, @@ -82,14 +87,14 @@ def test_from_api_repr_full(self): "writerIdentity": self.WRITER_IDENTITY, } klass = self._get_target_class() - sink = klass.from_api_repr(RESOURCE, client=client) + sink = klass.from_api_repr(RESOURCE, client=client, parent=parent) self.assertEqual(sink.name, self.SINK_NAME) self.assertEqual(sink.filter_, self.FILTER) self.assertEqual(sink.destination, self.DESTINATION_URI) self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertIs(sink._client, client) - self.assertEqual(sink.project, self.PROJECT) - self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.parent, parent) + self.assertEqual(sink.full_name, f"{parent}/sinks/{self.SINK_NAME}") def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -101,7 +106,10 @@ def test_create_w_bound_client(self): "writerIdentity": self.WRITER_IDENTITY, } sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client, ) sink.create() @@ -112,14 +120,23 @@ def test_create_w_bound_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), + ( + self.PROJECT_PATH, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + False, + ), ) def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client1, ) api = client2.sinks_api = _DummySinksAPI() api._sink_create_response = { @@ -137,19 +154,28 @@ def test_create_w_alternate_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_create_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), + ( + self.PROJECT_PATH, + self.SINK_NAME, + self.FILTER, + self.DESTINATION_URI, + True, + ), ) def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + 
self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client, ) self.assertFalse(sink.exists()) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.FULL_NAME)) def test_exists_hit_w_alternate_client(self): RESOURCE = { @@ -162,12 +188,15 @@ def test_exists_hit_w_alternate_client(self): api = client2.sinks_api = _DummySinksAPI() api._sink_get_response = RESOURCE sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client1, ) self.assertTrue(sink.exists(client=client2)) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.FULL_NAME)) def test_reload_w_bound_client(self): NEW_DESTINATION_URI = "faux.googleapis.com/other" @@ -182,7 +211,7 @@ def test_reload_w_bound_client(self): self.assertEqual(sink.destination, NEW_DESTINATION_URI) self.assertIsNone(sink.filter_) self.assertIsNone(sink.writer_identity) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.FULL_NAME)) def test_reload_w_alternate_client(self): NEW_FILTER = "logName:syslog AND severity>=INFO" @@ -204,7 +233,7 @@ def test_reload_w_alternate_client(self): self.assertEqual(sink.destination, NEW_DESTINATION_URI) self.assertEqual(sink.filter_, NEW_FILTER) self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) - self.assertEqual(api._sink_get_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_get_called_with, (self.FULL_NAME)) def test_update_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -216,7 +245,10 @@ def test_update_w_bound_client(self): "writerIdentity": self.WRITER_IDENTITY, } sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client, ) sink.update() @@ -227,7 +259,7 @@ def test_update_w_bound_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, False), + (self.FULL_NAME, self.FILTER, self.DESTINATION_URI, False), ) def test_update_w_alternate_client(self): @@ -241,7 +273,10 @@ def test_update_w_alternate_client(self): "writerIdentity": self.WRITER_IDENTITY, } sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client1, ) sink.update(client=client2, unique_writer_identity=True) @@ -252,31 +287,37 @@ def test_update_w_alternate_client(self): self.assertEqual(sink.writer_identity, self.WRITER_IDENTITY) self.assertEqual( api._sink_update_called_with, - (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI, True), + (self.FULL_NAME, self.FILTER, self.DESTINATION_URI, True), ) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.sinks_api = _DummySinksAPI() sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client, ) sink.delete() - self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_delete_called_with, 
(self.FULL_NAME)) def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) api = client2.sinks_api = _DummySinksAPI() sink = self._make_one( - self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1 + self.SINK_NAME, + filter_=self.FILTER, + destination=self.DESTINATION_URI, + client=client1, ) sink.delete(client=client2) - self.assertEqual(api._sink_delete_called_with, (self.PROJECT, self.SINK_NAME)) + self.assertEqual(api._sink_delete_called_with, (self.FULL_NAME)) class _Client(object): @@ -286,10 +327,10 @@ def __init__(self, project): class _DummySinksAPI(object): def sink_create( - self, project, sink_name, filter_, destination, unique_writer_identity=False + self, parent, sink_name, filter_, destination, *, unique_writer_identity=False ): self._sink_create_called_with = ( - project, + parent, sink_name, filter_, destination, @@ -297,20 +338,19 @@ def sink_create( ) return self._sink_create_response - def sink_get(self, project, sink_name): + def sink_get(self, sink_name): from google.cloud.exceptions import NotFound - self._sink_get_called_with = (project, sink_name) + self._sink_get_called_with = sink_name try: return self._sink_get_response except AttributeError: raise NotFound("miss") def sink_update( - self, project, sink_name, filter_, destination, unique_writer_identity=False + self, sink_name, filter_, destination, *, unique_writer_identity=False ): self._sink_update_called_with = ( - project, sink_name, filter_, destination, @@ -318,5 +358,5 @@ def sink_update( ) return self._sink_update_response - def sink_delete(self, project, sink_name): - self._sink_delete_called_with = (project, sink_name) + def sink_delete(self, sink_name): + self._sink_delete_called_with = sink_name From 021c7ac6de9f0c35a937a11efb2c4934221b1885 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Nov 2020 00:34:05 +0000 Subject: [PATCH 339/855] chore: release 2.0.0 (#82) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release \*beep\* \*boop\* --- ## [2.0.0](https://www.github.com/googleapis/python-logging/compare/v1.15.1...v2.0.0) (2020-11-19) ### ⚠ BREAKING CHANGES * Use microgenerator for GAPIC layer. See [UPGRADING.md](https://github.com/googleapis/python-logging/blob/master/UPGRADING.md) for details. 
(#94) * removes support for webapp2 and other Python2 specific code ### Features * pass 'client_options' to super ctor ([#61](https://www.github.com/googleapis/python-logging/issues/61)) ([c4387b3](https://www.github.com/googleapis/python-logging/commit/c4387b307f8f3502fb53ae1f7e1144f6284280a4)), closes [#55](https://www.github.com/googleapis/python-logging/issues/55) * use microgenerator ([#94](https://www.github.com/googleapis/python-logging/issues/94)) ([ff90fd2](https://www.github.com/googleapis/python-logging/commit/ff90fd2fb54c612fe6ab29708a2d5d984f60dea7)) ### Bug Fixes * add default filter settings to list_entries ([#73](https://www.github.com/googleapis/python-logging/issues/73)) ([0a1dd94](https://www.github.com/googleapis/python-logging/commit/0a1dd94811232634fdb849cb2c85bd44e870642f)) * failing CI tests ([#70](https://www.github.com/googleapis/python-logging/issues/70)) ([96adeed](https://www.github.com/googleapis/python-logging/commit/96adeedbda16a5c21651c356261442478aaa867a)) ### Code Refactoring * remove python2 ([#78](https://www.github.com/googleapis/python-logging/issues/78)) ([bf579e4](https://www.github.com/googleapis/python-logging/commit/bf579e4f871c92391a9f6f87eca931744158e31a)) ### Documentation * update docs ([#77](https://www.github.com/googleapis/python-logging/issues/77)) ([bdd9c44](https://www.github.com/googleapis/python-logging/commit/bdd9c440f29d1fcd6fb9545d8465c63efa6c0cea)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). --- packages/google-cloud-logging/CHANGELOG.md | 29 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index d0af80e6ceb5..811d691a057e 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.0.0](https://www.github.com/googleapis/python-logging/compare/v1.15.1...v2.0.0) (2020-11-19) + + +### ⚠ BREAKING CHANGES + +* Use microgenerator for GAPIC layer. See [UPGRADING.md](https://github.com/googleapis/python-logging/blob/master/UPGRADING.md) for details. 
(#94) +* removes support for webapp2 and other Python2 specific code + +### Features + +* pass 'client_options' to super ctor ([#61](https://www.github.com/googleapis/python-logging/issues/61)) ([c4387b3](https://www.github.com/googleapis/python-logging/commit/c4387b307f8f3502fb53ae1f7e1144f6284280a4)), closes [#55](https://www.github.com/googleapis/python-logging/issues/55) +* use microgenerator ([#94](https://www.github.com/googleapis/python-logging/issues/94)) ([ff90fd2](https://www.github.com/googleapis/python-logging/commit/ff90fd2fb54c612fe6ab29708a2d5d984f60dea7)) + + +### Bug Fixes + +* add default filter settings to list_entries ([#73](https://www.github.com/googleapis/python-logging/issues/73)) ([0a1dd94](https://www.github.com/googleapis/python-logging/commit/0a1dd94811232634fdb849cb2c85bd44e870642f)) +* failing CI tests ([#70](https://www.github.com/googleapis/python-logging/issues/70)) ([96adeed](https://www.github.com/googleapis/python-logging/commit/96adeedbda16a5c21651c356261442478aaa867a)) + + +### Code Refactoring + +* remove python2 ([#78](https://www.github.com/googleapis/python-logging/issues/78)) ([bf579e4](https://www.github.com/googleapis/python-logging/commit/bf579e4f871c92391a9f6f87eca931744158e31a)) + + +### Documentation + +* update docs ([#77](https://www.github.com/googleapis/python-logging/issues/77)) ([bdd9c44](https://www.github.com/googleapis/python-logging/commit/bdd9c440f29d1fcd6fb9545d8465c63efa6c0cea)) + ### [1.15.1](https://www.github.com/googleapis/python-logging/compare/v1.15.0...v1.15.1) (2020-07-01) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 87cb8f7f8be9..fff41ba80990 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "1.15.1" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From f3eb30e8ff8cbe09be8da052ecfc2e7607c73d5f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 Nov 2020 18:02:28 +0100 Subject: [PATCH 340/855] chore(deps): update dependency google-cloud-logging to v2 (#95) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index dbb4176a17c9..379ac65b1c45 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==1.15.1 +google-cloud-logging==2.0.0 From 75a7ed924d59e2226a4ce767649814fde30b38c6 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 20 Nov 2020 08:43:02 -0700 Subject: [PATCH 341/855] docs: fix logger documentation (#100) * docs: fix documentation for logger * chore: blacken --- packages/google-cloud-logging/docs/conf.py | 6 +++--- .../google/cloud/logging_v2/logger.py | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 296607b790b7..6da1e2e7988d 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -345,10 +345,10 @@ # Example configuration for intersphinx: refer to the Python standard library. 
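To make the 2.0.0 breaking changes above concrete: the test updates throughout this series pass `filter_` and `destination` as keyword arguments, which suggests the following before/after shape for callers. This is a hedged sketch only; the authoritative surface is in UPGRADING.md, and the sink name and destination here are placeholders:

```python
from google.cloud import logging  # resolves to logging_v2 as of 2.0.0

client = logging.Client()

# 1.x accepted the filter and destination positionally:
#     client.sink("my-sink", FILTER, DESTINATION)
# 2.x makes them keyword-only, matching the test changes in this series:
sink = client.sink(
    "my-sink",
    filter_="severity>=ERROR",
    destination="storage.googleapis.com/my-bucket",
)
sink.create()
```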
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 89202bcbd659..6e9c5f00da95 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -43,12 +43,13 @@ class Logger(object): - def __init__(self, name, client, *, labels=None): - """Loggers represent named targets for log entries. + """Loggers represent named targets for log entries. - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs + See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs + """ + def __init__(self, name, client, *, labels=None): + """ Args: name (str): The name of the logger. client (~logging_v2.client.Client): @@ -57,7 +58,6 @@ def __init__(self, name, client, *, labels=None): labels (Optional[dict]): Mapping of default labels for entries written via this logger. - """ self.name = name self._client = client @@ -247,6 +247,7 @@ def list_entries( "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + If not passed, defaults to the project bound to the client. filter_ (Optional[str]): a filter expression. See https://cloud.google.com/logging/docs/view/advanced_filters From 710304038876b406fb5154088a50b28eed175bc1 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 23 Nov 2020 15:00:50 -0800 Subject: [PATCH 342/855] tests: skip some system tests for mtls testing (#105) --- .../tests/system/test_system.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 10896adf72fa..f9cb96e18591 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -14,6 +14,8 @@ import datetime import logging +import os +import pytest import unittest from google.api_core.exceptions import BadGateway @@ -81,12 +83,21 @@ class Config(object): """ CLIENT = None + use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never") def setUpModule(): Config.CLIENT = client.Client() +# Skip the test cases using the bigquery, storage and pubsub clients for mTLS testing. +# Bigquery and storage use http, which doesn't have mTLS support, and pubsub doesn't +# have the mTLS fix released yet. 
+skip_for_mtls = pytest.mark.skipif( + Config.use_mtls == "always", reason="Skip the test case for mTLS testing" +) + + class TestLogging(unittest.TestCase): JSON_PAYLOAD = { @@ -408,6 +419,7 @@ def _init_storage_bucket(self): return BUCKET_URI + @skip_for_mtls def test_create_sink_storage_bucket(self): uri = self._init_storage_bucket() SINK_NAME = "test-create-sink-bucket%s" % (_RESOURCE_ID,) @@ -421,6 +433,7 @@ def test_create_sink_storage_bucket(self): self.to_delete.append(sink) self.assertTrue(sink.exists()) + @skip_for_mtls def test_create_sink_pubsub_topic(self): from google.cloud import pubsub_v1 @@ -474,6 +487,7 @@ def _init_bigquery_dataset(self): bigquery_client.update_dataset(dataset, ["access_entries"]) return dataset_uri + @skip_for_mtls def test_create_sink_bigquery_dataset(self): SINK_NAME = "test-create-sink-dataset%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) @@ -486,6 +500,7 @@ def test_create_sink_bigquery_dataset(self): self.to_delete.append(sink) self.assertTrue(sink.exists()) + @skip_for_mtls def test_list_sinks(self): SINK_NAME = "test-list-sinks%s" % (_RESOURCE_ID,) uri = self._init_storage_bucket() @@ -504,6 +519,7 @@ def test_list_sinks(self): after_names = set(after.name for after in after_sinks) self.assertTrue(sink.name in after_names) + @skip_for_mtls def test_reload_sink(self): SINK_NAME = "test-reload-sink%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) @@ -520,6 +536,7 @@ def test_reload_sink(self): self.assertEqual(sink.filter_, DEFAULT_FILTER) self.assertEqual(sink.destination, uri) + @skip_for_mtls def test_update_sink(self): SINK_NAME = "test-update-sink%s" % (_RESOURCE_ID,) retry = RetryErrors((Conflict, ServiceUnavailable), max_tries=10) From 087a2d337992fc3e244035ec893d86a2e7c9df94 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 1 Dec 2020 16:40:46 -0700 Subject: [PATCH 343/855] chore: require samples checks (#108) --- .../.github/sync-repo-settings.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 packages/google-cloud-logging/.github/sync-repo-settings.yaml diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..af59935321a9 --- /dev/null +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yaml @@ -0,0 +1,13 @@ +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. 
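As a generic illustration of the mTLS gating added to the system tests above (a standalone sketch, not this repository's fixtures): `pytest.mark.skipif` with a boolean condition is evaluated once, when the marker is built at import time, which is why the environment variable is read before the marker is defined:

```python
import os

import pytest

# Read the switch once at import time, mirroring Config.use_mtls above.
USE_MTLS = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never")

# The boolean condition is fixed when this module is imported,
# not re-checked per test invocation.
skip_for_mtls = pytest.mark.skipif(
    USE_MTLS == "always", reason="Skip the test case for mTLS testing"
)


@skip_for_mtls
def test_http_only_backend():
    # Would exercise an HTTP-only service; skipped under mTLS-only runs.
    assert True
```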
+# Defaults to `master` +- pattern: master + requiredStatusCheckContexts: + - 'Kokoro' + - 'cla/google' + - 'Samples - Lint' + - 'Samples - Python 3.6' + - 'Samples - Python 3.7' + - 'Samples - Python 3.8' From 73c3fc0479dddfbccfb798220daa914b1da45c5d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 1 Dec 2020 16:11:16 -0800 Subject: [PATCH 344/855] fix: remove duplicate stream handler (#106) --- .../google/cloud/logging_v2/handlers/handlers.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 2d79c7f8ad64..d45c7b61b4b5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -139,7 +139,6 @@ def setup_logging( logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(handler) - logger.addHandler(logging.StreamHandler()) for logger_name in all_excluded_loggers: logger = logging.getLogger(logger_name) logger.propagate = False From 9230f1cdfa7cec0b95fdf2fcaa63cf7bc274cb21 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 2 Dec 2020 10:56:12 -0800 Subject: [PATCH 345/855] chore: Re-generated to pick up changes from synthtool. (#98) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * fix: address lint issues Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Nov 12 11:30:49 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: e89175cf074dccc4babb4eca66ae913696e47a71 Source-Link: https://github.com/googleapis/synthtool/commit/e89175cf074dccc4babb4eca66ae913696e47a71 * docs(python): update intersphinx for grpc and auth * docs(python): update intersphinx for grpc and auth * use https for python intersphinx Co-authored-by: Tim Swast Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Nov 18 14:37:25 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9 Source-Link: https://github.com/googleapis/synthtool/commit/9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9 * docs(python): fix intersphinx link for google-auth Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Nov 19 10:16:05 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: a073c873f3928c561bdf87fdfbf1d081d1998984 Source-Link: https://github.com/googleapis/synthtool/commit/a073c873f3928c561bdf87fdfbf1d081d1998984 --- .../google/cloud/logging_v2/types/__init__.py | 35 +++--- packages/google-cloud-logging/noxfile.py | 2 +- .../samples/snippets/README.rst | 21 +++- .../samples/snippets/noxfile.py | 34 ++--- packages/google-cloud-logging/synth.metadata | 117 +++++++++++++++++- 5 files changed, 166 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 55161ba5f46c..ab5f9c8c1546 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -44,16 +44,6 @@ GetCmekSettingsRequest, UpdateCmekSettingsRequest, CmekSettings, - LifecycleState, -) -from .logging_metrics import ( - LogMetric, - ListLogMetricsRequest, - ListLogMetricsResponse, 
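Looping back to the duplicate stream-handler fix two commits above: with the extra `logging.StreamHandler` gone, `setup_logging` attaches only the handler it is given to the root logger, so each record is shipped once. A minimal sketch, assuming default arguments and the 2.x import paths:

```python
import logging

import google.cloud.logging
from google.cloud.logging_v2.handlers import CloudLoggingHandler, setup_logging

client = google.cloud.logging.Client()
handler = CloudLoggingHandler(client)
setup_logging(handler)

# After the fix, the root logger carries the Cloud Logging handler passed
# in, with no separate StreamHandler bolted on beside it.
assert handler in logging.getLogger().handlers
```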
- GetLogMetricRequest, - CreateLogMetricRequest, - UpdateLogMetricRequest, - DeleteLogMetricRequest, ) from .logging import ( DeleteLogRequest, @@ -67,6 +57,16 @@ ListLogsRequest, ListLogsResponse, ) +from .logging_metrics import ( + LogMetric, + ListLogMetricsRequest, + ListLogMetricsResponse, + GetLogMetricRequest, + CreateLogMetricRequest, + UpdateLogMetricRequest, + DeleteLogMetricRequest, +) + __all__ = ( "LogEntry", @@ -95,14 +95,6 @@ "GetCmekSettingsRequest", "UpdateCmekSettingsRequest", "CmekSettings", - "LifecycleState", - "LogMetric", - "ListLogMetricsRequest", - "ListLogMetricsResponse", - "GetLogMetricRequest", - "CreateLogMetricRequest", - "UpdateLogMetricRequest", - "DeleteLogMetricRequest", "DeleteLogRequest", "WriteLogEntriesRequest", "WriteLogEntriesResponse", @@ -113,4 +105,11 @@ "ListMonitoredResourceDescriptorsResponse", "ListLogsRequest", "ListLogsResponse", + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", ) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index ca45b2c40643..1844aa62ba8b 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst index 1531d24ad5eb..9a38dca7d2d1 100644 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ b/packages/google-cloud-logging/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Cloud Logging Python Samples @@ -19,6 +20,7 @@ Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -30,14 +32,16 @@ credentials for applications. https://cloud.google.com/docs/authentication/getting-started + + Install Dependencies ++++++++++++++++++++ -#. Clone python-logging and change directory to the sample directory you want to use. +#. Clone python-docs-samples and change directory to the sample directory you want to use. .. code-block:: bash - $ git clone https://github.com/googleapis/python-logging.git + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. @@ -61,6 +65,10 @@ Install Dependencies .. _virtualenv: https://virtualenv.pypa.io/ + + + + Samples ------------------------------------------------------------------------------- @@ -82,6 +90,7 @@ To run this sample: + Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -120,6 +129,7 @@ To run this sample: + Export +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -158,6 +168,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -173,4 +187,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index ab2c49227c3b..b90eef00f2d9 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -37,25 +37,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -70,12 +73,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -84,7 +87,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -133,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -142,11 +145,9 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." 
] session.run("flake8", *args) - - # # Black # @@ -159,7 +160,6 @@ def blacken(session): session.run("black", *python_files) - # # Sample Tests # @@ -199,9 +199,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 493b1a2b5841..05600e4ce718 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -3,8 +3,8 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-logging", - "sha": "8466c62f459af6c2d89b411297df06988e45b522" + "remote": "https://github.com/googleapis/python-logging.git", + "sha": "4e24b3c360adef8d7761573d789867857586337d" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "7db8a6c5ffb12a6e4c2f799c18f00f7f3d60e279" + "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "7db8a6c5ffb12a6e4c2f799c18f00f7f3d60e279" + "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" } } ], @@ -40,5 +40,114 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "google/cloud/logging/py.typed", + "google/cloud/logging_v2/proto/log_entry.proto", + "google/cloud/logging_v2/proto/logging.proto", + "google/cloud/logging_v2/proto/logging_config.proto", + "google/cloud/logging_v2/proto/logging_metrics.proto", + "google/cloud/logging_v2/py.typed", + "google/cloud/logging_v2/services/__init__.py", + 
"google/cloud/logging_v2/services/config_service_v2/__init__.py", + "google/cloud/logging_v2/services/config_service_v2/async_client.py", + "google/cloud/logging_v2/services/config_service_v2/client.py", + "google/cloud/logging_v2/services/config_service_v2/pagers.py", + "google/cloud/logging_v2/services/config_service_v2/transports/__init__.py", + "google/cloud/logging_v2/services/config_service_v2/transports/base.py", + "google/cloud/logging_v2/services/config_service_v2/transports/grpc.py", + "google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py", + "google/cloud/logging_v2/services/logging_service_v2/__init__.py", + "google/cloud/logging_v2/services/logging_service_v2/async_client.py", + "google/cloud/logging_v2/services/logging_service_v2/client.py", + "google/cloud/logging_v2/services/logging_service_v2/pagers.py", + "google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py", + "google/cloud/logging_v2/services/logging_service_v2/transports/base.py", + "google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py", + "google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py", + "google/cloud/logging_v2/services/metrics_service_v2/__init__.py", + "google/cloud/logging_v2/services/metrics_service_v2/async_client.py", + "google/cloud/logging_v2/services/metrics_service_v2/client.py", + "google/cloud/logging_v2/services/metrics_service_v2/pagers.py", + "google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py", + "google/cloud/logging_v2/services/metrics_service_v2/transports/base.py", + "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py", + "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py", + "google/cloud/logging_v2/types/__init__.py", + "google/cloud/logging_v2/types/log_entry.py", + "google/cloud/logging_v2/types/logging.py", + "google/cloud/logging_v2/types/logging_config.py", + "google/cloud/logging_v2/types/logging_metrics.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/logging_v2/__init__.py", + "tests/unit/gapic/logging_v2/test_config_service_v2.py", + "tests/unit/gapic/logging_v2/test_logging_service_v2.py", + "tests/unit/gapic/logging_v2/test_metrics_service_v2.py" ] } \ No newline at end of file From 7420e9c5b1e27acb12f160a1505118ee6eac9266 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 2 Dec 2020 14:17:37 -0800 Subject: [PATCH 346/855] chore: release 2.0.1 (#101) --- packages/google-cloud-logging/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 811d691a057e..a85cfb01e3e3 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### 
[2.0.1](https://www.github.com/googleapis/python-logging/compare/v2.0.0...v2.0.1) (2020-12-02) + + +### Bug Fixes + +* remove duplicate stream handler ([#106](https://www.github.com/googleapis/python-logging/issues/106)) ([eb5cf40](https://www.github.com/googleapis/python-logging/commit/eb5cf407129fb76124d6a405c0805b70f2689cc4)) + + +### Documentation + +* fix logger documentation ([#100](https://www.github.com/googleapis/python-logging/issues/100)) ([6a46b46](https://www.github.com/googleapis/python-logging/commit/6a46b46a6bbc154c9b5b737859f108021ab5b201)) + ## [2.0.0](https://www.github.com/googleapis/python-logging/compare/v1.15.1...v2.0.0) (2020-11-19) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index fff41ba80990..635bd1a57868 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.0.0" +version = "2.0.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d81161cdb29bd27ff9ca48b4d695bfa778c4a512 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 4 Dec 2020 12:07:26 -0800 Subject: [PATCH 347/855] chore: Re-generated to pick up changes from googleapis. (#97) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * feat: Makes remaining LogBucket and LogViews methods public PiperOrigin-RevId: 342353190 Source-Author: Google APIs Source-Date: Fri Nov 13 15:44:35 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91 Source-Link: https://github.com/googleapis/googleapis/commit/be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91 * fixed lint issue Co-authored-by: Daniel Sanche --- .../logging_v2/proto/logging_config.proto | 435 ++++- .../logging_v2/proto/logging_metrics.proto | 4 +- .../config_service_v2/async_client.py | 444 ++++- .../services/config_service_v2/client.py | 466 +++++- .../services/config_service_v2/pagers.py | 128 ++ .../config_service_v2/transports/base.py | 101 ++ .../config_service_v2/transports/grpc.py | 220 ++- .../transports/grpc_asyncio.py | 230 ++- .../metrics_service_v2/async_client.py | 12 +- .../services/metrics_service_v2/client.py | 12 +- .../google/cloud/logging_v2/types/__init__.py | 20 + .../cloud/logging_v2/types/logging_config.py | 298 +++- .../cloud/logging_v2/types/logging_metrics.py | 4 +- .../samples/snippets/noxfile.py | 2 + packages/google-cloud-logging/synth.metadata | 4 +- .../logging_v2/test_config_service_v2.py | 1451 ++++++++++++++++- 16 files changed, 3658 insertions(+), 173 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto index 9486f4a9a4fe..9b10932d637b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto @@ -55,7 +55,7 @@ service ConfigServiceV2 { "https://www.googleapis.com/auth/logging.admin," "https://www.googleapis.com/auth/logging.read"; - // Lists buckets (Beta). + // Lists buckets. 
rpc ListBuckets(ListBucketsRequest) returns (ListBucketsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*/locations/*}/buckets" @@ -75,7 +75,7 @@ service ConfigServiceV2 { option (google.api.method_signature) = "parent"; } - // Gets a bucket (Beta). + // Gets a bucket. rpc GetBucket(GetBucketRequest) returns (LogBucket) { option (google.api.http) = { get: "/v2/{name=*/*/locations/*/buckets/*}" @@ -94,6 +94,31 @@ service ConfigServiceV2 { }; } + // Creates a bucket that can be used to store log entries. Once a bucket has + // been created, the region cannot be changed. + rpc CreateBucket(CreateBucketRequest) returns (LogBucket) { + option (google.api.http) = { + post: "/v2/{parent=*/*/locations/*}/buckets" + body: "bucket" + additional_bindings { + post: "/v2/{parent=projects/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=organizations/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=folders/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*/locations/*}/buckets" + body: "bucket" + } + }; + } + // Updates a bucket. This method replaces the following fields in the // existing bucket with values from the new bucket: `retention_period` // @@ -104,7 +129,6 @@ service ConfigServiceV2 { // will be returned. // // A buckets region may not be modified after it is created. - // This method is in Beta. rpc UpdateBucket(UpdateBucketRequest) returns (LogBucket) { option (google.api.http) = { patch: "/v2/{name=*/*/locations/*/buckets/*}" @@ -128,6 +152,161 @@ service ConfigServiceV2 { }; } + // Deletes a bucket. + // Moves the bucket to the DELETE_REQUESTED state. After 7 days, the + // bucket will be purged and all logs in the bucket will be permanently + // deleted. + rpc DeleteBucket(DeleteBucketRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=*/*/locations/*/buckets/*}" + additional_bindings { + delete: "/v2/{name=projects/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" + } + }; + } + + // Undeletes a bucket. A bucket that has been deleted may be undeleted within + // the grace period of 7 days. + rpc UndeleteBucket(UndeleteBucketRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=*/*/locations/*/buckets/*}:undelete" + body: "*" + additional_bindings { + post: "/v2/{name=projects/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=organizations/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=folders/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete" + body: "*" + } + }; + } + + // Lists views on a bucket. 
+ rpc ListViews(ListViewsRequest) returns (ListViewsResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*/locations/*/buckets/*}/views" + additional_bindings { + get: "/v2/{parent=projects/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=folders/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" + } + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a view. + rpc GetView(GetViewRequest) returns (LogView) { + option (google.api.http) = { + get: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + additional_bindings { + get: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/buckets/*/views/*}" + } + }; + } + + // Creates a view over logs in a bucket. A bucket may contain a maximum of + // 50 views. + rpc CreateView(CreateViewRequest) returns (LogView) { + option (google.api.http) = { + post: "/v2/{parent=*/*/locations/*/buckets/*}/views" + body: "view" + additional_bindings { + post: "/v2/{parent=projects/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=folders/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" + body: "view" + } + }; + } + + // Updates a view. This method replaces the following fields in the existing + // view with values from the new view: `filter`. + rpc UpdateView(UpdateViewRequest) returns (LogView) { + option (google.api.http) = { + patch: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + body: "view" + additional_bindings { + patch: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" + body: "view" + } + }; + } + + // Deletes a view from a bucket. + rpc DeleteView(DeleteViewRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + additional_bindings { + delete: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" + } + }; + } + // Lists sinks. rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { @@ -420,7 +599,7 @@ service ConfigServiceV2 { } } -// Describes a repository of logs (Beta). +// Describes a repository of logs. 
message LogBucket { option (google.api.resource) = { type: "logging.googleapis.com/LogBucket" @@ -435,7 +614,6 @@ message LogBucket { // "projects/my-project-id/locations/my-location/buckets/my-bucket-id The // supported locations are: // "global" - // "us-central1" // // For the location of `global` it is unspecified where logs are actually // stored. @@ -458,10 +636,63 @@ message LogBucket { // 30 days will be used. int32 retention_days = 11; + // Whether the bucket has been locked. + // The retention period on a locked bucket may not be changed. + // Locked buckets may only be deleted if they are empty. + bool locked = 9; + // Output only. The bucket lifecycle state. LifecycleState lifecycle_state = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } +// LogBucket lifecycle states. +enum LifecycleState { + // Unspecified state. This is only used/useful for distinguishing + // unset values. + LIFECYCLE_STATE_UNSPECIFIED = 0; + + // The normal and active state. + ACTIVE = 1; + + // The bucket has been marked for deletion by the user. + DELETE_REQUESTED = 2; +} + +// Describes a view over logs in a bucket. +message LogView { + option (google.api.resource) = { + type: "logging.googleapis.com/LogView" + pattern: "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "folders/{folder}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}/views/{view}" + }; + + // The resource name of the view. + // For example + // "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + string name = 1; + + // Describes this view. + string description = 3; + + // Output only. The creation timestamp of the view. + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last update timestamp of the view. + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Filter that restricts which log entries in a bucket are visible in this + // view. Filters are restricted to be a logical AND of ==/!= of any of the + // following: + // originating project/folder/organization/billing account. + // resource type + // log id + // Example: SOURCE("projects/myproject") AND resource.type = "gce_instance" + // AND LOG_ID("stdout") + string filter = 7; +} + // Describes a sink used to export log entries to one of the following // destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a // Cloud Pub/Sub topic. A logs filter controls which log entries are exported. @@ -529,10 +760,15 @@ message LogSink { // export any log entries. bool disabled = 19 [(google.api.field_behavior) = OPTIONAL]; + // Optional. Log entries that match any of the exclusion filters will not be exported. + // If a log entry is matched by both `filter` and one of `exclusion_filters` + // it will not be exported. + repeated LogExclusion exclusions = 16 [(google.api.field_behavior) = OPTIONAL]; + // Deprecated. This field is unused. VersionFormat output_version_format = 6 [deprecated = true]; - // Output only. An IAM identity–a service account or group—under which Logging + // Output only. An IAM identity—a service account or group—under which Logging // writes the exported log entries to the sink's destination. 
This field is // set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] based on the @@ -599,20 +835,7 @@ message BigQueryOptions { bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } -// LogBucket lifecycle states (Beta). -enum LifecycleState { - // Unspecified state. This is only used/useful for distinguishing - // unset values. - LIFECYCLE_STATE_UNSPECIFIED = 0; - - // The normal and active state. - ACTIVE = 1; - - // The bucket has been marked for deletion by the user. - DELETE_REQUESTED = 2; -} - -// The parameters to `ListBuckets` (Beta). +// The parameters to `ListBuckets`. message ListBucketsRequest { // Required. The parent resource whose buckets are to be listed: // @@ -643,7 +866,7 @@ message ListBucketsRequest { int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; } -// The response from ListBuckets (Beta). +// The response from ListBuckets. message ListBucketsResponse { // A list of buckets. repeated LogBucket buckets = 1; @@ -654,7 +877,32 @@ message ListBucketsResponse { string next_page_token = 2; } -// The parameters to `UpdateBucket` (Beta). +// The parameters to `CreateBucket`. +message CreateBucketRequest { + // Required. The resource in which to create the bucket: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + // + // Example: `"projects/my-logging-project/locations/global"` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/LogBucket" + } + ]; + + // Required. A client-assigned identifier such as `"my-bucket"`. Identifiers are + // limited to 100 characters and can include only letters, digits, + // underscores, hyphens, and periods. + string bucket_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The new bucket. The region specified in the new bucket must be compliant + // with any Location Restriction Org Policy. The name field in the bucket is + // ignored. + LogBucket bucket = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// The parameters to `UpdateBucket`. message UpdateBucketRequest { // Required. The full resource name of the bucket to update. // @@ -688,7 +936,7 @@ message UpdateBucketRequest { google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; } -// The parameters to `GetBucket` (Beta). +// The parameters to `GetBucket`. message GetBucketRequest { // Required. The resource name of the bucket: // @@ -707,6 +955,147 @@ message GetBucketRequest { ]; } +// The parameters to `DeleteBucket`. +message DeleteBucketRequest { + // Required. The full resource name of the bucket to delete. + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; +} + +// The parameters to `UndeleteBucket`. +message UndeleteBucketRequest { + // Required. The full resource name of the bucket to undelete. 
+ // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; +} + +// The parameters to `ListViews`. +message ListViewsRequest { + // Required. The bucket whose views are to be listed: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response from ListViews. +message ListViewsResponse { + // A list of views. + repeated LogView views = 1; + + // If there might be more results than appear in this response, then + // `nextPageToken` is included. To get the next set of results, call the same + // method again using the value of `nextPageToken` as `pageToken`. + string next_page_token = 2; +} + +// The parameters to `CreateView`. +message CreateViewRequest { + // Required. The bucket in which to create the view + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-logging-project/locations/my-location/buckets/my-bucket"` + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The id to use for this view. + string view_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The new view. + LogView view = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// The parameters to `UpdateView`. +message UpdateViewRequest { + // Required. The full resource name of the view to update + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The updated view. + LogView view = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Field mask that specifies the fields in `view` that need + // an update. A field will be overwritten if, and only if, it is + // in the update mask. `name` and output only fields cannot be updated. + // + // For a detailed `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + // + // Example: `updateMask=filter`. + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// The parameters to `GetView`. +message GetViewRequest { + // Required. 
The resource name of the view: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogView" + } + ]; +} + +// The parameters to `DeleteView`. +message DeleteViewRequest { + // Required. The full resource name of the view to delete: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogView" + } + ]; +} + // The parameters to `ListSinks`. message ListSinksRequest { // Required. The parent resource whose sinks are to be listed: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto index eb9f73ffabcf..09d629648114 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto @@ -92,8 +92,8 @@ service MetricsServiceV2 { // Describes a logs-based metric. The value of the metric is the number of log // entries that match a logs filter in a given time interval. // -// Logs-based metric can also be used to extract values from logs and create a -// a distribution of the values. The distribution records the statistics of the +// Logs-based metrics can also be used to extract values from logs and create a +// distribution of the values. The distribution records the statistics of the // extracted values along with an optional histogram of the values as specified // by the bucket options. message LogMetric { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index d025f5916648..73737c1d835a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -58,6 +58,8 @@ class ConfigServiceV2AsyncClient: ) log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) + log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) + parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path @@ -161,12 +163,11 @@ async def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets (Beta). + r"""Lists buckets. Args: request (:class:`~.logging_config.ListBucketsRequest`): - The request object. The parameters to `ListBuckets` - (Beta). + The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -193,7 +194,7 @@ async def list_buckets( Returns: ~.pagers.ListBucketsAsyncPager: - The response from ListBuckets (Beta). + The response from ListBuckets. 
Iterating over this object will yield results and resolve additional pages automatically. @@ -251,12 +252,11 @@ async def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket (Beta). + r"""Gets a bucket. Args: request (:class:`~.logging_config.GetBucketRequest`): - The request object. The parameters to `GetBucket` - (Beta). + The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -266,9 +266,7 @@ async def get_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -294,6 +292,56 @@ async def get_bucket( # Done; return the response. return response + async def create_bucket( + self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`~.logging_config.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def update_bucket( self, request: logging_config.UpdateBucketRequest = None, @@ -312,13 +360,11 @@ async def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A bucket's region may not be modified after it is created. Args: request (:class:`~.logging_config.UpdateBucketRequest`): - The request object. The parameters to `UpdateBucket` - (Beta). + The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -328,9 +374,7 @@ async def update_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -356,6 +400,372 @@ async def update_bucket( # Done; return the response. 
return response + async def delete_bucket( + self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (:class:`~.logging_config.DeleteBucketRequest`): + The request object. The parameters to `DeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def undelete_bucket( + self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (:class:`~.logging_config.UndeleteBucketRequest`): + The request object. The parameters to `UndeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.undelete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def list_views( + self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsAsyncPager: + r"""Lists views on a bucket. + + Args: + request (:class:`~.logging_config.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListViewsAsyncPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_views, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListViewsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_view( + self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`~.logging_config.GetViewRequest`): + The request object. The parameters to `GetView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_view( + self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. 
A bucket may + contain a maximum of 50 views. + + Args: + request (:class:`~.logging_config.CreateViewRequest`): + The request object. The parameters to `CreateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_view( + self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (:class:`~.logging_config.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_view( + self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`~.logging_config.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + async def list_sinks( self, request: logging_config.ListSinksRequest = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index ea9ee605a3a6..a16f5f20b8bf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -192,6 +192,22 @@ def parse_log_sink_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: + """Return a fully-qualified log_view string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, location=location, bucket=bucket, view=view, + ) + + @staticmethod + def parse_log_view_path(path: str) -> Dict[str, str]: + """Parse a log_view path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)/views/(?P<view>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" @@ -377,12 +393,11 @@ def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets (Beta). + r"""Lists buckets. Args: request (:class:`~.logging_config.ListBucketsRequest`): - The request object. The parameters to `ListBuckets` - (Beta). + The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -409,7 +424,7 @@ def list_buckets( Returns: ~.pagers.ListBucketsPager: - The response from ListBuckets (Beta). + The response from ListBuckets. Iterating over this object will yield results and resolve additional pages automatically. @@ -468,12 +483,11 @@ def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket (Beta). + r"""Gets a bucket. Args: request (:class:`~.logging_config.GetBucketRequest`): - The request object. The parameters to `GetBucket` - (Beta). + The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -483,9 +497,7 @@ def get_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -512,6 +524,57 @@ def get_bucket( # Done; return the response. 
return response + def create_bucket( + self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`~.logging_config.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def update_bucket( self, request: logging_config.UpdateBucketRequest = None, @@ -530,13 +593,11 @@ def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A bucket's region may not be modified after it is created. Args: request (:class:`~.logging_config.UpdateBucketRequest`): - The request object. The parameters to `UpdateBucket` - (Beta). + The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -546,9 +607,7 @@ def update_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -575,6 +634,379 @@ def update_bucket( # Done; return the response. return response + def delete_bucket( + self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (:class:`~.logging_config.DeleteBucketRequest`): + The request object. The parameters to `DeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def undelete_bucket( + self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (:class:`~.logging_config.UndeleteBucketRequest`): + The request object. The parameters to `UndeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UndeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def list_views( + self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsPager: + r"""Lists views on a bucket. + + Args: + request (:class:`~.logging_config.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListViewsPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListViewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_views] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListViewsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_view( + self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`~.logging_config.GetViewRequest`): + The request object. The parameters to `GetView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_view( + self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. 
+ + Args: + request (:class:`~.logging_config.CreateViewRequest`): + The request object. The parameters to `CreateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_view( + self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (:class:`~.logging_config.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_view( + self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`~.logging_config.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + def list_sinks( self, request: logging_config.ListSinksRequest = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 173780b5eeb0..8e1c4ee0d4ce 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -148,6 +148,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListViewsPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListViewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListViewsRequest`): + The initial request object. + response (:class:`~.logging_config.ListViewsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogView]: + for page in self.pages: + yield from page.views + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListViewsAsyncPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListViewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListViewsRequest`): + The initial request object. + response (:class:`~.logging_config.ListViewsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + async def async_generator(): + async for page in self.pages: + for response in page.views: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListSinksPager: """A pager for iterating through ``list_sinks`` requests. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index a0393aa98c25..3e17598fe680 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -116,9 +116,33 @@ def _prep_wrapped_messages(self, client_info): self.get_bucket: gapic_v1.method.wrap_method( self.get_bucket, default_timeout=None, client_info=client_info, ), + self.create_bucket: gapic_v1.method.wrap_method( + self.create_bucket, default_timeout=None, client_info=client_info, + ), self.update_bucket: gapic_v1.method.wrap_method( self.update_bucket, default_timeout=None, client_info=client_info, ), + self.delete_bucket: gapic_v1.method.wrap_method( + self.delete_bucket, default_timeout=None, client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method.wrap_method( + self.undelete_bucket, default_timeout=None, client_info=client_info, + ), + self.list_views: gapic_v1.method.wrap_method( + self.list_views, default_timeout=None, client_info=client_info, + ), + self.get_view: gapic_v1.method.wrap_method( + self.get_view, default_timeout=None, client_info=client_info, + ), + self.create_view: gapic_v1.method.wrap_method( + self.create_view, default_timeout=None, client_info=client_info, + ), + self.update_view: gapic_v1.method.wrap_method( + self.update_view, default_timeout=None, client_info=client_info, + ), + self.delete_view: gapic_v1.method.wrap_method( + self.delete_view, default_timeout=None, client_info=client_info, + ), self.list_sinks: gapic_v1.method.wrap_method( self.list_sinks, default_retry=retries.Retry( @@ -266,6 +290,17 @@ def get_bucket( ]: raise NotImplementedError() + @property + def create_bucket( + self, + ) -> typing.Callable[ + [logging_config.CreateBucketRequest], + typing.Union[ + logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] + ], + ]: + raise NotImplementedError() + @property def update_bucket( self, @@ -277,6 +312,72 @@ def update_bucket( ]: raise NotImplementedError() + @property + def delete_bucket( + self, + ) -> typing.Callable[ + [logging_config.DeleteBucketRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def undelete_bucket( + self, + ) -> typing.Callable[ + [logging_config.UndeleteBucketRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def list_views( + self, + ) -> typing.Callable[ + [logging_config.ListViewsRequest], + typing.Union[ + logging_config.ListViewsResponse, + typing.Awaitable[logging_config.ListViewsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_view( + self, + ) -> typing.Callable[ + [logging_config.GetViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + @property + def create_view( + self, + ) -> typing.Callable[ + [logging_config.CreateViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + @property + def update_view( + self, + ) -> typing.Callable[ + [logging_config.UpdateViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + 
@property + def delete_view( + self, + ) -> typing.Callable[ + [logging_config.DeleteViewRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + @property def list_sinks( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 5603beeb5247..f083373b1e5b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -238,7 +238,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets (Beta). + Lists buckets. Returns: Callable[[~.ListBucketsRequest], @@ -264,7 +264,7 @@ def get_bucket( ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket (Beta). + Gets a bucket. Returns: Callable[[~.GetBucketRequest], @@ -284,6 +284,34 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.LogBucket]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs["create_bucket"] + @property def update_bucket( self, @@ -300,8 +328,7 @@ def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A bucket's region may not be modified after it is created. Returns: Callable[[~.UpdateBucketRequest], @@ -321,6 +348,191 @@ def update_bucket( ) return self._stubs["update_bucket"] + @property + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], empty.Empty]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_bucket"] + + @property + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], empty.Empty]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Returns: + Callable[[~.UndeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["undelete_bucket"] + + @property + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + ~.ListViewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_views" not in self._stubs: + self._stubs["list_views"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs["list_views"] + + @property + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_view" not in self._stubs: + self._stubs["get_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["get_view"] + + @property + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_view" not in self._stubs: + self._stubs["create_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["create_view"] + + @property + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_view" not in self._stubs: + self._stubs["update_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["update_view"] + + @property + def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empty]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_view"] + @property def list_sinks( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index a4c94db22aed..7376164e4730 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -243,7 +243,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets (Beta). + Lists buckets. Returns: Callable[[~.ListBucketsRequest], @@ -271,7 +271,7 @@ def get_bucket( ]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket (Beta). + Gets a bucket. Returns: Callable[[~.GetBucketRequest], @@ -291,6 +291,36 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket] + ]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. 
+
+ Returns:
+ Callable[[~.CreateBucketRequest],
+ Awaitable[~.LogBucket]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_bucket" not in self._stubs:
+ self._stubs["create_bucket"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.ConfigServiceV2/CreateBucket",
+ request_serializer=logging_config.CreateBucketRequest.serialize,
+ response_deserializer=logging_config.LogBucket.deserialize,
+ )
+ return self._stubs["create_bucket"]
+
 @property
 def update_bucket(
 self,
@@ -309,8 +339,7 @@ def update_bucket(
 If the bucket has a LifecycleState of DELETE_REQUESTED,
 FAILED_PRECONDITION will be returned.
- A buckets region may not be modified after it is created. This
- method is in Beta.
+ A bucket's region may not be modified after it is created.
 Returns:
 Callable[[~.UpdateBucketRequest],
@@ -330,6 +359,199 @@ def update_bucket(
 )
 return self._stubs["update_bucket"]
+ @property
+ def delete_bucket(
+ self,
+ ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the delete bucket method over gRPC.
+
+ Deletes a bucket. Moves the bucket to the DELETE_REQUESTED
+ state. After 7 days, the bucket will be purged and all logs in
+ the bucket will be permanently deleted.
+
+ Returns:
+ Callable[[~.DeleteBucketRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_bucket" not in self._stubs:
+ self._stubs["delete_bucket"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.ConfigServiceV2/DeleteBucket",
+ request_serializer=logging_config.DeleteBucketRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_bucket"]
+
+ @property
+ def undelete_bucket(
+ self,
+ ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the undelete bucket method over gRPC.
+
+ Undeletes a bucket. A bucket that has been deleted
+ may be undeleted within the grace period of 7 days.
+
+ Returns:
+ Callable[[~.UndeleteBucketRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "undelete_bucket" not in self._stubs:
+ self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary(
+ "/google.logging.v2.ConfigServiceV2/UndeleteBucket",
+ request_serializer=logging_config.UndeleteBucketRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["undelete_bucket"]
+
+ @property
+ def list_views(
+ self,
+ ) -> Callable[
+ [logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse]
+ ]:
+ r"""Return a callable for the list views method over gRPC.
+
+ Lists views on a bucket.
+
+ Returns:
+ Callable[[~.ListViewsRequest],
+ Awaitable[~.ListViewsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_views" not in self._stubs: + self._stubs["list_views"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs["list_views"] + + @property + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_view" not in self._stubs: + self._stubs["get_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["get_view"] + + @property + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], Awaitable[logging_config.LogView] + ]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_view" not in self._stubs: + self._stubs["create_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["create_view"] + + @property + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], Awaitable[logging_config.LogView] + ]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_view" not in self._stubs: + self._stubs["update_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["update_view"] + + @property + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. 
+ + Returns: + Callable[[~.DeleteViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_view"] + @property def list_sinks( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 93dfbd71b33d..bd3c759a1d7e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -273,8 +273,8 @@ async def get_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -379,8 +379,8 @@ async def create_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -475,8 +475,8 @@ async def update_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index f4bca39263cf..d03ce86cdc25 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -452,8 +452,8 @@ def get_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -549,8 +549,8 @@ def create_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. 
- Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -646,8 +646,8 @@ def update_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index ab5f9c8c1546..b24bf3b8c508 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -22,12 +22,22 @@ ) from .logging_config import ( LogBucket, + LogView, LogSink, BigQueryOptions, ListBucketsRequest, ListBucketsResponse, + CreateBucketRequest, UpdateBucketRequest, GetBucketRequest, + DeleteBucketRequest, + UndeleteBucketRequest, + ListViewsRequest, + ListViewsResponse, + CreateViewRequest, + UpdateViewRequest, + GetViewRequest, + DeleteViewRequest, ListSinksRequest, ListSinksResponse, GetSinkRequest, @@ -73,12 +83,22 @@ "LogEntryOperation", "LogEntrySourceLocation", "LogBucket", + "LogView", "LogSink", "BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse", + "CreateBucketRequest", "UpdateBucketRequest", "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", "ListSinksRequest", "ListSinksResponse", "GetSinkRequest", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 2161d687232d..aaf057acffc8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -27,12 +27,22 @@ manifest={ "LifecycleState", "LogBucket", + "LogView", "LogSink", "BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse", + "CreateBucketRequest", "UpdateBucketRequest", "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", "ListSinksRequest", "ListSinksResponse", "GetSinkRequest", @@ -54,20 +64,20 @@ class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states (Beta).""" + r"""LogBucket lifecycle states.""" LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 class LogBucket(proto.Message): - r"""Describes a repository of logs (Beta). + r"""Describes a repository of logs. Attributes: name (str): The resource name of the bucket. For example: "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" "us-central1" + The supported locations are: "global" For the location of ``global`` it is unspecified where logs are actually stored. Once a bucket has been created, the @@ -88,6 +98,11 @@ class LogBucket(proto.Message): period is 1 day. 
If this value is set to zero at bucket creation time, the default time of 30 days will be used.
+ locked (bool):
+ Whether the bucket has been locked.
+ The retention period on a locked bucket may not
+ be changed. Locked buckets may only be deleted
+ if they are empty.
 lifecycle_state (~.logging_config.LifecycleState):
 Output only. The bucket lifecycle state.
 """
@@ -102,9 +117,48 @@ class LogBucket(proto.Message):
 retention_days = proto.Field(proto.INT32, number=11)
+ locked = proto.Field(proto.BOOL, number=9)
+
 lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",)
+class LogView(proto.Message):
+ r"""Describes a view over logs in a bucket.
+
+ Attributes:
+ name (str):
+ The resource name of the view.
+ For example:
+ "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view"
+ description (str):
+ Describes this view.
+ create_time (~.timestamp.Timestamp):
+ Output only. The creation timestamp of the
+ view.
+ update_time (~.timestamp.Timestamp):
+ Output only. The last update timestamp of the
+ view.
+ filter (str):
+ Filter that restricts which log entries in a bucket are
+ visible in this view. Filters are restricted to be a logical
+ AND of ==/!= of any of the following:
+
+ - originating project/folder/organization/billing account
+ - resource type
+ - log id
+
+ Example: SOURCE("projects/myproject") AND
+ resource.type = "gce_instance" AND LOG_ID("stdout")
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ description = proto.Field(proto.STRING, number=3)
+
+ create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
+ update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
+
+ filter = proto.Field(proto.STRING, number=7)
+
+
 class LogSink(proto.Message):
 r"""Describes a sink used to export log entries to one of the
 following destinations in any project: a Cloud Storage bucket, a
@@ -152,10 +206,15 @@ class LogSink(proto.Message):
 disabled (bool):
 Optional. If set to True, then this sink is
 disabled and it does not export any log entries.
+ exclusions (Sequence[~.logging_config.LogExclusion]):
+ Optional. Log entries that match any of the exclusion
+ filters will not be exported. If a log entry is matched by
+ both ``filter`` and one of ``exclusion_filters`` it will not
+ be exported.
 output_version_format (~.logging_config.LogSink.VersionFormat):
 Deprecated. This field is unused.
 writer_identity (str):
- Output only. An IAM identity–a service account or
+ Output only. An IAM identity—a service account or
 group—under which Logging writes the exported log entries to
 the sink's destination. This field is set by
 [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink]
@@ -218,6 +277,8 @@ class VersionFormat(proto.Enum):
 disabled = proto.Field(proto.BOOL, number=19)
+ exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",)
+
 output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,)
 writer_identity = proto.Field(proto.STRING, number=8)
@@ -264,7 +325,7 @@ class BigQueryOptions(proto.Message):
 class ListBucketsRequest(proto.Message):
- r"""The parameters to ``ListBuckets`` (Beta).
+ r"""The parameters to ``ListBuckets``.
 Attributes:
 parent (str):
@@ -302,7 +363,7 @@ class ListBucketsRequest(proto.Message):
 class ListBucketsResponse(proto.Message):
- r"""The response from ListBuckets (Beta).
+ r"""The response from ListBuckets.
Attributes: buckets (Sequence[~.logging_config.LogBucket]): @@ -323,8 +384,39 @@ def raw_page(self): next_page_token = proto.Field(proto.STRING, number=2) +class CreateBucketRequest(proto.Message): + r"""The parameters to ``CreateBucket``. + + Attributes: + parent (str): + Required. The resource in which to create the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + + Example: ``"projects/my-logging-project/locations/global"`` + bucket_id (str): + Required. A client-assigned identifier such as + ``"my-bucket"``. Identifiers are limited to 100 characters + and can include only letters, digits, underscores, hyphens, + and periods. + bucket (~.logging_config.LogBucket): + Required. The new bucket. The region + specified in the new bucket must be compliant + with any Location Restriction Org Policy. The + name field in the bucket is ignored. + """ + + parent = proto.Field(proto.STRING, number=1) + + bucket_id = proto.Field(proto.STRING, number=2) + + bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) + + class UpdateBucketRequest(proto.Message): - r"""The parameters to ``UpdateBucket`` (Beta). + r"""The parameters to ``UpdateBucket``. Attributes: name (str): @@ -364,7 +456,7 @@ class UpdateBucketRequest(proto.Message): class GetBucketRequest(proto.Message): - r"""The parameters to ``GetBucket`` (Beta). + r"""The parameters to ``GetBucket``. Attributes: name (str): @@ -384,6 +476,196 @@ class GetBucketRequest(proto.Message): name = proto.Field(proto.STRING, number=1) +class DeleteBucketRequest(proto.Message): + r"""The parameters to ``DeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to delete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class UndeleteBucketRequest(proto.Message): + r"""The parameters to ``UndeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to undelete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListViewsRequest(proto.Message): + r"""The parameters to ``ListViews``. + + Attributes: + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. 
The presence of
+ ``nextPageToken`` in the response indicates that more
+ results might be available.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ page_token = proto.Field(proto.STRING, number=2)
+
+ page_size = proto.Field(proto.INT32, number=3)
+
+
+class ListViewsResponse(proto.Message):
+ r"""The response from ListViews.
+
+ Attributes:
+ views (Sequence[~.logging_config.LogView]):
+ A list of views.
+ next_page_token (str):
+ If there might be more results than appear in this response,
+ then ``nextPageToken`` is included. To get the next set of
+ results, call the same method again using the value of
+ ``nextPageToken`` as ``pageToken``.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",)
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+
+class CreateViewRequest(proto.Message):
+ r"""The parameters to ``CreateView``.
+
+ Attributes:
+ parent (str):
+ Required. The bucket in which to create the view
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
+
+ Example:
+ ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"``
+ view_id (str):
+ Required. The ID to use for this view.
+ view (~.logging_config.LogView):
+ Required. The new view.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ view_id = proto.Field(proto.STRING, number=2)
+
+ view = proto.Field(proto.MESSAGE, number=3, message="LogView",)
+
+
+class UpdateViewRequest(proto.Message):
+ r"""The parameters to ``UpdateView``.
+
+ Attributes:
+ name (str):
+ Required. The full resource name of the view to update
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
+
+ Example:
+ ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
+ view (~.logging_config.LogView):
+ Required. The updated view.
+ update_mask (~.field_mask.FieldMask):
+ Optional. Field mask that specifies the fields in ``view``
+ that need an update. A field will be overwritten if, and
+ only if, it is in the update mask. ``name`` and output only
+ fields cannot be updated.
+
+ For a detailed ``FieldMask`` definition, see
+ https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
+
+ Example: ``updateMask=filter``.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ view = proto.Field(proto.MESSAGE, number=2, message="LogView",)
+
+ update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,)
+
+
+class GetViewRequest(proto.Message):
+ r"""The parameters to ``GetView``.
+
+ Attributes:
+ name (str):
+ Required. The resource name of the view:
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
+
+ Example:
+ ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class DeleteViewRequest(proto.Message):
+ r"""The parameters to ``DeleteView``.
+
+ Attributes:
+ name (str):
+ Required. The full resource name of the view to delete:
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
+
+ Example:
+ ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
 class ListSinksRequest(proto.Message):
 r"""The parameters to ``ListSinks``.
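
Taken together, the new ``LogView`` message and the bucket/view request types above complete the views surface of ``ConfigServiceV2``. As a minimal sketch of how they come out on the generated client (the project, bucket, and view identifiers below are placeholders, and the snippet assumes Application Default Credentials are configured):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    bucket = "projects/my-project/locations/global/buckets/my-bucket"

    # Create a view that exposes only stdout entries within the bucket.
    view = client.create_view(
        request=logging_config.CreateViewRequest(
            parent=bucket,
            view_id="my-view",
            view=logging_config.LogView(
                description="Only stdout entries",
                filter='LOG_ID("stdout")',
            ),
        )
    )

    # list_views accepts the flattened ``parent`` argument and returns a
    # pager that follows nextPageToken transparently.
    for v in client.list_views(parent=bucket):
        print(v.name)
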
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 2f7c5b47230e..a9642d13ba52 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -41,8 +41,8 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to extract values from logs - and create a a distribution of the values. The distribution + Logs-based metrics can also be used to extract values from logs + and create a distribution of the values. The distribution records the statistics of the extracted values along with an optional histogram of the values as specified by the bucket options. diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index b90eef00f2d9..2a06290780bc 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -148,6 +148,8 @@ def lint(session): "." ] session.run("flake8", *args) + + # # Black # diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 05600e4ce718..4e26d487701d 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "4b0ad15b0ff483486ae90d73092e7be00f8c1848", - "internalRef": "341842584" + "sha": "be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91", + "internalRef": "342353190" } }, { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 469684436dae..25e35e5c64c4 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -800,6 +800,7 @@ def test_get_bucket( name="name_value", description="description_value", retention_days=1512, + locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) @@ -821,16 +822,1163 @@ def test_get_bucket( assert response.retention_days == 1512 + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_get_bucket_from_dict(): + test_get_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_bucket_async_from_dict(): + await test_get_bucket_async(request_type=dict) + + +def test_get_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_bucket( + transport: str = "grpc", request_type=logging_config.CreateBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + response = client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_create_bucket_from_dict(): + test_create_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) +@pytest.mark.asyncio +async def test_create_bucket_async_from_dict(): + await test_create_bucket_async(request_type=dict) + + +def test_create_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_update_bucket( + transport: str = "grpc", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + response = client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_update_bucket_from_dict(): + test_update_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) + + +def test_update_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_bucket( + transport: str = "grpc", request_type=logging_config.DeleteBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_bucket_from_dict(): + test_delete_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) + + +def test_delete_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_undelete_bucket( + transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_undelete_bucket_from_dict(): + test_undelete_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) + + +def test_undelete_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_undelete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.UndeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_views( + transport: str = "grpc", request_type=logging_config.ListViewsRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListViewsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_views_from_dict(): + test_list_views(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) + + +def test_list_views_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.ListViewsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ call.return_value = logging_config.ListViewsResponse()
+
+ client.list_views(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_views_field_headers_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = logging_config.ListViewsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListViewsResponse()
+ )
+
+ await client.list_views(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_views_flattened():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = logging_config.ListViewsResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_views(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+
+def test_list_views_flattened_error():
+ client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_views(
+ logging_config.ListViewsRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_views_flattened_async():
+ client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListViewsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_views(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_views_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_views( + logging_config.ListViewsRequest(), parent="parent_value", + ) + + +def test_list_views_pager(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) for i in results) + + +def test_list_views_pages(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + async_pager = await client.list_views(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogView) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_views(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", + ) + + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +def test_get_view_from_dict(): + test_get_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
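+    # In proto3 every field carries a zero default (e.g. request_type().name
+    # is ""), so an empty request is structurally valid; any semantic
+    # validation would happen server-side, and the server is mocked here.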
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + + response = await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = logging_config.LogView() + + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_get_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +async def test_get_view_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) + + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_view( + transport: str = "grpc", request_type=logging_config.CreateViewRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. 
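+        # Whatever the mocked stub __call__ returns is surfaced to the
+        # caller, so the assertions further down simply compare these
+        # arbitrary placeholder field values (name/description/filter)
+        # against the response.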
+ call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", + ) + + response = client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateViewRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +def test_create_view_from_dict(): + test_create_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -841,55 +1989,52 @@ async def test_get_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( + logging_config.LogView( name="name_value", description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + filter="filter_value", ) ) - response = await client.get_bucket(request) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_get_bucket_async_from_dict(): - await test_get_bucket_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_get_bucket_field_headers(): +def test_create_view_field_headers(): client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetBucketRequest() - request.name = "name/value" + request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() - client.get_bucket(request) + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -898,25 +2043,25 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_get_bucket_field_headers_async(): +async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetBucketRequest() - request.name = "name/value" + request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + logging_config.LogView() ) - await client.get_bucket(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -925,11 +2070,11 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_bucket( - transport: str = "grpc", request_type=logging_config.UpdateBucketRequest +def test_update_view( + transport: str = "grpc", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2Client( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -940,43 +2085,38 @@ def test_update_bucket( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", ) - response = client.update_bucket(request) + response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" -def test_update_bucket_from_dict(): - test_update_bucket(request_type=dict) +def test_update_view_from_dict(): + test_update_view(request_type=dict) @pytest.mark.asyncio -async def test_update_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -987,55 +2127,52 @@ async def test_update_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( + logging_config.LogView( name="name_value", description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + filter="filter_value", ) ) - response = await client.update_bucket(request) + response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_update_bucket_async_from_dict(): - await test_update_bucket_async(request_type=dict) +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) -def test_update_bucket_field_headers(): +def test_update_view_field_headers(): client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateBucketRequest() + request = logging_config.UpdateViewRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() - client.update_bucket(request) + client.update_view(request) # Establish that the underlying gRPC stub method was called. 
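    # Each mock_calls entry is a (name, args, kwargs) triple: args[0] is the
    # request proto that reached the stub, and kwargs["metadata"] holds the
    # (key, value) gRPC metadata pairs, including the x-goog-request-params
    # routing header asserted in the field-header tests.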
assert len(call.mock_calls) == 1 @@ -1048,21 +2185,136 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio -async def test_update_bucket_field_headers_async(): +async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateBucketRequest() + request = logging_config.UpdateViewRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + logging_config.LogView() ) - await client.update_bucket(request) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_view( + transport: str = "grpc", request_type=logging_config.DeleteViewRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_from_dict(): + test_delete_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. 
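+        # delete_view maps to an RPC whose wire response is
+        # google.protobuf.Empty, which the generated client surfaces as
+        # None rather than as an empty message object.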
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3941,7 +5193,15 @@ def test_config_service_v2_base_transport(): methods = ( "list_buckets", "get_bucket", + "create_bucket", "update_bucket", + "delete_bucket", + "undelete_bucket", + "list_views", + "get_view", + "create_view", + "update_view", + "delete_view", "list_sinks", "get_sink", "create_sink", @@ -4260,8 +5520,35 @@ def test_parse_log_sink_path(): assert expected == actual +def test_log_view_path(): + project = "oyster" + location = "nudibranch" + bucket = "cuttlefish" + view = "mussel" + + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, location=location, bucket=bucket, view=view, + ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "bucket": "scallop", + "view": "abalone", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. 
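+    # log_view_path/parse_log_view_path are a formatter/parser pair around
+    # the resource template
+    #   projects/{project}/locations/{location}/buckets/{bucket}/views/{view}
+    # so parse(format(**kwargs)) round-trips back to the same dict.
+    # Illustrative sketch (values are arbitrary placeholders):
+    #   path = ConfigServiceV2Client.log_view_path("p", "l", "b", "v")
+    #   assert ConfigServiceV2Client.parse_log_view_path(path) == {
+    #       "project": "p", "location": "l", "bucket": "b", "view": "v"}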
+ actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -4272,7 +5559,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = ConfigServiceV2Client.common_billing_account_path(**expected) @@ -4282,7 +5569,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format(folder=folder,) actual = ConfigServiceV2Client.common_folder_path(folder) @@ -4291,7 +5578,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = ConfigServiceV2Client.common_folder_path(**expected) @@ -4301,7 +5588,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) @@ -4310,7 +5597,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = ConfigServiceV2Client.common_organization_path(**expected) @@ -4320,7 +5607,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) @@ -4329,7 +5616,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = ConfigServiceV2Client.common_project_path(**expected) @@ -4339,8 +5626,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4351,8 +5638,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = ConfigServiceV2Client.common_location_path(**expected) From 4aee39a66183103225301926e27848b6959c4ff1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 4 Dec 2020 21:07:49 +0100 Subject: [PATCH 348/855] chore(deps): update dependency google-cloud-logging to v2.0.1 (#109) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 379ac65b1c45..bdb659d04691 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-logging==2.0.0 +google-cloud-logging==2.0.1 From 28b64cec44e90d9e98d8c287d9fcf1fcc919f3d9 Mon Sep 17 00:00:00 2001 From: Brady Kieffer Date: Mon, 7 Dec 2020 18:18:33 -0500 Subject: [PATCH 349/855] fix: Remove keyword only argument for RequestsMiddleware (#113) * fix: Remove keyword only argument for 
RequestsMiddleware Remove keyword only arguments from request middleware. This causes django to fail when attempting to load middleware. Django currently only supports handlers being passed in as args. * Test that we can instantiate middleware with or without kwargs * Make get_response a required parameter in RequestMiddleware --- .../cloud/logging_v2/handlers/middleware/request.py | 2 +- .../tests/unit/handlers/middleware/test_request.py | 13 +++++++++++++ .../tests/unit/handlers/test__helpers.py | 4 ++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py index da361b9679b4..1804947ec367 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py @@ -42,7 +42,7 @@ def _get_django_request(): class RequestMiddleware(MiddlewareMixin): """Saves the request in thread local""" - def __init__(self, *, get_response=None): + def __init__(self, get_response): self.get_response = get_response def process_request(self, request): diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py index 16d3f9ba2ebd..d0e3daf2473d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py +++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py @@ -41,6 +41,9 @@ def _get_target_class(self): return request.RequestMiddleware def _make_one(self, *args, **kw): + if not args and "get_response" not in kw: + kw["get_response"] = None + return self._get_target_class()(*args, **kw) def test_process_request(self): @@ -54,6 +57,16 @@ def test_process_request(self): django_request = request._get_django_request() self.assertEqual(django_request, mock_request) + def test_can_instantiate_middleware_without_kwargs(self): + handler = mock.Mock() + middleware = self._make_one(handler) + self.assertEqual(middleware.get_response, handler) + + def test_can_instantiate_middleware_with_kwargs(self): + handler = mock.Mock() + middleware = self._make_one(get_response=handler) + self.assertEqual(middleware.get_response, handler) + class Test__get_django_request(DjangoBase): @staticmethod diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 0cd3b30d819b..1fbf6c86011e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -87,7 +87,7 @@ def test_no_context_header(self): django_request = RequestFactory().get("/") - middleware = request.RequestMiddleware() + middleware = request.RequestMiddleware(None) middleware.process_request(django_request) trace_id = self._call_fut() self.assertIsNone(trace_id) @@ -104,7 +104,7 @@ def test_valid_context_header(self): "/", **{django_trace_header: django_trace_id} ) - middleware = request.RequestMiddleware() + middleware = request.RequestMiddleware(None) middleware.process_request(django_request) trace_id = self._call_fut() From 0d9b3cafe196182796359c0d5b2e14bad6e19843 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 8 Dec 2020 11:36:43 -0800 Subject: [PATCH 350/855] fix: Add submodule imports for handlers to logging alias 
(#117) --- .../google/cloud/logging/handlers/__init__.py | 27 ++++++++++++ .../logging/handlers/middleware/__init__.py | 17 ++++++++ .../logging/handlers/transports/__init__.py | 28 +++++++++++++ .../tests/unit/test_logging_shim.py | 41 ++++++++++++++++++- 4 files changed, 112 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py create mode 100644 packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py new file mode 100644 index 000000000000..29ed8f0d165c --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python :mod:`logging` handlers for Google Cloud Logging.""" + +from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler +from google.cloud.logging_v2.handlers.container_engine import ContainerEngineHandler +from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.handlers import setup_logging + +__all__ = [ + "AppEngineHandler", + "CloudLoggingHandler", + "ContainerEngineHandler", + "setup_logging", +] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py new file mode 100644 index 000000000000..bd32e4a90101 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/middleware/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2017 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
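+#
+# This module is a thin alias layer: it re-exports the Django request
+# middleware from ``google.cloud.logging_v2`` so that imports under the
+# stable ``google.cloud.logging`` namespace resolve to the same objects.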
+ +from google.cloud.logging_v2.handlers.middleware.request import RequestMiddleware + +__all__ = ["RequestMiddleware"] diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py new file mode 100644 index 000000000000..5a64caa07783 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/transports/__init__.py @@ -0,0 +1,28 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Transport classes for Python logging integration. +Currently two options are provided, a synchronous transport that makes +an API call for each log statement, and an asynchronous handler that +sends the API using a :class:`~google.cloud.logging.logger.Batch` object in +the background. +""" + +from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.handlers.transports.sync import SyncTransport +from google.cloud.logging_v2.handlers.transports.background_thread import ( + BackgroundThreadTransport, +) + +__all__ = ["BackgroundThreadTransport", "SyncTransport", "Transport"] diff --git a/packages/google-cloud-logging/tests/unit/test_logging_shim.py b/packages/google-cloud-logging/tests/unit/test_logging_shim.py index 507b7c635900..ae09c37faeea 100644 --- a/packages/google-cloud-logging/tests/unit/test_logging_shim.py +++ b/packages/google-cloud-logging/tests/unit/test_logging_shim.py @@ -17,7 +17,7 @@ class TestLoggingShim(unittest.TestCase): - def test_shim_matches_logging_v2(self): + def test_root_shim_matches_logging_v2(self): from google.cloud import logging from google.cloud import logging_v2 @@ -26,4 +26,43 @@ def test_shim_matches_logging_v2(self): for name in logging.__all__: found = getattr(logging, name) expected = getattr(logging_v2, name) + if name == "handlers": + # handler has separate shim + self.assertTrue(found) + self.assertIs(type(found), type(expected)) + else: + # other attributes should be identical + self.assertIs(found, expected) + + def test_handler_shim_matches_logging_v2(self): + from google.cloud.logging import handlers + from google.cloud.logging_v2 import handlers as handlers_2 + + self.assertEqual(handlers.__all__, handlers_2.__all__) + + for name in handlers.__all__: + found = getattr(handlers, name) + expected = getattr(handlers_2, name) + self.assertIs(found, expected) + + def test_middleware_shim_matches_logging_v2(self): + from google.cloud.logging.handlers import middleware + from google.cloud.logging_v2.handlers import middleware as middleware_2 + + self.assertEqual(middleware.__all__, middleware_2.__all__) + + for name in middleware.__all__: + found = getattr(middleware, name) + expected = getattr(middleware_2, name) + self.assertIs(found, expected) + + def test_transports_shim_matches_logging_v2(self): + from google.cloud.logging.handlers import transports + from google.cloud.logging_v2.handlers import transports as transports_2 + + 
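+        # assertIs (not assertEqual) below is deliberate: the shim must
+        # re-export the very same class objects as logging_v2, so that
+        # isinstance checks and monkeypatching behave identically regardless
+        # of which import path a caller uses.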
self.assertEqual(transports.__all__, transports_2.__all__) + + for name in transports.__all__: + found = getattr(transports, name) + expected = getattr(transports_2, name) self.assertIs(found, expected) From ef8819adc08e7cae113f0940adeafc7201e47520 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 8 Dec 2020 13:02:15 -0800 Subject: [PATCH 351/855] test: use logging API in unit tests when possible (#118) --- .../tests/unit/handlers/test_app_engine.py | 2 +- .../unit/handlers/test_container_engine.py | 4 +- .../tests/unit/handlers/test_handlers.py | 6 +- .../transports/test_background_thread.py | 4 +- .../unit/handlers/transports/test_base.py | 2 +- .../unit/handlers/transports/test_sync.py | 2 +- .../tests/unit/test__gapic.py | 10 +-- .../tests/unit/test__http.py | 24 +++---- .../tests/unit/test_client.py | 52 +++++++-------- .../tests/unit/test_entries.py | 20 +++--- .../tests/unit/test_logger.py | 64 +++++++++---------- .../tests/unit/test_metric.py | 2 +- .../tests/unit/test_sink.py | 2 +- 13 files changed, 95 insertions(+), 99 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 2a80e79b1ee4..ea16e3c85dcb 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -22,7 +22,7 @@ class TestAppEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler + from google.cloud.logging.handlers import AppEngineHandler return AppEngineHandler diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py index c5d6df65f0fe..d2ae838811a0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py @@ -19,9 +19,7 @@ class TestContainerEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging_v2.handlers.container_engine import ( - ContainerEngineHandler, - ) + from google.cloud.logging.handlers import ContainerEngineHandler return ContainerEngineHandler diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 1c5492e1aa0e..e967b2015e49 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -22,7 +22,7 @@ class TestCloudLoggingHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler + from google.cloud.logging.handlers import CloudLoggingHandler return CloudLoggingHandler @@ -47,7 +47,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import io - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource resource = Resource("resource_type", {"resource_label": "value"}) labels = {"handler_lable": "value"} @@ -91,7 +91,7 @@ def test_emit(self): class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): - from google.cloud.logging_v2.handlers.handlers import setup_logging + from google.cloud.logging.handlers import setup_logging if excludes: return 
setup_logging(handler, excluded_loggers=excludes) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 71d868d8690c..e9626a759dd1 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -25,9 +25,7 @@ class TestBackgroundThreadHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import ( - BackgroundThreadTransport, - ) + from google.cloud.logging.handlers.transports import BackgroundThreadTransport return BackgroundThreadTransport diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index bff253f94eb9..4cbfab02e92e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -21,7 +21,7 @@ class TestBaseHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import Transport + from google.cloud.logging.handlers.transports import Transport return Transport diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 7bc2cd46fe75..0ee6db229a75 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -22,7 +22,7 @@ class TestSyncHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import SyncTransport + from google.cloud.logging.handlers.transports import SyncTransport return SyncTransport diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 75aa20d46757..5da1c71222dd 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -17,7 +17,7 @@ import google.auth.credentials import mock -import google.cloud.logging_v2 +import google.cloud.logging from google.cloud import logging_v2 from google.cloud.logging_v2 import _gapic from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client @@ -91,7 +91,7 @@ def test_list_entries_with_options(self): result = client.list_entries( [PROJECT_PATH], filter_=FILTER, - order_by=google.cloud.logging_v2.ASCENDING, + order_by=google.cloud.logging.ASCENDING, page_size=42, page_token="token", ) @@ -103,7 +103,7 @@ def test_list_entries_with_options(self): request = call.call_args.args[0] assert request.resource_names == [PROJECT_PATH] assert request.filter == FILTER - assert request.order_by == google.cloud.logging_v2.ASCENDING + assert request.order_by == google.cloud.logging.ASCENDING assert request.page_size == 42 assert request.page_token == "token" @@ -179,7 +179,7 @@ def test_list_sinks(self): # Check the response assert len(sinks) == 1 sink = sinks[0] - assert isinstance(sink, google.cloud.logging_v2.sink.Sink) + assert isinstance(sink, google.cloud.logging.Sink) assert sink.name == self.SINK_NAME assert sink.destination == self.DESTINATION_URI assert sink.filter_ == FILTER @@ -351,7 
+351,7 @@ def test_list_metrics(self): # Check the response assert len(metrics) == 1 metric = metrics[0] - assert isinstance(metric, google.cloud.logging_v2.metric.Metric) + assert isinstance(metric, google.cloud.logging.Metric) assert metric.name == self.METRIC_PATH assert metric.description == self.DESCRIPTION assert metric.filter_ == FILTER diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 0cf8dcfddeaf..e927f6c1555a 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -130,9 +130,9 @@ def _make_timestamp(): return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_no_paging(self): - from google.cloud.logging_v2.client import Client - from google.cloud.logging_v2.entries import TextEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Client + from google.cloud.logging import TextEntry + from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" @@ -184,11 +184,11 @@ def test_list_entries_no_paging(self): ) def test_list_entries_w_paging(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client - from google.cloud.logging_v2.logger import Logger - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client + from google.cloud.logging import Logger + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry PROJECT1 = "PROJECT1" PROJECT1_PATH = f"projects/{PROJECT1}" @@ -362,7 +362,7 @@ def test_ctor(self): self.assertEqual(api.api_request, connection.api_request) def test_list_sinks_no_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink TOKEN = "TOKEN" RETURNED = { @@ -402,7 +402,7 @@ def test_list_sinks_no_paging(self): ) def test_list_sinks_w_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink TOKEN = "TOKEN" PAGE_SIZE = 42 @@ -633,7 +633,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_list_metrics_no_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric TOKEN = "TOKEN" RETURNED = { @@ -667,7 +667,7 @@ def test_list_metrics_no_paging(self): ) def test_list_metrics_w_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric TOKEN = "TOKEN" PAGE_SIZE = 42 diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 29934c389a21..8083e3c56182 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -43,7 +43,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.client import Client + from google.cloud.logging import Client return Client @@ -238,7 +238,7 @@ def make_api(client_obj): self.assertIs(again, api) def test_logger(self): - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -249,7 +249,7 @@ def test_logger(self): 
self.assertEqual(logger.project, self.PROJECT) def test_list_entries_defaults(self): - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry IID = "IID" TEXT = "TEXT" @@ -308,10 +308,10 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -404,10 +404,10 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -492,7 +492,7 @@ def test_list_entries_explicit_timestamp(self): ) def test_sink_defaults(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -505,7 +505,7 @@ def test_sink_defaults(self): self.assertEqual(sink.parent, self.PROJECT_PATH) def test_sink_explicit(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -520,7 +520,7 @@ def test_sink_explicit(self): self.assertEqual(sink.parent, self.PROJECT_PATH) def test_list_sinks_no_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink PROJECT = "PROJECT" TOKEN = "TOKEN" @@ -559,7 +559,7 @@ def test_list_sinks_no_paging(self): ) def test_list_sinks_with_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink PROJECT = "PROJECT" SINK_NAME = "sink_name" @@ -603,7 +603,7 @@ def test_list_sinks_with_paging(self): ) def test_metric_defaults(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric creds = _make_credentials() @@ -617,7 +617,7 @@ def test_metric_defaults(self): self.assertEqual(metric.project, self.PROJECT) def test_metric_explicit(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric creds = _make_credentials() @@ -633,7 +633,7 @@ def test_metric_explicit(self): self.assertEqual(metric.project, self.PROJECT) def test_list_metrics_no_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric metrics = [ { @@ -669,7 +669,7 @@ def test_list_metrics_no_paging(self): ) def test_list_metrics_with_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric token = "TOKEN" next_token = "T00KEN" @@ -719,7 +719,7 @@ def test_get_default_handler_app_engine(self): import os from 
google.cloud._testing import _Monkey from google.cloud.logging_v2.client import _APPENGINE_FLEXIBLE_ENV_VM - from google.cloud.logging_v2.handlers import AppEngineHandler + from google.cloud.logging.handlers import AppEngineHandler credentials = _make_credentials() client = self._make_one( @@ -734,7 +734,7 @@ def test_get_default_handler_app_engine(self): self.assertIsInstance(handler, AppEngineHandler) def test_get_default_handler_container_engine(self): - from google.cloud.logging_v2.handlers import ContainerEngineHandler + from google.cloud.logging.handlers import ContainerEngineHandler credentials = _make_credentials() client = self._make_one( @@ -753,8 +753,8 @@ def test_get_default_handler_container_engine(self): def test_get_default_handler_general(self): import io - from google.cloud.logging_v2.handlers import CloudLoggingHandler - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) @@ -778,7 +778,7 @@ def test_get_default_handler_general(self): self.assertEqual(handler.labels, labels) def test_setup_logging(self): - from google.cloud.logging_v2.handlers import CloudLoggingHandler + from google.cloud.logging.handlers import CloudLoggingHandler credentials = _make_credentials() client = self._make_one( @@ -804,8 +804,8 @@ def test_setup_logging(self): def test_setup_logging_w_extra_kwargs(self): import io - from google.cloud.logging_v2.handlers import CloudLoggingHandler - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 5b7763f45e40..ef90b8159a17 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -61,7 +61,7 @@ class TestLogEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry return LogEntry @@ -90,7 +90,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) IID = "IID" @@ -178,7 +178,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource klass = self._get_target_class() client = _Client(self.PROJECT) @@ -332,7 +332,7 @@ def test_to_api_repr_w_source_location_no_line(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -395,7 +395,7 @@ class TestTextEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry return TextEntry @@ -417,7 +417,7 @@ def 
test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -483,7 +483,7 @@ class TestStructEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import StructEntry return StructEntry @@ -505,7 +505,7 @@ def test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -571,7 +571,7 @@ class TestProtobufEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import ProtobufEntry return ProtobufEntry @@ -652,7 +652,7 @@ def test_to_api_repr_proto_defaults(self): def test_to_api_repr_proto_explicit(self): import datetime from google.protobuf.json_format import MessageToDict - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 853bcce22c44..8693306332f7 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -36,7 +36,7 @@ class TestLogger(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger return Logger @@ -75,7 +75,7 @@ def test_ctor_explicit(self): self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch conn = object() client = _Client(self.PROJECT, conn) @@ -86,7 +86,7 @@ def test_batch_w_bound_client(self): self.assertIs(batch.client, client) def test_batch_w_alternate_client(self): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch conn1 = object() conn2 = object() @@ -117,7 +117,7 @@ def test_log_empty_defaults_w_default_labels(self): def test_log_empty_w_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" DEFAULT_LABELS = {"foo": "spam"} @@ -207,7 +207,7 @@ def test_log_text_w_unicode_and_default_labels(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" TEXT = "TEXT" @@ -300,7 +300,7 @@ def test_log_struct_w_default_labels(self): def test_log_struct_w_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" STRUCT = {"message": "MESSAGE", "weather": "cloudy"} @@ -405,7 +405,7 @@ def test_log_proto_w_explicit(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 
import Value - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource message = Struct(fields={"foo": Value(bool_value=True)}) ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -486,7 +486,7 @@ def test_delete_w_alternate_client(self): ) def test_list_entries_defaults(self): - from google.cloud.logging_v2.client import Client + from google.cloud.logging import Client TOKEN = "TOKEN" @@ -530,8 +530,8 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -590,8 +590,8 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -641,7 +641,7 @@ class TestBatch(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch return Batch @@ -657,7 +657,7 @@ def test_ctor_defaults(self): self.assertEqual(len(batch.entries), 0) def test_log_empty_defaults(self): - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry ENTRY = LogEntry() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -668,8 +668,8 @@ def test_log_empty_defaults(self): def test_log_empty_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import Resource + from google.cloud.logging import LogEntry LABELS = {"foo": "bar", "baz": "qux"} IID = "IID" @@ -714,7 +714,7 @@ def test_log_empty_explicit(self): def test_log_text_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) @@ -726,8 +726,8 @@ def test_log_text_defaults(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import Resource + from google.cloud.logging import TextEntry TEXT = "This is the entry text" LABELS = {"foo": "bar", "baz": "qux"} @@ -775,7 +775,7 @@ def test_log_text_explicit(self): def test_log_struct_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) @@ -787,8 +787,8 @@ def test_log_struct_defaults(self): def test_log_struct_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import Resource + from google.cloud.logging import StructEntry STRUCT = {"message": "Message text", "weather": 
"partly cloudy"} LABELS = {"foo": "bar", "baz": "qux"} @@ -836,7 +836,7 @@ def test_log_struct_explicit(self): def test_log_proto_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -850,8 +850,8 @@ def test_log_proto_defaults(self): def test_log_proto_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import Resource + from google.cloud.logging import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -900,7 +900,7 @@ def test_log_proto_explicit(self): def test_commit_w_unknown_entry_type(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -918,7 +918,7 @@ def test_commit_w_unknown_entry_type(self): def test_commit_w_resource_specified(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -1035,7 +1035,7 @@ def test_commit_w_alternate_client(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" @@ -1087,7 +1087,7 @@ def test_context_mgr_success(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" @@ -1137,9 +1137,9 @@ def test_context_mgr_failure(self): import datetime from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.entries import TextEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import TextEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import ProtobufEntry TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} diff --git a/packages/google-cloud-logging/tests/unit/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py index a71fd763f9ed..83b49d02dfa1 100644 --- a/packages/google-cloud-logging/tests/unit/test_metric.py +++ b/packages/google-cloud-logging/tests/unit/test_metric.py @@ -25,7 +25,7 @@ class TestMetric(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric return Metric diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index cac6040589d4..1e4852ab523b 100644 --- 
a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -27,7 +27,7 @@ class TestSink(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink return Sink From 98ec9dd72c83c82982108e8a4250fe510e644da1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 11 Dec 2020 16:50:27 -0800 Subject: [PATCH 352/855] chore: Re-generated to pick up changes from synthtool. (#127) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore: add config / docs for 'pre-commit' support Source-Author: Tres Seaver Source-Date: Tue Dec 1 16:01:20 2020 -0500 Source-Repo: googleapis/synthtool Source-Sha: 32af6da519a6b042e3da62008e2a75e991efb6b4 Source-Link: https://github.com/googleapis/synthtool/commit/32af6da519a6b042e3da62008e2a75e991efb6b4 * chore(deps): update precommit hook pre-commit/pre-commit-hooks to v3.3.0 Source-Author: WhiteSource Renovate Source-Date: Wed Dec 2 17:18:24 2020 +0100 Source-Repo: googleapis/synthtool Source-Sha: 69629b64b83c6421d616be2b8e11795738ec8a6c Source-Link: https://github.com/googleapis/synthtool/commit/69629b64b83c6421d616be2b8e11795738ec8a6c * test(python): give filesystem paths to pytest-cov https://pytest-cov.readthedocs.io/en/latest/config.html The pytest-cov docs seem to suggest a filesystem path is expected. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Dec 2 09:28:04 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: f94318521f63085b9ccb43d42af89f153fb39f15 Source-Link: https://github.com/googleapis/synthtool/commit/f94318521f63085b9ccb43d42af89f153fb39f15 * chore: update noxfile.py.j2 * Update noxfile.py.j2 add changes from @glasnt to the template template to ensure that enforcing type hinting doesn't fail for repos with the sample noxfile (aka all samples repos) See https://github.com/GoogleCloudPlatform/python-docs-samples/pull/4869/files for context * fix typo Source-Author: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Dec 3 13:44:30 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 Source-Link: https://github.com/googleapis/synthtool/commit/18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 --- .../.pre-commit-config.yaml | 17 ++ .../google-cloud-logging/CONTRIBUTING.rst | 10 + packages/google-cloud-logging/noxfile.py | 5 +- .../samples/snippets/README.rst | 191 ------------------ .../samples/snippets/noxfile.py | 19 +- packages/google-cloud-logging/synth.metadata | 8 +- 6 files changed, 42 insertions(+), 208 deletions(-) create mode 100644 packages/google-cloud-logging/.pre-commit-config.yaml delete mode 100644 packages/google-cloud-logging/samples/snippets/README.rst diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml new file mode 100644 index 000000000000..6ad83346e261 --- /dev/null +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.4 + hooks: + - id: flake8 diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index ef2706b7773c..ab6c09b8f074 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -111,6 +111,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 1844aa62ba8b..3db66c649206 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -79,9 +79,8 @@ def default(session): session.run( "py.test", "--quiet", - "--cov=google.cloud.logging", - "--cov=google.cloud", - "--cov=tests.unit", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", diff --git a/packages/google-cloud-logging/samples/snippets/README.rst b/packages/google-cloud-logging/samples/snippets/README.rst deleted file mode 100644 index 9a38dca7d2d1..000000000000 --- a/packages/google-cloud-logging/samples/snippets/README.rst +++ /dev/null @@ -1,191 +0,0 @@ - -.. This file is automatically generated. Do not edit this file directly. - -Cloud Logging Python Samples -=============================================================================== - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst - - -This directory contains samples for Cloud Logging. `Cloud Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. - - - - -.. _Cloud Logging: https://cloud.google.com/logging/docs - - -Setup -------------------------------------------------------------------------------- - - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - - - - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 3.6+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - - - - - - -Samples -------------------------------------------------------------------------------- - - -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/quickstart.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - - - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/snippets.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - - usage: snippets.py [-h] logger_name {list,write,delete} ... - - This application demonstrates how to perform basic operations on logs and - log entries with Cloud Logging. - - For more information, see the README.md under /logging and the - documentation at https://cloud.google.com/logging/docs. - - positional arguments: - logger_name Logger name - {list,write,delete} - list Lists the most recent entries for a given logger. - write Writes log entries to the given logger. - delete Deletes a logger and all its entries. Note that a - deletion can take several minutes to take effect. 
- - optional arguments: - -h, --help show this help message and exit - - - - - -Export -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/export.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python export.py - - - usage: export.py [-h] {list,create,update,delete} ... - - positional arguments: - {list,create,update,delete} - list Lists all sinks. - create Lists all sinks. - update Changes a sink's filter. The filter determines which - logs this sink matches and will be exported to the - destination. For example a filter of 'severity>=INFO' - will send all logs that have a severity of INFO or - greater to the destination. See https://cloud.google.c - om/logging/docs/view/advanced_filters for more filter - information. - delete Deletes a sink. - - optional arguments: - -h, --help show this help message and exit - - - - - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 2a06290780bc..bca0522ec4d9 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -148,15 +149,13 @@ def lint(session): "." 
] session.run("flake8", *args) - - # # Black # @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -170,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -196,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -211,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -234,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 4e26d487701d..a6ed7f34c53d 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "4e24b3c360adef8d7761573d789867857586337d" + "sha": "7eaa5853f3a45e3db015a09841b98aeab461e6f3" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } } ], @@ -85,6 +85,7 @@ ".kokoro/test-samples.sh", ".kokoro/trampoline.sh", ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", ".trampolinerc", "CODE_OF_CONDUCT.md", "CONTRIBUTING.rst", @@ -134,7 +135,6 @@ "renovate.json", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", "samples/snippets/noxfile.py", "scripts/decrypt-secrets.sh", "scripts/readme-gen/readme_gen.py", From 75096aa9d4bea1e30655e8fb5eddf9090f570dd4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 11 Dec 2020 16:50:47 -0800 Subject: [PATCH 353/855] chore: Re-generated to pick up changes from googleapis. (#126) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* feat: add the Tailing API to get a live stream of the tail end of filtered logs PiperOrigin-RevId: 344435830 Source-Author: Google APIs Source-Date: Thu Nov 26 09:56:05 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: e8857c4c36948e7e0500377cd7fcecbf2459afc8 Source-Link: https://github.com/googleapis/googleapis/commit/e8857c4c36948e7e0500377cd7fcecbf2459afc8 --- .../cloud/logging_v2/proto/logging.proto | 99 +++++++++++++++ .../logging_service_v2/async_client.py | 67 +++++++++- .../services/logging_service_v2/client.py | 54 +++++++- .../logging_service_v2/transports/base.py | 27 ++++ .../logging_service_v2/transports/grpc.py | 28 +++++ .../transports/grpc_asyncio.py | 30 +++++ .../google/cloud/logging_v2/types/__init__.py | 4 + .../google/cloud/logging_v2/types/logging.py | 118 ++++++++++++++++++ packages/google-cloud-logging/synth.metadata | 4 +- .../logging_v2/test_logging_service_v2.py | 76 +++++++++++ 10 files changed, 503 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto index 58647b92ff04..f8b01a71e6b4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto @@ -125,6 +125,15 @@ service LoggingServiceV2 { }; option (google.api.method_signature) = "parent"; } + + // Streaming read of log entries as they are ingested. Until the stream is + // terminated, it will continue reading logs. + rpc TailLogEntries(stream TailLogEntriesRequest) returns (stream TailLogEntriesResponse) { + option (google.api.http) = { + post: "/v2/entries:tail" + body: "*" + }; + } } // The parameters to DeleteLog. @@ -254,6 +263,11 @@ message ListLogEntriesRequest { // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" // + // May alternatively be one or more views + // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] // // Projects listed in the `project_ids` field are added to this list. repeated string resource_names = 8 [ @@ -363,6 +377,19 @@ message ListLogsRequest { // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The resource name that owns the logs: + // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // + // To support legacy queries, it could also be: + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + repeated string resource_names = 8 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from ListLogs. @@ -377,3 +404,75 @@ message ListLogsResponse { // method again using the value of `nextPageToken` as `pageToken`. 
string next_page_token = 2; } + +// The parameters to `TailLogEntries`. +message TailLogEntriesRequest { + // Required. Name of a parent resource from which to retrieve log entries: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + // + // May alternatively be one or more views: + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + repeated string resource_names = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. A filter that chooses which log entries to return. See [Advanced + // Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). + // Only log entries that match the filter are returned. An empty filter + // matches all log entries in the resources listed in `resource_names`. + // Referencing a parent resource that is not in `resource_names` will cause + // the filter to return no results. The maximum length of the filter is 20000 + // characters. + string filter = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The amount of time to buffer log entries at the server before + // being returned to prevent out of order results due to late arriving log + // entries. Valid values are between 0-60000 milliseconds. Defaults to 2000 + // milliseconds. + google.protobuf.Duration buffer_window = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Result returned from `TailLogEntries`. +message TailLogEntriesResponse { + // Information about entries that were omitted from the session. + message SuppressionInfo { + // An indicator of why entries were omitted. + enum Reason { + // Unexpected default. + REASON_UNSPECIFIED = 0; + + // Indicates suppression occurred due to relevant entries being + // received in excess of rate limits. For quotas and limits, see + // [Logging API quotas and + // limits](https://cloud.google.com/logging/quotas#api-limits). + RATE_LIMIT = 1; + + // Indicates suppression occurred due to the client not consuming + // responses quickly enough. + NOT_CONSUMED = 2; + } + + // The reason that entries were omitted from the session. + Reason reason = 1; + + // A lower bound on the count of entries omitted due to `reason`. + int32 suppressed_count = 2; + } + + // A list of log entries. Each response in the stream will order entries with + // increasing values of `LogEntry.timestamp`. Ordering is not guaranteed + // between separate responses. + repeated LogEntry entries = 1; + + // If entries that otherwise would have been included in the session were not + // sent back to the client, counts of relevant entries omitted from the + // session with the reason that they were not included. There will be at most + // one of each reason per response. The counts represent the number of + // suppressed entries since the last streamed response. 
+ repeated SuppressionInfo suppression_info = 2; +} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index e6dd57247dc3..82ee957a3c4a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,7 +18,16 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import ( + Dict, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -430,6 +439,12 @@ async def list_log_entries( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. This corresponds to the ``resource_names`` field @@ -690,6 +705,56 @@ async def list_logs( # Done; return the response. return response + def tail_log_entries( + self, + requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (AsyncIterator[`~.logging.TailLogEntriesRequest`]): + The request object AsyncIterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[~.logging.TailLogEntriesResponse]: + Result returned from ``TailLogEntries``. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.tail_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 79a9ed1af652..a54252bf7409 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -19,7 +19,17 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import ( + Callable, + Dict, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -598,6 +608,12 @@ def list_log_entries( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. This corresponds to the ``resource_names`` field @@ -833,6 +849,42 @@ def list_logs( # Done; return the response. return response + def tail_log_entries( + self, + requests: Iterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (Iterator[`~.logging.TailLogEntriesRequest`]): + The request object iterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[~.logging.TailLogEntriesResponse]: + Result returned from ``TailLogEntries``. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index c8bcbcbf9524..be9dcdbfee87 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -186,6 +186,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.tail_log_entries: gapic_v1.method.wrap_method( + self.tail_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=3600.0, + client_info=client_info, + ), } @property @@ -244,5 +259,17 @@ def list_logs( ]: raise NotImplementedError() + @property + def tail_log_entries( + self, + ) -> typing.Callable[ + [logging.TailLogEntriesRequest], + typing.Union[ + logging.TailLogEntriesResponse, + typing.Awaitable[logging.TailLogEntriesResponse], + ], + ]: + raise NotImplementedError() + __all__ = ("LoggingServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4c0636e47e8c..d774281b9d20 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -380,5 +380,33 @@ def list_logs( ) return self._stubs["list_logs"] + @property + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + ~.TailLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs["tail_log_entries"] + __all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 8a26a078e1fa..686eb52e016e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -390,5 +390,35 @@ def list_logs( ) return self._stubs["list_logs"] + @property + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse] + ]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + Awaitable[~.TailLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs["tail_log_entries"] + __all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index b24bf3b8c508..4c85fbb46dcc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -66,6 +66,8 @@ ListMonitoredResourceDescriptorsResponse, ListLogsRequest, ListLogsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, ) from .logging_metrics import ( LogMetric, @@ -125,6 +127,8 @@ "ListMonitoredResourceDescriptorsResponse", "ListLogsRequest", "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", "LogMetric", "ListLogMetricsRequest", "ListLogMetricsResponse", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 0d44439abff7..cec8993f5cc8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -20,6 +20,7 @@ from google.api import monitored_resource_pb2 as monitored_resource # type: ignore from google.cloud.logging_v2.types import log_entry +from google.protobuf import duration_pb2 as duration # type: ignore from google.rpc import status_pb2 as status # type: ignore @@ -36,6 +37,8 @@ "ListMonitoredResourceDescriptorsResponse", "ListLogsRequest", 
"ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) @@ -208,6 +211,12 @@ class ListLogEntriesRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. filter (str): @@ -358,6 +367,16 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. + resource_names (Sequence[str]): + Optional. The resource name that owns the logs: + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + To support legacy queries, it could also be: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". """ parent = proto.Field(proto.STRING, number=1) @@ -366,6 +385,8 @@ class ListLogsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) + resource_names = proto.RepeatedField(proto.STRING, number=8) + class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. @@ -391,4 +412,101 @@ def raw_page(self): next_page_token = proto.Field(proto.STRING, number=2) +class TailLogEntriesRequest(proto.Message): + r"""The parameters to ``TailLogEntries``. + + Attributes: + resource_names (Sequence[str]): + Required. Name of a parent resource from which to retrieve + log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views: + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Filters `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not in ``resource_names`` will cause the filter to return no + results. The maximum length of the filter is 20000 + characters. + buffer_window (~.duration.Duration): + Optional. The amount of time to buffer log + entries at the server before being returned to + prevent out of order results due to late + arriving log entries. Valid values are between + 0-60000 milliseconds. Defaults to 2000 + milliseconds. 
+ """ + + resource_names = proto.RepeatedField(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + +class TailLogEntriesResponse(proto.Message): + r"""Result returned from ``TailLogEntries``. + + Attributes: + entries (Sequence[~.log_entry.LogEntry]): + A list of log entries. Each response in the stream will + order entries with increasing values of + ``LogEntry.timestamp``. Ordering is not guaranteed between + separate responses. + suppression_info (Sequence[~.logging.TailLogEntriesResponse.SuppressionInfo]): + If entries that otherwise would have been + included in the session were not sent back to + the client, counts of relevant entries omitted + from the session with the reason that they were + not included. There will be at most one of each + reason per response. The counts represent the + number of suppressed entries since the last + streamed response. + """ + + class SuppressionInfo(proto.Message): + r"""Information about entries that were omitted from the session. + + Attributes: + reason (~.logging.TailLogEntriesResponse.SuppressionInfo.Reason): + The reason that entries were omitted from the + session. + suppressed_count (int): + A lower bound on the count of entries omitted due to + ``reason``. + """ + + class Reason(proto.Enum): + r"""An indicator of why entries were omitted.""" + REASON_UNSPECIFIED = 0 + RATE_LIMIT = 1 + NOT_CONSUMED = 2 + + reason = proto.Field( + proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", + ) + + suppressed_count = proto.Field(proto.INT32, number=2) + + entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) + + suppression_info = proto.RepeatedField( + proto.MESSAGE, number=2, message=SuppressionInfo, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index a6ed7f34c53d..c3228fbcfab9 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91", - "internalRef": "342353190" + "sha": "e8857c4c36948e7e0500377cd7fcecbf2459afc8", + "internalRef": "344435830" } }, { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 2c08f63b296a..f6cb5d7a154d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1698,6 +1698,81 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token +def test_tail_log_entries( + transport: str = "grpc", request_type=logging.TailLogEntriesRequest +): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([logging.TailLogEntriesResponse()]) + + response = client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, logging.TailLogEntriesResponse) + + +def test_tail_log_entries_from_dict(): + test_tail_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[logging.TailLogEntriesResponse()] + ) + + response = await client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, logging.TailLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_from_dict(): + await test_tail_log_entries_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -1800,6 +1875,7 @@ def test_logging_service_v2_base_transport(): "list_log_entries", "list_monitored_resource_descriptors", "list_logs", + "tail_log_entries", ) for method in methods: with pytest.raises(NotImplementedError): From e2b6e5c7bfe91b8aa6d1f011d0b088d992b71dd0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 14 Dec 2020 12:42:53 -0800 Subject: [PATCH 354/855] fix: remove client recv msg limit fix: add enums to `types/__init__.py` (#131) PiperOrigin-RevId: 347055288 Source-Author: Google APIs Source-Date: Fri Dec 11 12:44:37 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 Source-Link: https://github.com/googleapis/googleapis/commit/dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 --- .../services/config_service_v2/transports/__init__.py | 1 - .../services/config_service_v2/transports/grpc.py | 10 +++++++++- .../config_service_v2/transports/grpc_asyncio.py | 8 ++++++++ .../services/logging_service_v2/transports/__init__.py | 1 - .../services/logging_service_v2/transports/grpc.py | 10 +++++++++- .../logging_service_v2/transports/grpc_asyncio.py | 8 ++++++++ .../services/metrics_service_v2/transports/__init__.py | 1 - .../services/metrics_service_v2/transports/grpc.py | 10 +++++++++- .../metrics_service_v2/transports/grpc_asyncio.py | 8 ++++++++ .../google/cloud/logging_v2/types/__init__.py | 3 ++- packages/google-cloud-logging/synth.metadata | 6 +++--- .../unit/gapic/logging_v2/test_config_service_v2.py | 8 ++++++++ .../unit/gapic/logging_v2/test_logging_service_v2.py | 8 ++++++++ .../unit/gapic/logging_v2/test_metrics_service_v2.py | 8 ++++++++ 14 files changed, 80 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index c4ae13076d0c..30282e2d26ce 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - __all__ = ( "ConfigServiceV2Transport", "ConfigServiceV2GrpcTransport", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index f083373b1e5b..a64405fba954 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> 
grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 7376164e4730..aa094ea0ee9c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 910a38ecdb10..cd979b771d26 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - __all__ = ( "LoggingServiceV2Transport", "LoggingServiceV2GrpcTransport", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index d774281b9d20..f8007bb0d06b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 686eb52e016e..6adea9ca5bdc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index eef07abd795d..f748403b4305 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - __all__ = ( "MetricsServiceV2Transport", "MetricsServiceV2GrpcTransport", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0a6f25bd6dce..1cb9262abaa5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 9ec30eed080f..ddbd16da633c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 4c85fbb46dcc..dce385af3cda 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -54,6 +54,7 @@ GetCmekSettingsRequest, UpdateCmekSettingsRequest, CmekSettings, + LifecycleState, ) from .logging import ( DeleteLogRequest, @@ -79,7 +80,6 @@ DeleteLogMetricRequest, ) - __all__ = ( "LogEntry", "LogEntryOperation", @@ -117,6 +117,7 @@ "GetCmekSettingsRequest", "UpdateCmekSettingsRequest", "CmekSettings", + "LifecycleState", "DeleteLogRequest", "WriteLogEntriesRequest", "WriteLogEntriesResponse", diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index c3228fbcfab9..6ab2fc041011 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7eaa5853f3a45e3db015a09841b98aeab461e6f3" + "sha": "3a25c8cd9bd06e5a8f488945c9bc94380e2bf0d1" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e8857c4c36948e7e0500377cd7fcecbf2459afc8", - "internalRef": "344435830" + "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", + "internalRef": "347055288" } }, { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 25e35e5c64c4..47a41f25c3cb 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -5382,6 +5382,10 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -5428,6 +5432,10 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f6cb5d7a154d..2b8129f299af 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2048,6 +2048,10 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -2095,6 +2099,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 0cf2e894407b..0bc10e4bc960 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1993,6 +1993,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -2040,6 +2044,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel From 17cd4baa3835dd7f45e7cfe5cee6bd1bdba11d9c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 16 Dec 2020 11:07:42 -0800 Subject: [PATCH 355/855] chore: release 2.0.2 (#116) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 9 +++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index a85cfb01e3e3..658443a46940 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) + + +### Bug Fixes + +* Add submodule imports for handlers to logging alias ([#117](https://www.github.com/googleapis/python-logging/issues/117)) ([6843a3a](https://www.github.com/googleapis/python-logging/commit/6843a3aee3c0908ddbc493e7a9ecdddd01df34ef)) +* remove client recv msg limit fix: add enums to `types/__init__.py` 
([#131](https://www.github.com/googleapis/python-logging/issues/131)) ([6349b89](https://www.github.com/googleapis/python-logging/commit/6349b899811cbb16f5548df0b77564b46666c4e7)) +* Remove keyword only argument for RequestsMiddleware ([#113](https://www.github.com/googleapis/python-logging/issues/113)) ([e704f28](https://www.github.com/googleapis/python-logging/commit/e704f287a40db38d0da42fa5e21e7a9ef73922ec)) + ### [2.0.1](https://www.github.com/googleapis/python-logging/compare/v2.0.0...v2.0.1) (2020-12-02) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 635bd1a57868..d50ed511d06b 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.0.1" +version = "2.0.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b6fe79ff31aa1360f31c7b7b9ef7b7358a88e8f5 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 16 Dec 2020 15:21:09 -0800 Subject: [PATCH 356/855] feat: support http_request field (#120) --- .../cloud/logging_v2/handlers/_helpers.py | 88 ++++++--- .../cloud/logging_v2/handlers/app_engine.py | 19 +- .../handlers/transports/background_thread.py | 36 +--- .../logging_v2/handlers/transports/base.py | 8 +- .../logging_v2/handlers/transports/sync.py | 15 +- .../tests/unit/handlers/test__helpers.py | 175 +++++++++++++++--- .../tests/unit/handlers/test_app_engine.py | 62 ++++--- .../transports/test_background_thread.py | 31 +--- .../unit/handlers/transports/test_sync.py | 3 + 9 files changed, 283 insertions(+), 154 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 3150e46c351f..9821e95afebf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -23,9 +23,14 @@ flask = None from google.cloud.logging_v2.handlers.middleware.request import _get_django_request +from google.logging.type.http_request_pb2 import HttpRequest _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" +_DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT" +_DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR" +_DJANGO_REFERER_HEADER = "HTTP_REFERER" _FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" +_PROTOCOL_HEADER = "SERVER_PROTOCOL" def format_stackdriver_json(record, message): @@ -46,59 +51,86 @@ def format_stackdriver_json(record, message): return json.dumps(payload) -def get_trace_id_from_flask(): - """Get trace_id from flask request headers. +def get_request_data_from_flask(): + """Get http_request and trace data from flask request headers. Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a flask request isn't found. 
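The rewritten helper above returns a `(HttpRequest, trace_id)` tuple instead of a bare trace id. A hedged sketch of both pieces in isolation, using the `HttpRequest` protobuf imported in this hunk; every concrete value below is illustrative:

```python
from google.logging.type.http_request_pb2 import HttpRequest

# X-Cloud-Trace-Context carries "TRACE_ID/SPAN_ID;o=OPTIONS"; only the
# leading TRACE_ID is kept, and the App Engine handler later qualifies
# it with the project (see the emit change further down). All concrete
# values here are illustrative.
header = "0123456789abcdef/987;o=1"
trace_id = header.split("/", 1)[0]
trace = f"projects/my-project/traces/{trace_id}"

http_request = HttpRequest(
    request_method="GET",
    request_url="https://example.com/",
    user_agent="Mozilla/5.0",
    remote_ip="10.1.2.3",
    protocol="HTTP/1.1",
)
assert trace == "projects/my-project/traces/0123456789abcdef"
```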
""" if flask is None or not flask.request: - return None + return None, None + + # build http_request + http_request = HttpRequest( + request_method=flask.request.method, + request_url=flask.request.url, + request_size=flask.request.content_length, + user_agent=flask.request.user_agent.string, + remote_ip=flask.request.remote_addr, + referer=flask.request.referrer, + protocol=flask.request.environ.get(_PROTOCOL_HEADER), + ) + # find trace id + trace_id = None header = flask.request.headers.get(_FLASK_TRACE_HEADER) + if header: + trace_id = header.split("/", 1)[0] - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id + return http_request, trace_id -def get_trace_id_from_django(): - """Get trace_id from django request headers. +def get_request_data_from_django(): + """Get http_request and trace data from django request headers. Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a django request isn't found. """ request = _get_django_request() if request is None: - return None + return None, None + # build http_request + http_request = HttpRequest( + request_method=request.method, + request_url=request.build_absolute_uri(), + request_size=len(request.body), + user_agent=request.META.get(_DJANGO_USERAGENT_HEADER), + remote_ip=request.META.get(_DJANGO_REMOTE_ADDR_HEADER), + referer=request.META.get(_DJANGO_REFERER_HEADER), + protocol=request.META.get(_PROTOCOL_HEADER), + ) + # find trace id + trace_id = None header = request.META.get(_DJANGO_TRACE_HEADER) - if header is None: - return None - - trace_id = header.split("/", 1)[0] + if header: + trace_id = header.split("/", 1)[0] - return trace_id + return http_request, trace_id -def get_trace_id(): - """Helper to get trace_id from web application request header. +def get_request_data(): + """Helper to get http_request and trace data from supported web + frameworks (currently supported: Flask and Django). Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a supported web request isn't found. 
""" checkers = ( - get_trace_id_from_django, - get_trace_id_from_flask, + get_request_data_from_django, + get_request_data_from_flask, ) for checker in checkers: - trace_id = checker() - if trace_id is not None: - return trace_id + http_request, trace_id = checker() + if http_request is not None: + return http_request, trace_id - return None + return None, None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index fed9bd205add..4d1fe8085291 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -21,7 +21,7 @@ import logging import os -from google.cloud.logging_v2.handlers._helpers import get_trace_id +from google.cloud.logging_v2.handlers._helpers import get_request_data from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.resource import Resource @@ -96,7 +96,7 @@ def get_gae_labels(self): """ gae_labels = {} - trace_id = get_trace_id() + _, trace_id = get_request_data() if trace_id is not None: gae_labels[_TRACE_ID_LABEL] = trace_id @@ -114,11 +114,14 @@ def emit(self, record): """ message = super(AppEngineHandler, self).format(record) gae_labels = self.get_gae_labels() - trace_id = ( - "projects/%s/traces/%s" % (self.project_id, gae_labels[_TRACE_ID_LABEL]) - if _TRACE_ID_LABEL in gae_labels - else None - ) + http_request, trace_id = get_request_data() + if trace_id is not None: + trace_id = f"projects/{self.project_id}/traces/{trace_id}" self.transport.send( - record, message, resource=self.resource, labels=gae_labels, trace=trace_id + record, + message, + resource=self.resource, + labels=gae_labels, + trace=trace_id, + http_request=http_request, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 873fa452d294..3d654dbd8e75 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -222,31 +222,21 @@ def _main_thread_terminated(self): file=sys.stderr, ) - def enqueue( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def enqueue(self, record, message, **kwargs): """Queues a log entry to be written by the background thread. Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry - labels (Optional[dict]): Mapping of labels for the entry. - trace (Optional[str]): TraceID to apply to the logging entry. - span_id (Optional[str]): Span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. 
+ kwargs: Additional optional arguments for the logger """ queue_entry = { "info": {"message": message, "python_logger": record.name}, "severity": _helpers._normalize_severity(record.levelno), - "resource": resource, - "labels": labels, - "trace": trace, - "span_id": span_id, "timestamp": datetime.datetime.utcfromtimestamp(record.created), } + queue_entry.update(kwargs) self._queue.put_nowait(queue_entry) def flush(self): @@ -291,30 +281,16 @@ def __init__( ) self.worker.start() - def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Overrides Transport.send(). Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. - trace (Optional[str]): TraceID to apply to the logging entry. - span_id (Optional[str]): span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + kwargs: Additional optional arguments for the logger """ - self.worker.enqueue( - record, - message, - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, - ) + self.worker.enqueue(record, message, **kwargs) def flush(self): """Submit any pending log records.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py index c94c7ad704c3..d60a5a070876 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py @@ -22,18 +22,14 @@ class Transport(object): client and name object, and must override :meth:`send`. """ - def send( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Transport send to be implemented by subclasses. Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. + kwargs: Additional optional arguments for the logger """ raise NotImplementedError diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index 550c29391548..35ee73daa19d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -30,9 +30,7 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Overrides transport.send(). Args: @@ -40,16 +38,9 @@ def send( Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. 
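The transports above drop their enumerated keyword parameters (resource, labels, trace, span_id) in favor of a pass-through `**kwargs`, so a new field such as `http_request` reaches the logger without widening every intermediate signature. A reduced sketch of the pattern, with names simplified from the patch:

```python
# Reduced sketch of the **kwargs pass-through adopted above; the dict
# stands in for the queue entry built by the background-thread worker.
def enqueue(logger_name, message, **kwargs):
    entry = {"info": {"message": message, "python_logger": logger_name}}
    entry.update(kwargs)  # resource, labels, trace, span_id, http_request, ...
    return entry

entry = enqueue("my.logger", "hello", trace="projects/p/traces/t")
assert entry["trace"] == "projects/p/traces/t"
```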
- resource (Optional[~logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. + kwargs: Additional optional arguments for the logger """ info = {"message": message, "python_logger": record.name} self.logger.log_struct( - info, - severity=_helpers._normalize_severity(record.levelno), - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, + info, severity=_helpers._normalize_severity(record.levelno), **kwargs, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 1fbf6c86011e..8fb37305be70 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -16,13 +16,18 @@ import mock +_FLASK_TRACE_ID = "flask-id" +_FLASK_HTTP_REQUEST = {"request_url": "https://flask.palletsprojects.com/en/1.1.x/"} +_DJANGO_TRACE_ID = "django-id" +_DJANGO_HTTP_REQUEST = {"request_url": "https://www.djangoproject.com/"} -class Test_get_trace_id_from_flask(unittest.TestCase): + +class Test_get_request_data_from_flask(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id_from_flask() + return _helpers.get_request_data_from_flask() @staticmethod def create_app(): @@ -39,13 +44,14 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertIsNone(trace_id) + self.assertEqual(http_request.request_method, "GET") def test_valid_context_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" - expected_trace_id = "testtraceidflask" + expected_trace_id = _FLASK_TRACE_ID flask_trace_id = expected_trace_id + "/testspanid" app = self.create_app() @@ -54,17 +60,57 @@ def test_valid_context_header(self): ) with context: - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(http_request.request_method, "GET") + + def test_http_request_populated(self): + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_referrer = "self" + expected_ip = "10.1.2.3" + body_content = "test" + headers = { + "User-Agent": expected_agent, + "Referer": expected_referrer, + } + app = self.create_app() + with app.test_client() as c: + c.put( + path=expected_path, + data=body_content, + environ_base={"REMOTE_ADDR": expected_ip}, + headers=headers, + ) + http_request, trace_id = self._call_fut() + + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.user_agent, expected_agent) + self.assertEqual(http_request.referer, expected_referrer) + self.assertEqual(http_request.remote_ip, expected_ip) + self.assertEqual(http_request.request_size, len(body_content)) + self.assertEqual(http_request.protocol, "HTTP/1.1") + + def test_http_request_sparse(self): + expected_path = "http://testserver/123" + app = self.create_app() + with app.test_client() as c: + c.put(path=expected_path) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.protocol, "HTTP/1.1") -class Test_get_trace_id_from_django(unittest.TestCase): + 
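The Flask tests above exercise the helper inside a test request context; a hypothetical end-to-end call looks like the following, with the header key and value mirroring the unit tests:

```python
# Hypothetical use of the helper tested above; requires Flask installed.
import flask

from google.cloud.logging_v2.handlers import _helpers

app = flask.Flask(__name__)
headers = {"X_CLOUD_TRACE_CONTEXT": "abc123/456;o=1"}

with app.test_request_context(path="/", headers=headers):
    http_request, trace_id = _helpers.get_request_data_from_flask()

print(http_request.request_method, trace_id)  # GET abc123
```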
+class Test_get_request_data_from_django(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id_from_django() + return _helpers.get_request_data_from_django() def setUp(self): from django.conf import settings @@ -89,7 +135,8 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "GET") self.assertIsNone(trace_id) def test_valid_context_header(self): @@ -106,61 +153,137 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(http_request.request_method, "GET") + + def test_http_request_populated(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_referrer = "self" + body_content = "test" + django_request = RequestFactory().put( + expected_path, + data=body_content, + HTTP_USER_AGENT=expected_agent, + HTTP_REFERER=expected_referrer, + ) + + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.user_agent, expected_agent) + self.assertEqual(http_request.referer, expected_referrer) + self.assertEqual(http_request.remote_ip, "127.0.0.1") + self.assertEqual(http_request.request_size, len(body_content)) + self.assertEqual(http_request.protocol, "HTTP/1.1") + + def test_http_request_sparse(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + expected_path = "http://testserver/123" + django_request = RequestFactory().put(expected_path) + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.remote_ip, "127.0.0.1") + self.assertEqual(http_request.protocol, "HTTP/1.1") -class Test_get_trace_id(unittest.TestCase): +class Test_get_request_data(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id() + return _helpers.get_request_data() def _helper(self, django_return, flask_return): django_patch = mock.patch( - "google.cloud.logging_v2.handlers._helpers.get_trace_id_from_django", + "google.cloud.logging_v2.handlers._helpers.get_request_data_from_django", return_value=django_return, ) flask_patch = mock.patch( - "google.cloud.logging_v2.handlers._helpers.get_trace_id_from_flask", + "google.cloud.logging_v2.handlers._helpers.get_request_data_from_flask", return_value=flask_return, ) with django_patch as django_mock: with flask_patch as flask_mock: - trace_id = self._call_fut() + result = self._call_fut() - return django_mock, flask_mock, trace_id + return django_mock, flask_mock, result def test_from_django(self): - django_mock, flask_mock, trace_id = self._helper("test-django-trace-id", None) 
- self.assertEqual(trace_id, django_mock.return_value) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) + flask_expected = (None, None) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, django_expected) django_mock.assert_called_once_with() flask_mock.assert_not_called() def test_from_flask(self): - django_mock, flask_mock, trace_id = self._helper(None, "test-flask-trace-id") - self.assertEqual(trace_id, flask_mock.return_value) + django_expected = (None, None) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, flask_expected) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_mock, flask_mock, trace_id = self._helper( - "test-django-trace-id", "test-flask-trace-id" - ) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + # Django wins. - self.assertEqual(trace_id, django_mock.return_value) + self.assertEqual(output, django_expected) django_mock.assert_called_once_with() flask_mock.assert_not_called() - def test_missing(self): - django_mock, flask_mock, trace_id = self._helper(None, None) - self.assertIsNone(trace_id) + def test_missing_http_request(self): + flask_expected = (None, _FLASK_TRACE_ID) + django_expected = (None, _DJANGO_TRACE_ID) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + + # function only returns trace if http_request data is present + self.assertEqual(output, (None, None)) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() + + def test_missing_trace_id(self): + flask_expected = (_FLASK_HTTP_REQUEST, None) + django_expected = (None, _DJANGO_TRACE_ID) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + + # trace_id is optional + self.assertEqual(output, flask_expected) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() + + def test_missing_both(self): + flask_expected = (None, None) + django_expected = (None, None) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, (None, None)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() + + def test_wo_libraries(self): + output = self._call_fut() + self.assertEqual(output, (None, None)) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index ea16e3c85dcb..71672fa6fb0a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -87,36 +87,54 @@ def test_constructor_w_gae_flex_env(self): self.assertIs(handler.stream, stream) def test_emit(self): - client = mock.Mock(project=self.PROJECT, spec=["project"]) - handler = self._make_one(client, transport=_Transport) - gae_resource = handler.get_gae_resource() - gae_labels = handler.get_gae_labels() - trace = None - logname = "app" - message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) - handler.emit(record) - - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, logname) - 
self.assertEqual( - handler.transport.send_called_with, - (record, message, gae_resource, gae_labels, trace), + expected_http_request = {"request_url": "test"} + trace_id = "trace-test" + expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" + get_request_patch = mock.patch( + "google.cloud.logging_v2.handlers.app_engine.get_request_data", + return_value=(expected_http_request, trace_id), ) + with get_request_patch: + # library integrations mocked to return test data + client = mock.Mock(project=self.PROJECT, spec=["project"]) + handler = self._make_one(client, transport=_Transport) + gae_resource = handler.get_gae_resource() + gae_labels = handler.get_gae_labels() + logname = "app" + message = "hello world" + record = logging.LogRecord( + logname, logging, None, None, message, None, None + ) + handler.project_id = self.PROJECT + handler.emit(record) + + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, logname) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + gae_resource, + gae_labels, + expected_trace_id, + expected_http_request, + ), + ) def _get_gae_labels_helper(self, trace_id): - get_trace_patch = mock.patch( - "google.cloud.logging_v2.handlers.app_engine.get_trace_id", - return_value=trace_id, + get_request_patch = mock.patch( + "google.cloud.logging_v2.handlers.app_engine.get_request_data", + return_value=(None, trace_id), ) client = mock.Mock(project=self.PROJECT, spec=["project"]) # The handler actually calls ``get_gae_labels()``. - with get_trace_patch as mock_get_trace: + with get_request_patch as mock_get_request: handler = self._make_one(client, transport=_Transport) gae_labels = handler.get_gae_labels() - self.assertEqual(mock_get_trace.mock_calls, [mock.call()]) + self.assertEqual(mock_get_request.mock_calls, [mock.call()]) return gae_labels @@ -138,5 +156,5 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource, labels, trace): - self.send_called_with = (record, message, resource, labels, trace) + def send(self, record, message, resource, labels, trace, http_request): + self.send_called_with = (record, message, resource, labels, trace, http_request) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index e9626a759dd1..5410c5f10547 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -64,12 +64,7 @@ def test_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, - message, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=None, - span_id=None, + record, message, resource=_GLOBAL_RESOURCE, ) def test_trace_send(self): @@ -91,12 +86,7 @@ def test_trace_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, trace=trace) transport.worker.enqueue.assert_called_once_with( - record, - message, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=trace, - span_id=None, + record, message, resource=_GLOBAL_RESOURCE, trace=trace, ) def test_span_send(self): @@ -118,12 +108,7 @@ def test_span_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, span_id=span_id) transport.worker.enqueue.assert_called_once_with( - record, - message, - 
resource=_GLOBAL_RESOURCE, - labels=None, - trace=None, - span_id=span_id, + record, message, resource=_GLOBAL_RESOURCE, span_id=span_id, ) def test_flush(self): @@ -297,11 +282,12 @@ def test_enqueue_defaults(self): expected_info = {"message": message, "python_logger": "testing"} self.assertEqual(entry["info"], expected_info) self.assertEqual(entry["severity"], LogSeverity.INFO) - self.assertIsNone(entry["resource"]) - self.assertIsNone(entry["labels"]) - self.assertIsNone(entry["trace"]) - self.assertIsNone(entry["span_id"]) self.assertIsInstance(entry["timestamp"], datetime.datetime) + self.assertNotIn("resource", entry.keys()) + self.assertNotIn("labels", entry.keys()) + self.assertNotIn("trace", entry.keys()) + self.assertNotIn("span_id", entry.keys()) + self.assertNotIn("http_request", entry.keys()) def test_enqueue_explicit(self): import datetime @@ -503,6 +489,7 @@ def log_struct( trace=None, span_id=None, timestamp=None, + http_request=None, ): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 0ee6db229a75..9f06427573c1 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -58,6 +58,7 @@ def test_send(self): None, None, None, + None, ) self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) @@ -76,6 +77,7 @@ def log_struct( labels=None, trace=None, span_id=None, + http_request=None, ): self.log_struct_called_with = ( message, @@ -84,6 +86,7 @@ def log_struct( labels, trace, span_id, + http_request, ) From 6e9ea1c0b57d00ee706d5e7998b2af0447391121 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 17 Dec 2020 09:43:50 -0800 Subject: [PATCH 357/855] docs: fix usage guide (#140) --- packages/google-cloud-logging/docs/usage.rst | 72 +++---- .../samples/snippets/requirements.txt | 5 +- .../snippets/usage_guide.py} | 177 +++++++----------- .../samples/snippets/usage_guide_test.py | 90 +++++++++ 4 files changed, 192 insertions(+), 152 deletions(-) rename packages/google-cloud-logging/{docs/snippets.py => samples/snippets/usage_guide.py} (65%) create mode 100644 packages/google-cloud-logging/samples/snippets/usage_guide_test.py diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 4714144f926d..1ea9440fcd26 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -8,21 +8,21 @@ To write log entries, first create a :class:`~google.cloud.logging.logger.Logger`, passing the "log name" with which to associate the entries: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_create] :end-before: [END logger_create] :dedent: 4 Write a simple text entry to the logger. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_text] :end-before: [END logger_log_text] :dedent: 4 Write a dictionary entry to the logger. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_struct] :end-before: [END logger_log_struct] :dedent: 4 @@ -34,7 +34,7 @@ Supported Resource values are listed at `Monitored Resource Types`_ .. 
_Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_resource_text] :end-before: [END logger_log_resource_text] :dedent: 4 @@ -44,7 +44,7 @@ Retrieving log entries Fetch entries for the default project. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_default] :end-before: [END client_list_entries_default] :dedent: 4 @@ -59,41 +59,27 @@ will be instances of one of the following classes: - :class:`~google.cloud.logging.entries.StructEntry` - :class:`~google.cloud.logging.entries.ProtobufEntry` -Fetch entries across multiple projects. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_multi_project] - :end-before: [END client_list_entries_multi_project] - :dedent: 4 - Filter entries retrieved using the `Advanced Logs Filters`_ syntax .. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters Fetch entries for the default project. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_filter] :end-before: [END client_list_entries_filter] :dedent: 4 Sort entries in descending timestamp order. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_order_by] :end-before: [END client_list_entries_order_by] :dedent: 4 -Retrieve entries in batches of 10, iterating until done. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_paged] - :end-before: [END client_list_entries_paged] - :dedent: 4 - Retrieve entries for a single logger, sorting in descending timestamp order: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_list_entries] :end-before: [END logger_list_entries] :dedent: 4 @@ -102,7 +88,7 @@ Retrieve entries for a single logger, sorting in descending timestamp order: Delete all entries for a logger ------------------------------- -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_delete] :end-before: [END logger_delete] :dedent: 8 @@ -116,35 +102,35 @@ used within Cloud Monitoring to create charts and alerts. List all metrics for a project: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_metrics] :end-before: [END client_list_metrics] :dedent: 4 Create a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_create] :end-before: [END metric_create] :dedent: 4 Refresh local information about a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_reload] :end-before: [END metric_reload] :dedent: 4 Update a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_update] :end-before: [END metric_update] :dedent: 4 Delete a metric: -.. literalinclude:: snippets.py +.. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_delete] :end-before: [END metric_delete] :dedent: 4 @@ -166,14 +152,14 @@ Make sure that the storage bucket you want to export logs too has Add ``cloud-logs@google.com`` as the owner of the bucket: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bucket_permissions] :end-before: [END sink_bucket_permissions] :dedent: 4 Create a Cloud Storage sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_storage_create] :end-before: [END sink_storage_create] :dedent: 4 @@ -189,14 +175,14 @@ See: `Setting permissions for BigQuery`_ .. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_dataset_permissions] :end-before: [END sink_dataset_permissions] :dedent: 4 Create a BigQuery sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bigquery_create] :end-before: [END sink_bigquery_create] :dedent: 4 @@ -212,14 +198,14 @@ See: `Setting permissions for Pub/Sub`_ .. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_topic_permissions] :end-before: [END sink_topic_permissions] :dedent: 4 Create a Cloud Pub/Sub sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_pubsub_create] :end-before: [END sink_pubsub_create] :dedent: 4 @@ -229,28 +215,28 @@ Manage Sinks List all sinks for a project: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_sinks] :end-before: [END client_list_sinks] :dedent: 4 Refresh local information about a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_reload] :end-before: [END sink_reload] :dedent: 4 Update a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_update] :end-before: [END sink_update] :dedent: 4 Delete a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_delete] :end-before: [END sink_delete] :dedent: 4 @@ -263,7 +249,7 @@ Cloud Logging. There are different handler options to accomplish this. To automatically pick the default for your current environment, use :meth:`~google.cloud.logging.client.Client.get_default_handler`. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_default_handler] :end-before: [END create_default_handler] :dedent: 4 @@ -274,7 +260,7 @@ as well as any other loggers created. A helper method :meth:`~google.cloud.logging.client.Client.setup_logging` is provided to configure this automatically. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START setup_logging] :end-before: [END setup_logging] :dedent: 4 @@ -286,7 +272,7 @@ to configure this automatically. You can also exclude certain loggers: -.. 
literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START setup_logging_excludes] :end-before: [END setup_logging_excludes] :dedent: 4 @@ -300,7 +286,7 @@ directly create a :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance which will write directly to the API. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_cloud_handler] :end-before: [END create_cloud_handler] :dedent: 4 @@ -316,7 +302,7 @@ All logs will go to a single custom log, which defaults to "python". The name of the Python logger will be included in the structured log entry under the "python_logger" field. You can change it by providing a name to the handler: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_named_handler] :end-before: [END create_named_handler] :dedent: 4 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index bdb659d04691..b071a67f3f3b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1 +1,4 @@ -google-cloud-logging==2.0.1 +google-cloud-logging==2.0.2 +google-cloud-storage==1.35.0 +google-cloud-pubsub==2.2.0 +google-cloud-bigquery==2.6.1 diff --git a/packages/google-cloud-logging/docs/snippets.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py similarity index 65% rename from packages/google-cloud-logging/docs/snippets.py rename to packages/google-cloud-logging/samples/snippets/usage_guide.py index da9ba9b2d857..b28d10980ee1 100644 --- a/packages/google-cloud-logging/docs/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Testable usage examples for Cloud Logging API wrapper +"""Samples embedded in the Usage Guide (docs/usage.rst) Each example function takes a ``client`` argument (which must be an instance of :class:`google.cloud.logging.client.Client`) and uses it to perform a task @@ -23,9 +23,10 @@ need to be deleted during teardown. 
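The handler sections of the usage guide above all reduce to the same few lines of setup. A hedged sketch of that documented flow; actually running it requires Google Cloud application default credentials:

```python
# Hedged sketch of the handler setup the usage guide documents above;
# running it requires application default credentials.
import logging

import google.cloud.logging

client = google.cloud.logging.Client()
client.setup_logging()  # attaches the default Cloud Logging handler

logging.warning("routed through Cloud Logging")
```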
""" +import os import time -from google.cloud.logging.client import Client +from google.cloud.logging import Client def snippet(func): @@ -42,25 +43,6 @@ def do_something_with(item): # pylint: disable=unused-argument pass -# pylint: disable=reimported,unused-variable,unused-argument -@snippet -def instantiate_client(_unused_client, _unused_to_delete): - """Instantiate client.""" - - # [START client_create_default] - from google.cloud import logging - - client = logging.Client() - # [END client_create_default] - - credentials = object() - # [START client_create_explicit] - from google.cloud import logging - - client = logging.Client(project="my-project", credentials=credentials) - # [END client_create_explicit] - - # pylint: enable=reimported,unused-variable,unused-argument @@ -71,55 +53,32 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument # [START client_list_entries_default] for entry in client.list_entries(): # API call(s) do_something_with(entry) - # [END client_list_entries_default] + # [END client_list_entries_default] + break # [START client_list_entries_filter] - FILTER = "logName:log_name AND textPayload:simple" - for entry in client.list_entries(filter_=FILTER): # API call(s) + filter_str = "logName:log_name AND textPayload:simple" + for entry in client.list_entries(filter_=filter_str): # API call(s) do_something_with(entry) - # [END client_list_entries_filter] + # [END client_list_entries_filter] + break # [START client_list_entries_order_by] from google.cloud.logging import DESCENDING for entry in client.list_entries(order_by=DESCENDING): # API call(s) do_something_with(entry) - # [END client_list_entries_order_by] - - # [START client_list_entries_paged] - iterator = client.list_entries() - pages = iterator.pages - - page1 = next(pages) # API call - for entry in page1: - do_something_with(entry) - - page2 = next(pages) # API call - for entry in page2: - do_something_with(entry) - # [END client_list_entries_paged] - - -# @snippet Commented because we need real project IDs to test -def client_list_entries_multi_project( - client, to_delete -): # pylint: disable=unused-argument - """List entries via client across multiple projects.""" - - # [START client_list_entries_multi_project] - resource_names = ["projects/one-project", "projects/another-project"] - for entry in client.list_entries(resource_names=resource_names): # API call(s) - do_something_with(entry) - # [END client_list_entries_multi_project] + # [END client_list_entries_order_by] + break @snippet def logger_usage(client, to_delete): """Logger usage.""" - LOG_NAME = "logger_usage_%d" % (_millis()) + log_name = "logger_usage_%d" % (_millis()) # [START logger_create] - logger = client.logger(LOG_NAME) + logger = client.logger(log_name) # [END logger_create] to_delete.append(logger) @@ -134,7 +93,7 @@ def logger_usage(client, to_delete): # [END logger_log_struct] # [START logger_log_resource_text] - from google.cloud.logging.resource import Resource + from google.cloud.logging import Resource res = Resource( type="generic_node", @@ -168,11 +127,11 @@ def _logger_delete(): @snippet def metric_crud(client, to_delete): """Metric CRUD.""" - METRIC_NAME = "robots-%d" % (_millis(),) - DESCRIPTION = "Robots all up in your server" - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" - UPDATED_DESCRIPTION = "Danger, Will Robinson!" 
+ metric_name = "robots-%d" % (_millis(),) + description = "Robots all up in your server" + filter = "logName:apache-access AND textPayload:robot" + updated_filter = "textPayload:robot" + updated_description = "Danger, Will Robinson!" # [START client_list_metrics] for metric in client.list_metrics(): # API call(s) @@ -180,7 +139,7 @@ def metric_crud(client, to_delete): # [END client_list_metrics] # [START metric_create] - metric = client.metric(METRIC_NAME, filter_=FILTER, description=DESCRIPTION) + metric = client.metric(metric_name, filter_=filter, description=description) assert not metric.exists() # API call metric.create() # API call assert metric.exists() # API call @@ -188,20 +147,20 @@ def metric_crud(client, to_delete): to_delete.append(metric) # [START metric_reload] - existing_metric = client.metric(METRIC_NAME) + existing_metric = client.metric(metric_name) existing_metric.reload() # API call # [END metric_reload] - assert existing_metric.filter_ == FILTER - assert existing_metric.description == DESCRIPTION + assert existing_metric.filter_ == filter + assert existing_metric.description == description # [START metric_update] - existing_metric.filter_ = UPDATED_FILTER - existing_metric.description = UPDATED_DESCRIPTION + existing_metric.filter_ = updated_filter + existing_metric.description = updated_description existing_metric.update() # API call # [END metric_update] existing_metric.reload() - assert existing_metric.filter_ == UPDATED_FILTER - assert existing_metric.description == UPDATED_DESCRIPTION + assert existing_metric.filter_ == updated_filter + assert existing_metric.description == updated_description def _metric_delete(): # [START metric_delete] @@ -215,9 +174,9 @@ def _metric_delete(): def _sink_storage_setup(client): from google.cloud import storage - BUCKET_NAME = "sink-storage-%d" % (_millis(),) + bucket_name = "sink-storage-%d" % (_millis(),) client = storage.Client() - bucket = client.bucket(BUCKET_NAME) + bucket = client.bucket(bucket_name) bucket.create() # [START sink_bucket_permissions] @@ -236,12 +195,12 @@ def sink_storage(client, to_delete): """Sink log entries to storage.""" bucket = _sink_storage_setup(client) to_delete.append(bucket) - SINK_NAME = "robots-storage-%d" % (_millis(),) - FILTER = "textPayload:robot" + sink_name = "robots-storage-%d" % (_millis(),) + filter = "textPayload:robot" # [START sink_storage_create] - DESTINATION = "storage.googleapis.com/%s" % (bucket.name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "storage.googleapis.com/%s" % (bucket.name,) + sink = client.sink(sink_name, filter_=filter, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call @@ -252,19 +211,17 @@ def sink_storage(client, to_delete): def _sink_bigquery_setup(client): from google.cloud import bigquery - DATASET_NAME = "sink_bigquery_%d" % (_millis(),) + dataset_name = "sink_bigquery_%d" % (_millis(),) client = bigquery.Client() - dataset = client.dataset(DATASET_NAME) - dataset.create() - dataset.reload() + dataset = client.create_dataset(dataset_name) # [START sink_dataset_permissions] - from google.cloud.bigquery.dataset import AccessGrant + from google.cloud.bigquery.dataset import AccessEntry - grants = dataset.access_grants - grants.append(AccessGrant("WRITER", "groupByEmail", "cloud-logs@google.com")) - dataset.access_grants = grants - dataset.update() # API call + entry_list = dataset.access_entries + entry_list.append(AccessEntry("WRITER", 
"groupByEmail", "cloud-logs@google.com")) + dataset.access_entries = entry_list + client.update_dataset(dataset, ["access_entries"]) # API call # [END sink_dataset_permissions] return dataset @@ -274,13 +231,12 @@ def _sink_bigquery_setup(client): def sink_bigquery(client, to_delete): """Sink log entries to bigquery.""" dataset = _sink_bigquery_setup(client) - to_delete.append(dataset) - SINK_NAME = "robots-bigquery-%d" % (_millis(),) - FILTER = "textPayload:robot" + sink_name = "robots-bigquery-%d" % (_millis(),) + filter_str = "textPayload:robot" # [START sink_bigquery_create] - DESTINATION = "bigquery.googleapis.com%s" % (dataset.path,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "bigquery.googleapis.com%s" % (dataset.path,) + sink = client.sink(sink_name, filter_=filter_str, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call @@ -291,15 +247,21 @@ def sink_bigquery(client, to_delete): def _sink_pubsub_setup(client): from google.cloud import pubsub - TOPIC_NAME = "sink-pubsub-%d" % (_millis(),) - client = pubsub.Client() - topic = client.topic(TOPIC_NAME) - topic.create() + client = pubsub.PublisherClient() + + project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + topic_id = "sink-pubsub-%d" % (_millis(),) # [START sink_topic_permissions] - policy = topic.get_iam_policy() # API call - policy.owners.add(policy.group("cloud-logs@google.com")) - topic.set_iam_policy(policy) # API call + topic_path = client.topic_path(project_id, topic_id) + topic = client.create_topic(request={"name": topic_path}) + + policy = client.get_iam_policy(request={"resource": topic_path}) # API call + policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"]) + + client.set_iam_policy( + request={"resource": topic_path, "policy": policy} + ) # API call # [END sink_topic_permissions] return topic @@ -309,19 +271,18 @@ def _sink_pubsub_setup(client): def sink_pubsub(client, to_delete): """Sink log entries to pubsub.""" topic = _sink_pubsub_setup(client) - to_delete.append(topic) - SINK_NAME = "robots-pubsub-%d" % (_millis(),) - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" + sink_name = "robots-pubsub-%d" % (_millis(),) + filter_str = "logName:apache-access AND textPayload:robot" + updated_filter = "textPayload:robot" # [START sink_pubsub_create] - DESTINATION = "pubsub.googleapis.com/%s" % (topic.full_name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "pubsub.googleapis.com/%s" % (topic.name,) + sink = client.sink(sink_name, filter_=filter_str, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call # [END sink_pubsub_create] - to_delete.insert(0, sink) # delete sink before topic + created_sink = sink # [START client_list_sinks] for sink in client.list_sinks(): # API call(s) @@ -329,23 +290,23 @@ def sink_pubsub(client, to_delete): # [END client_list_sinks] # [START sink_reload] - existing_sink = client.sink(SINK_NAME) + existing_sink = client.sink(sink_name) existing_sink.reload() # [END sink_reload] - assert existing_sink.filter_ == FILTER - assert existing_sink.destination == DESTINATION + assert existing_sink.filter_ == filter_str + assert existing_sink.destination == destination # [START sink_update] - existing_sink.filter_ = UPDATED_FILTER + existing_sink.filter_ = updated_filter existing_sink.update() # [END sink_update] 
existing_sink.reload() - assert existing_sink.filter_ == UPDATED_FILTER + assert existing_sink.filter_ == updated_filter + sink = created_sink # [START sink_delete] sink.delete() # [END sink_delete] - to_delete.pop(0) @snippet diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide_test.py b/packages/google-cloud-logging/samples/snippets/usage_guide_test.py new file mode 100644 index 000000000000..f02d82fbde79 --- /dev/null +++ b/packages/google-cloud-logging/samples/snippets/usage_guide_test.py @@ -0,0 +1,90 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud.logging import Client + +import usage_guide + + +def test_logger_usage(): + client = Client() + + to_delete = [] + usage_guide.logger_usage(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_metric_crud(): + client = Client() + + to_delete = [] + usage_guide.metric_crud(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_storage(): + client = Client() + + to_delete = [] + usage_guide.sink_storage(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_bigquery(): + client = Client() + + to_delete = [] + usage_guide.sink_bigquery(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_pubsub(): + client = Client() + + to_delete = [] + usage_guide.sink_pubsub(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_logging_handler(): + client = Client() + + usage_guide.logging_handler(client) + + +def test_setup_logging(): + client = Client() + + usage_guide.setup_logging(client) + + +def test_client_list_entries(): + client = Client() + + to_delete = [] + usage_guide.client_list_entries(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) From 2a643ebe0c7a686bc39b0853f1c7efaffba66d2f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 17 Dec 2020 19:24:24 +0100 Subject: [PATCH 358/855] chore(deps): update precommit hook pre-commit/pre-commit-hooks to v3.4.0 (#133) --- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 6ad83346e261..a9024b15d725 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer From f7aa809de13a936ec4758ad09b123563c2ec00dc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 16:00:06 -0800 Subject: [PATCH 359/855] chore: 
update python versions in CONTRIBUTING (#142) --- packages/google-cloud-logging/CONTRIBUTING.rst | 11 +++++------ packages/google-cloud-logging/synth.metadata | 6 +++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index ab6c09b8f074..cd48664d8ac0 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 6ab2fc041011..6764913e0c4f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "3a25c8cd9bd06e5a8f488945c9bc94380e2bf0d1" + "sha": "f1e7613c06874691da03b4a19f5ea43e508651a2" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "b670a77a454f415d247907908e8ee7943e06d718" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "b670a77a454f415d247907908e8ee7943e06d718" } } ], From d74bf6f74e64e7a0b66977456d3cad6197c6c9b4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 16:00:33 -0800 Subject: [PATCH 360/855] chore: upgrade pre-commit version (#141) autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
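The CONTRIBUTING update above defers to noxfile.py as the source of truth for the supported interpreters. For orientation, a minimal sketch of such a parametrized session follows; it mirrors the 3.6-3.9 matrix from the docs change but is not the project's actual noxfile, and the package path and test directory passed to session.install and pytest are assumptions:

import nox

# Mirrors the supported versions listed in CONTRIBUTING.rst above.
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]

@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
    # Install the library under test plus the test runner, then run the suite
    # against this session's interpreter.
    session.install("-e", ".")
    session.install("pytest")
    session.run("pytest", "tests/unit")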
--- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index a9024b15d725..6ad83346e261 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v3.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer From 1d93ec1ebe328d6066ca753b894a238e62a87c3e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 28 Dec 2020 09:15:38 -0800 Subject: [PATCH 361/855] chore: update precommit version and exclude `.nox` from linting (#144) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore: exclude `.nox` directories from linting The samples tests create `.nox` directories with all dependencies installed. These directories should be excluded from linting. I've tested this change locally, and it significantly speeds up linting on my machine. Source-Author: Tim Swast Source-Date: Tue Dec 22 13:04:04 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: 373861061648b5fe5e0ac4f8a38b32d639ee93e4 Source-Link: https://github.com/googleapis/synthtool/commit/373861061648b5fe5e0ac4f8a38b32d639ee93e4 --- packages/google-cloud-logging/.flake8 | 1 + packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/synth.metadata | 6 +++--- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index ed9316381c9c..29227d4cf419 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 6ad83346e261..a9024b15d725 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 6764913e0c4f..d3cd0a5ccf64 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "f1e7613c06874691da03b4a19f5ea43e508651a2" + "sha": "f81e7a694d24f3ba2ad4380bbf500b8bc463e314" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b670a77a454f415d247907908e8ee7943e06d718" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b670a77a454f415d247907908e8ee7943e06d718" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } } ], From bd236fdec8e264d299a84c35230e83a22bf52aaa Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 6 Jan 2021 16:59:26 -0700 Subject: [PATCH 362/855] chore: add constraints file (#145) --- .../google-cloud-logging/testing/constraints-3.10.txt | 0 .../google-cloud-logging/testing/constraints-3.11.txt | 0 .../google-cloud-logging/testing/constraints-3.6.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.7.txt | 0 .../google-cloud-logging/testing/constraints-3.8.txt | 0 .../google-cloud-logging/testing/constraints-3.9.txt | 0 6 files changed, 10 insertions(+) create mode 100644 packages/google-cloud-logging/testing/constraints-3.10.txt create mode 100644 packages/google-cloud-logging/testing/constraints-3.11.txt create mode 100644 packages/google-cloud-logging/testing/constraints-3.6.txt create mode 100644 packages/google-cloud-logging/testing/constraints-3.7.txt create mode 100644 packages/google-cloud-logging/testing/constraints-3.8.txt create mode 100644 packages/google-cloud-logging/testing/constraints-3.9.txt diff --git a/packages/google-cloud-logging/testing/constraints-3.10.txt b/packages/google-cloud-logging/testing/constraints-3.10.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/testing/constraints-3.11.txt b/packages/google-cloud-logging/testing/constraints-3.11.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt new file mode 100644 index 000000000000..0e0bdeb0b73e --- /dev/null +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.0 +google-cloud-core==1.4.1 +proto-plus==1.11.0 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/testing/constraints-3.9.txt b/packages/google-cloud-logging/testing/constraints-3.9.txt new file mode 100644 index 000000000000..e69de29bb2d1 From c8db402b20fa737c2c420283f3328db0ee88ee21 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 8 Jan 2021 16:19:44 -0800 Subject: [PATCH 363/855] feat: allow modifying LogEntry data using extra argument (#129) --- .../cloud/logging_v2/handlers/app_engine.py | 20 ++++-- .../cloud/logging_v2/handlers/handlers.py | 22 ++++++- .../tests/system/test_system.py | 36 ++++++++++- .../tests/unit/handlers/test_app_engine.py | 62 ++++++++++++++++++- .../tests/unit/handlers/test_handlers.py | 60 +++++++++++++++++- 5 files changed, 188 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index 4d1fe8085291..a5d57c53e375 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -113,15 +113,25 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. """ message = super(AppEngineHandler, self).format(record) + inferred_http, inferred_trace = get_request_data() + if inferred_trace is not None: + inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" + # allow user overrides + trace = getattr(record, "trace", inferred_trace) + span_id = getattr(record, "span_id", None) + http_request = getattr(record, "http_request", inferred_http) + resource = getattr(record, "resource", self.resource) + user_labels = getattr(record, "labels", {}) + # merge labels gae_labels = self.get_gae_labels() - http_request, trace_id = get_request_data() - if trace_id is not None: - trace_id = f"projects/{self.project_id}/traces/{trace_id}" + gae_labels.update(user_labels) + # send off request self.transport.send( record, message, - resource=self.resource, + resource=resource, labels=gae_labels, - trace=trace_id, + trace=trace, + span_id=span_id, http_request=http_request, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index d45c7b61b4b5..fd99f7adc865 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -87,6 +87,7 @@ def __init__( self.name = name self.client = client self.transport = transport(client, name) + self.project_id = client.project self.resource = resource self.labels = labels @@ -101,7 +102,26 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
""" message = super(CloudLoggingHandler, self).format(record) - self.transport.send(record, message, resource=self.resource, labels=self.labels) + trace_id = getattr(record, "trace", None) + span_id = getattr(record, "span_id", None) + http_request = getattr(record, "http_request", None) + resource = getattr(record, "resource", self.resource) + user_labels = getattr(record, "labels", {}) + # merge labels + total_labels = self.labels if self.labels is not None else {} + total_labels.update(user_labels) + if len(total_labels) == 0: + total_labels = None + # send off request + self.transport.send( + record, + message, + resource=resource, + labels=(total_labels if total_labels else None), + trace=trace_id, + span_id=span_id, + http_request=http_request, + ) def setup_logging( diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index f9cb96e18591..dc5785155edc 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -27,7 +27,8 @@ from google.api_core.exceptions import ServiceUnavailable import google.cloud.logging from google.cloud._helpers import UTC -from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers import AppEngineHandler +from google.cloud.logging_v2.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource @@ -308,6 +309,39 @@ def test_log_handler_sync(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) + def test_handlers_w_extras(self): + LOG_MESSAGE = "Testing with injected extras." + + for cls in [CloudLoggingHandler, AppEngineHandler]: + LOGGER_NAME = f"{cls.__name__}-handler_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + expected_request = {"requestUrl": "localhost"} + extra = { + "trace": "123", + "span_id": "456", + "http_request": expected_request, + "resource": Resource(type="cloudiot_device", labels={}), + "labels": {"test-label": "manual"}, + } + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, extra["trace"]) + self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertEqual(entries[0].http_request, expected_request) + self.assertEqual(entries[0].labels, extra["labels"]) + self.assertEqual(entries[0].resource.type, extra["resource"].type) + def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 71672fa6fb0a..1ac9c5dd574f 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -118,10 +118,60 @@ def test_emit(self): gae_resource, gae_labels, expected_trace_id, + None, expected_http_request, ), ) + def test_emit_manual_field_override(self): + from google.cloud.logging_v2.resource import Resource + + inferred_http_request = {"request_url": "test"} + inferred_trace_id = "trace-test" + get_request_patch = mock.patch( + "google.cloud.logging_v2.handlers.app_engine.get_request_data", + return_value=(inferred_http_request, inferred_trace_id), + ) + with get_request_patch: + # library integrations mocked to return test data + client = mock.Mock(project=self.PROJECT, spec=["project"]) + handler = self._make_one(client, transport=_Transport) + gae_labels = handler.get_gae_labels() + logname = "app" + message = "hello world" + record = logging.LogRecord( + logname, logging, None, None, message, None, None + ) + handler.project_id = self.PROJECT + # set attributes manually + expected_trace = "123" + setattr(record, "trace", expected_trace) + expected_span = "456" + setattr(record, "span_id", expected_span) + expected_http = {"reuqest_url": "manual"} + setattr(record, "http_request", expected_http) + expected_resource = Resource(type="test", labels={}) + setattr(record, "resource", expected_resource) + additional_labels = {"test-label": "manual"} + expected_labels = dict(gae_labels) + expected_labels.update(additional_labels) + setattr(record, "labels", additional_labels) + handler.emit(record) + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, logname) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + expected_resource, + expected_labels, + expected_trace, + expected_span, + expected_http, + ), + ) + def _get_gae_labels_helper(self, trace_id): get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", @@ -156,5 +206,13 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource, labels, trace, http_request): - self.send_called_with = (record, message, resource, labels, trace, http_request) + def send(self, record, message, resource, labels, trace, span_id, http_request): + self.send_called_with = ( + record, + message, + resource, + labels, + trace, + span_id, + http_request, + ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index e967b2015e49..d84c1963505d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -85,7 +85,44 @@ def test_emit(self): self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None), + (record, message, _GLOBAL_RESOURCE, None, None, None, None), + ) + + def test_emit_manual_field_override(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.resource import Resource + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + logname = "loggername" + message = "hello world" + record = 
logging.LogRecord(logname, logging, None, None, message, None, None) + # set attributes manually + expected_trace = "123" + setattr(record, "trace", expected_trace) + expected_span = "456" + setattr(record, "span_id", expected_span) + expected_http = {"reuqest_url": "manual"} + setattr(record, "http_request", expected_http) + expected_resource = Resource(type="test", labels={}) + setattr(record, "resource", expected_resource) + expected_labels = {"test-label": "manual"} + setattr(record, "labels", expected_labels) + handler.emit(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + expected_resource, + expected_labels, + expected_trace, + expected_span, + expected_http, + ), ) @@ -148,5 +185,22 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource, labels=None): - self.send_called_with = (record, message, resource, labels) + def send( + self, + record, + message, + resource, + labels=None, + trace=None, + span_id=None, + http_request=None, + ): + self.send_called_with = ( + record, + message, + resource, + labels, + trace, + span_id, + http_request, + ) From 914f7dffcc159f585e432a564310b45e17af4ab7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 12 Jan 2021 10:58:47 -0800 Subject: [PATCH 364/855] fix: add InternalServerError to list of expected errors (#151) --- packages/google-cloud-logging/tests/system/test_system.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index dc5785155edc..45126f5e59b2 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -20,6 +20,7 @@ from google.api_core.exceptions import BadGateway from google.api_core.exceptions import Conflict +from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import NotFound from google.api_core.exceptions import TooManyRequests from google.api_core.exceptions import ResourceExhausted @@ -68,7 +69,9 @@ def _list_entries(logger): :returns: List of all entries consumed. 
""" inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) - outer = RetryErrors((ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) + outer = RetryErrors( + (ServiceUnavailable, ResourceExhausted, InternalServerError), max_tries=9 + )(inner) return outer(logger) From f7a7d4323ed4e6a0fa35ce9ff1e27454e44cff2f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Jan 2021 13:22:47 -0800 Subject: [PATCH 365/855] chore: release 2.1.0 (#138) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 18 ++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 658443a46940..9cab925d8e73 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.1.0](https://www.github.com/googleapis/python-logging/compare/v2.0.2...v2.1.0) (2021-01-12) + + +### Features + +* allow modifying LogEntry data using extra argument ([#129](https://www.github.com/googleapis/python-logging/issues/129)) ([92b287f](https://www.github.com/googleapis/python-logging/commit/92b287f424418fde137cc81f370dcab07f84023b)) +* support http_request field ([#120](https://www.github.com/googleapis/python-logging/issues/120)) ([ba94afb](https://www.github.com/googleapis/python-logging/commit/ba94afb7d0a5371f2d2de4232de56df34e8a1f99)) + + +### Bug Fixes + +* add InternalServerError to list of expected errors ([#151](https://www.github.com/googleapis/python-logging/issues/151)) ([9bf49f5](https://www.github.com/googleapis/python-logging/commit/9bf49f51df5321e8b9c39018dff7d767347256d6)) + + +### Documentation + +* fix usage guide ([#140](https://www.github.com/googleapis/python-logging/issues/140)) ([1ca3981](https://www.github.com/googleapis/python-logging/commit/1ca398103fdfefb5576d6ef2ba20cfa4bd4ab252)) + ### [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d50ed511d06b..96df33d25b94 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.0.2" +version = "2.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 0f3d4b13a8b0c7bd786dbb2925b4222edb622670 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 12 Jan 2021 22:23:17 +0100 Subject: [PATCH 366/855] chore(deps): update dependency google-cloud-bigquery to v2.6.2 (#150) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b071a67f3f3b..759dba97eaff 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.0.2 google-cloud-storage==1.35.0 google-cloud-pubsub==2.2.0 -google-cloud-bigquery==2.6.1 +google-cloud-bigquery==2.6.2 From 
62ce64865ff58372332ffe2bd52e2b015f73c38a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 12 Jan 2021 13:23:56 -0800 Subject: [PATCH 367/855] chore: Re-generated to pick up changes from synthtool. (#148) * chore(python): fix column sizing issue in docs Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 11:58:32 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: f15b57ccfd71106c2299e9b89835fe6e55015662 Source-Link: https://github.com/googleapis/synthtool/commit/f15b57ccfd71106c2299e9b89835fe6e55015662 * chore(python): use 'http' in LICENSE Co-authored-by: Tim Swast Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 13:05:12 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 41a4e56982620d3edcf110d76f4fcdfdec471ac8 Source-Link: https://github.com/googleapis/synthtool/commit/41a4e56982620d3edcf110d76f4fcdfdec471ac8 --- packages/google-cloud-logging/LICENSE | 7 ++++--- packages/google-cloud-logging/docs/_static/custom.css | 7 ++++++- packages/google-cloud-logging/synth.metadata | 6 +++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/LICENSE b/packages/google-cloud-logging/LICENSE index a8ee855de2aa..d64569567334 100644 --- a/packages/google-cloud-logging/LICENSE +++ b/packages/google-cloud-logging/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-logging/docs/_static/custom.css b/packages/google-cloud-logging/docs/_static/custom.css index 0abaf229fce3..bcd37bbd3c4a 100644 --- a/packages/google-cloud-logging/docs/_static/custom.css +++ b/packages/google-cloud-logging/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index d3cd0a5ccf64..a74610af8603 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "f81e7a694d24f3ba2ad4380bbf500b8bc463e314" + "sha": "212b4143d4e681356efc4bccff35cf7a435717ca" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" } } ], From 9b0ee5eb5b3db5ea78c5ec1b1aef459bb9959b20 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 14 Jan 2021 11:24:23 -0800 Subject: [PATCH 368/855] fix: use dict for http request (#156) --- .../cloud/logging_v2/handlers/_helpers.py 
| 43 ++++++++------- .../tests/unit/handlers/test__helpers.py | 54 +++++++++---------- 2 files changed, 48 insertions(+), 49 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 9821e95afebf..5a4abdbcc54a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -23,7 +23,6 @@ flask = None from google.cloud.logging_v2.handlers.middleware.request import _get_django_request -from google.logging.type.http_request_pb2 import HttpRequest _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" _DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT" @@ -55,7 +54,7 @@ def get_request_data_from_flask(): """Get http_request and trace data from flask request headers. Returns: - Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Tuple[Optional[dict], Optional[str]]: Data related to the current http request and the trace_id for the request. Both fields will be None if a flask request isn't found. """ @@ -63,15 +62,15 @@ def get_request_data_from_flask(): return None, None # build http_request - http_request = HttpRequest( - request_method=flask.request.method, - request_url=flask.request.url, - request_size=flask.request.content_length, - user_agent=flask.request.user_agent.string, - remote_ip=flask.request.remote_addr, - referer=flask.request.referrer, - protocol=flask.request.environ.get(_PROTOCOL_HEADER), - ) + http_request = { + "requestMethod": flask.request.method, + "requestUrl": flask.request.url, + "requestSize": flask.request.content_length, + "userAgent": flask.request.user_agent.string, + "remoteIp": flask.request.remote_addr, + "referer": flask.request.referrer, + "protocol": flask.request.environ.get(_PROTOCOL_HEADER), + } # find trace id trace_id = None @@ -86,7 +85,7 @@ def get_request_data_from_django(): """Get http_request and trace data from django request headers. Returns: - Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Tuple[Optional[dict], Optional[str]]: Data related to the current http request and the trace_id for the request. Both fields will be None if a django request isn't found. """ @@ -95,15 +94,15 @@ def get_request_data_from_django(): if request is None: return None, None # build http_request - http_request = HttpRequest( - request_method=request.method, - request_url=request.build_absolute_uri(), - request_size=len(request.body), - user_agent=request.META.get(_DJANGO_USERAGENT_HEADER), - remote_ip=request.META.get(_DJANGO_REMOTE_ADDR_HEADER), - referer=request.META.get(_DJANGO_REFERER_HEADER), - protocol=request.META.get(_PROTOCOL_HEADER), - ) + http_request = { + "requestMethod": request.method, + "requestUrl": request.build_absolute_uri(), + "requestSize": len(request.body), + "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), + "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), + "referer": request.META.get(_DJANGO_REFERER_HEADER), + "protocol": request.META.get(_PROTOCOL_HEADER), + } # find trace id trace_id = None @@ -119,7 +118,7 @@ def get_request_data(): frameworks (currently supported: Flask and Django). Returns: - Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Tuple[Optional[dict], Optional[str]]: Data related to the current http request and the trace_id for the request. 
Both fields will be None if a supported web request isn't found. """ diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 8fb37305be70..f1d89dffca09 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -17,9 +17,9 @@ import mock _FLASK_TRACE_ID = "flask-id" -_FLASK_HTTP_REQUEST = {"request_url": "https://flask.palletsprojects.com/en/1.1.x/"} +_FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} _DJANGO_TRACE_ID = "django-id" -_DJANGO_HTTP_REQUEST = {"request_url": "https://www.djangoproject.com/"} +_DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} class Test_get_request_data_from_flask(unittest.TestCase): @@ -47,7 +47,7 @@ def test_no_context_header(self): http_request, trace_id = self._call_fut() self.assertIsNone(trace_id) - self.assertEqual(http_request.request_method, "GET") + self.assertEqual(http_request["requestMethod"], "GET") def test_valid_context_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" @@ -63,7 +63,7 @@ def test_valid_context_header(self): http_request, trace_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) - self.assertEqual(http_request.request_method, "GET") + self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): expected_path = "http://testserver/123" @@ -86,13 +86,13 @@ def test_http_request_populated(self): ) http_request, trace_id = self._call_fut() - self.assertEqual(http_request.request_method, "PUT") - self.assertEqual(http_request.request_url, expected_path) - self.assertEqual(http_request.user_agent, expected_agent) - self.assertEqual(http_request.referer, expected_referrer) - self.assertEqual(http_request.remote_ip, expected_ip) - self.assertEqual(http_request.request_size, len(body_content)) - self.assertEqual(http_request.protocol, "HTTP/1.1") + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestUrl"], expected_path) + self.assertEqual(http_request["userAgent"], expected_agent) + self.assertEqual(http_request["referer"], expected_referrer) + self.assertEqual(http_request["remoteIp"], expected_ip) + self.assertEqual(http_request["requestSize"], len(body_content)) + self.assertEqual(http_request["protocol"], "HTTP/1.1") def test_http_request_sparse(self): expected_path = "http://testserver/123" @@ -100,9 +100,9 @@ def test_http_request_sparse(self): with app.test_client() as c: c.put(path=expected_path) http_request, trace_id = self._call_fut() - self.assertEqual(http_request.request_method, "PUT") - self.assertEqual(http_request.request_url, expected_path) - self.assertEqual(http_request.protocol, "HTTP/1.1") + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestUrl"], expected_path) + self.assertEqual(http_request["protocol"], "HTTP/1.1") class Test_get_request_data_from_django(unittest.TestCase): @@ -136,7 +136,7 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) http_request, trace_id = self._call_fut() - self.assertEqual(http_request.request_method, "GET") + self.assertEqual(http_request["requestMethod"], "GET") self.assertIsNone(trace_id) def test_valid_context_header(self): @@ -156,7 +156,7 @@ def test_valid_context_header(self): http_request, trace_id = self._call_fut() 
self.assertEqual(trace_id, expected_trace_id) - self.assertEqual(http_request.request_method, "GET") + self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): from django.test import RequestFactory @@ -176,13 +176,13 @@ def test_http_request_populated(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) http_request, trace_id = self._call_fut() - self.assertEqual(http_request.request_method, "PUT") - self.assertEqual(http_request.request_url, expected_path) - self.assertEqual(http_request.user_agent, expected_agent) - self.assertEqual(http_request.referer, expected_referrer) - self.assertEqual(http_request.remote_ip, "127.0.0.1") - self.assertEqual(http_request.request_size, len(body_content)) - self.assertEqual(http_request.protocol, "HTTP/1.1") + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestUrl"], expected_path) + self.assertEqual(http_request["userAgent"], expected_agent) + self.assertEqual(http_request["referer"], expected_referrer) + self.assertEqual(http_request["remoteIp"], "127.0.0.1") + self.assertEqual(http_request["requestSize"], len(body_content)) + self.assertEqual(http_request["protocol"], "HTTP/1.1") def test_http_request_sparse(self): from django.test import RequestFactory @@ -193,10 +193,10 @@ def test_http_request_sparse(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) http_request, trace_id = self._call_fut() - self.assertEqual(http_request.request_method, "PUT") - self.assertEqual(http_request.request_url, expected_path) - self.assertEqual(http_request.remote_ip, "127.0.0.1") - self.assertEqual(http_request.protocol, "HTTP/1.1") + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestUrl"], expected_path) + self.assertEqual(http_request["remoteIp"], "127.0.0.1") + self.assertEqual(http_request["protocol"], "HTTP/1.1") class Test_get_request_data(unittest.TestCase): From 300d36aebc2859a67714c733dfca36cac3adecbd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 14 Jan 2021 12:01:51 -0800 Subject: [PATCH 369/855] chore: release 2.1.1 (#157) --- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 9cab925d8e73..b5808e4b0b1f 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [2.1.1](https://www.github.com/googleapis/python-logging/compare/v2.1.0...v2.1.1) (2021-01-14) + + +### Bug Fixes + +* use dict for http request ([#156](https://www.github.com/googleapis/python-logging/issues/156)) ([dc26668](https://www.github.com/googleapis/python-logging/commit/dc266688b1e465112de0e3fe2e8d98003f6e7033)) + ## [2.1.0](https://www.github.com/googleapis/python-logging/compare/v2.0.2...v2.1.0) (2021-01-12) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 96df33d25b94..4009bca9a6c4 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.1.0" +version = "2.1.1" # Should be 
one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e4b77cd578e49c366cfe7e1bbbb633d9ba163e2b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 26 Jan 2021 15:52:44 -0800 Subject: [PATCH 370/855] test: reduced time filter length in system tests (#161) --- .../tests/system/test_system.py | 31 +++++++++++-------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 45126f5e59b2..9cd0ac25340f 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime +from datetime import datetime +from datetime import timedelta +from datetime import timezone import logging import os import pytest @@ -41,19 +43,21 @@ _RESOURCE_ID = unique_resource_id("-") DEFAULT_FILTER = "logName:syslog AND severity>=INFO" DEFAULT_DESCRIPTION = "System testing" +_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" retry_429 = RetryErrors(TooManyRequests) +_ten_mins_ago = datetime.now(timezone.utc) - timedelta(minutes=10) +_time_filter = f'timestamp>="{_ten_mins_ago.strftime(_TIME_FORMAT)}"' -def _consume_entries(logger): - """Consume all log entries from logger iterator. +def _consume_entries(logger): + """Consume all recent log entries from logger iterator. :type logger: :class:`~google.cloud.logging.logger.Logger` :param logger: A Logger containing entries. - :rtype: list :returns: List of all entries consumed. """ - return list(logger.list_entries()) + return list(logger.list_entries(filter_=_time_filter)) def _list_entries(logger): @@ -68,9 +72,12 @@ def _list_entries(logger): :rtype: list :returns: List of all entries consumed. 
""" - inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) + inner = RetryResult(_has_entries, delay=1, backoff=2, max_tries=6)(_consume_entries) outer = RetryErrors( - (ServiceUnavailable, ResourceExhausted, InternalServerError), max_tries=9 + (ServiceUnavailable, ResourceExhausted, InternalServerError), + delay=1, + backoff=2, + max_tries=6, )(inner) return outer(logger) @@ -147,7 +154,7 @@ def test_list_entry_with_unregistered(self): pool.FindMessageTypeByName(type_name) type_url = "type.googleapis.com/" + type_name - filter_ = self.TYPE_FILTER.format(type_url) + filter_ = self.TYPE_FILTER.format(type_url) + f" AND {_time_filter}" entry_iter = iter(Config.CLIENT.list_entries(page_size=1, filter_=filter_)) retry = RetryErrors(TooManyRequests) @@ -172,11 +179,9 @@ def test_log_text(self): self.assertEqual(entries[0].payload, TEXT_PAYLOAD) def test_log_text_with_timestamp(self): - import datetime - text_payload = "System test: test_log_text_with_timestamp" logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) - now = datetime.datetime.utcnow() + now = datetime.utcnow() self.to_delete.append(logger) @@ -185,13 +190,13 @@ def test_log_text_with_timestamp(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, text_payload) self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) - self.assertIsInstance(entries[0].received_timestamp, datetime.datetime) + self.assertIsInstance(entries[0].received_timestamp, datetime) def test_log_text_with_resource(self): text_payload = "System test: test_log_text_with_timestamp" logger = Config.CLIENT.logger(self._logger_name("log_text_res")) - now = datetime.datetime.utcnow() + now = datetime.utcnow() resource = Resource( type="gae_app", labels={"module_id": "default", "version_id": "test", "zone": ""}, From dafd94d37d95f55d5af21853a3870779c6950503 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 26 Jan 2021 16:04:29 -0800 Subject: [PATCH 371/855] fix: django content length extraction bug (#160) --- .../google/cloud/logging_v2/handlers/_helpers.py | 10 +++++++++- .../tests/unit/handlers/test__helpers.py | 3 +++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 5a4abdbcc54a..fff1e9a892d0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -24,6 +24,7 @@ from google.cloud.logging_v2.handlers.middleware.request import _get_django_request +_DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH" _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" _DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT" _DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR" @@ -93,11 +94,18 @@ def get_request_data_from_django(): if request is None: return None, None + + # convert content_length to int if it exists + content_length = None + try: + content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) + except (ValueError, TypeError): + content_length = None # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), - "requestSize": len(request.body), + "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), "referer": request.META.get(_DJANGO_REFERER_HEADER), diff --git 
a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index f1d89dffca09..fd17f6ffd33c 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -172,6 +172,9 @@ def test_http_request_populated(self): HTTP_USER_AGENT=expected_agent, HTTP_REFERER=expected_referrer, ) + # ensure test passes even after request has been read + # context: https://github.com/googleapis/python-logging/issues/159 + django_request.read() middleware = request.RequestMiddleware(None) middleware.process_request(django_request) From 29617192dd297bfa30927b728812cfa0eac1e05e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 Jan 2021 16:05:28 -0800 Subject: [PATCH 372/855] chore: Re-generated to pick up changes from synthtool. (#153) * chore(python): skip docfx in main presubmit * chore(python): skip docfx in main presubmit * fix: properly template the repo name Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Jan 8 10:32:13 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483 Source-Link: https://github.com/googleapis/synthtool/commit/fb53b6fb373b7c3edf4e55f3e8036bc6d73fa483 * chore: add missing quotation mark Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Mon Jan 11 09:43:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 16ec872dd898d7de6e1822badfac32484b5d9031 Source-Link: https://github.com/googleapis/synthtool/commit/16ec872dd898d7de6e1822badfac32484b5d9031 --- packages/google-cloud-logging/.kokoro/build.sh | 16 ++++++++++------ .../.kokoro/docs/docs-presubmit.cfg | 11 +++++++++++ packages/google-cloud-logging/.trampolinerc | 2 ++ packages/google-cloud-logging/noxfile.py | 11 +++++++++++ packages/google-cloud-logging/synth.metadata | 6 +++--- 5 files changed, 37 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index a194a9eadccb..7145c57b0bf4 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-logging +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-logging" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg index 1118107829b7..3d5288befd4a 100644 --- a/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index 995ee29111e1..c7d663ae9c57 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -18,12 +18,14 @@ required_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Add env vars which are passed down into the container here. pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 3db66c649206..e6873664062c 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index a74610af8603..a8052ac3b5f9 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "212b4143d4e681356efc4bccff35cf7a435717ca" + "sha": "4da135f3e9708737eb20ed3156e38b17a87e2f61" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } } ], From 31dde407f5113c3bd12c9f13bc70d87c80ab7c49 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 Jan 2021 01:05:50 +0100 Subject: [PATCH 373/855] chore(deps): update dependency google-cloud-logging to v2.1.1 (#152) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 759dba97eaff..db786f8893d9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.0.2 +google-cloud-logging==2.1.1 google-cloud-storage==1.35.0 google-cloud-pubsub==2.2.0 
google-cloud-bigquery==2.6.2 From 86f13f8bce01fd60dffef80a49d237b997080419 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 Jan 2021 16:16:28 -0800 Subject: [PATCH 374/855] chore: upgrade gapic-generator-python to 0.39.1 (#147) feat: add 'from_service_account_info' factory to clients fix: fix sphinx identifiers PiperOrigin-RevId: 350246057 Source-Author: Google APIs Source-Date: Tue Jan 5 16:44:11 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 520682435235d9c503983a360a2090025aa47cd1 Source-Link: https://github.com/googleapis/googleapis/commit/520682435235d9c503983a360a2090025aa47cd1 Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/.coveragerc | 34 +-- .../config_service_v2/async_client.py | 167 ++++++++------ .../services/config_service_v2/client.py | 212 ++++++++++-------- .../services/config_service_v2/pagers.py | 64 +++--- .../logging_service_v2/async_client.py | 44 ++-- .../services/logging_service_v2/client.py | 77 ++++--- .../services/logging_service_v2/pagers.py | 48 ++-- .../metrics_service_v2/async_client.py | 35 +-- .../services/metrics_service_v2/client.py | 66 ++++-- .../services/metrics_service_v2/pagers.py | 16 +- .../cloud/logging_v2/types/log_entry.py | 20 +- .../google/cloud/logging_v2/types/logging.py | 20 +- .../cloud/logging_v2/types/logging_config.py | 60 ++--- .../cloud/logging_v2/types/logging_metrics.py | 18 +- packages/google-cloud-logging/synth.metadata | 7 +- .../logging_v2/test_config_service_v2.py | 28 ++- .../logging_v2/test_logging_service_v2.py | 28 ++- .../logging_v2/test_metrics_service_v2.py | 28 ++- 18 files changed, 559 insertions(+), 413 deletions(-) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index 0d8e6297dc9c..cfcd5ac606f1 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -1,38 +1,18 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True -omit = - google/cloud/__init__.py [report] fail_under = 100 show_missing = True +omit = + google/cloud/logging/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py - google/cloud/__init__.py + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 73737c1d835a..9603b3754c8f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -90,6 +90,7 @@ class ConfigServiceV2AsyncClient: ConfigServiceV2Client.parse_common_location_path ) + from_service_account_info = ConfigServiceV2Client.from_service_account_info from_service_account_file = ConfigServiceV2Client.from_service_account_file from_service_account_json = from_service_account_file @@ -166,7 +167,7 @@ async def list_buckets( r"""Lists buckets. Args: - request (:class:`~.logging_config.ListBucketsRequest`): + request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be @@ -182,6 +183,7 @@ async def list_buckets( Note: The locations portion of the resource must be specified, but supplying the character ``-`` in place of [LOCATION_ID] will return all buckets. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -193,7 +195,7 @@ async def list_buckets( sent along with the request as metadata. Returns: - ~.pagers.ListBucketsAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: The response from ListBuckets. Iterating over this object will yield results and resolve additional pages @@ -255,7 +257,7 @@ async def get_bucket( r"""Gets a bucket. Args: - request (:class:`~.logging_config.GetBucketRequest`): + request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -265,7 +267,7 @@ async def get_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -305,7 +307,7 @@ async def create_bucket( cannot be changed. Args: - request (:class:`~.logging_config.CreateBucketRequest`): + request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -315,7 +317,7 @@ async def create_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -363,7 +365,7 @@ async def update_bucket( A buckets region may not be modified after it is created. Args: - request (:class:`~.logging_config.UpdateBucketRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -373,7 +375,7 @@ async def update_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. 
@@ -413,7 +415,7 @@ async def delete_bucket( the bucket will be permanently deleted. Args: - request (:class:`~.logging_config.DeleteBucketRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -457,7 +459,7 @@ async def undelete_bucket( may be undeleted within the grace period of 7 days. Args: - request (:class:`~.logging_config.UndeleteBucketRequest`): + request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -501,14 +503,15 @@ async def list_views( r"""Lists views on a bucket. Args: - request (:class:`~.logging_config.ListViewsRequest`): + request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): The request object. The parameters to `ListViews`. parent (:class:`str`): Required. The bucket whose views are to be listed: :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -520,7 +523,7 @@ async def list_views( sent along with the request as metadata. Returns: - ~.pagers.ListViewsAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. Iterating over this object will yield results and resolve additional pages @@ -582,7 +585,7 @@ async def get_view( r"""Gets a view. Args: - request (:class:`~.logging_config.GetViewRequest`): + request (:class:`google.cloud.logging_v2.types.GetViewRequest`): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -592,7 +595,7 @@ async def get_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -633,7 +636,7 @@ async def create_view( contain a maximum of 50 views. Args: - request (:class:`~.logging_config.CreateViewRequest`): + request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -643,7 +646,7 @@ async def create_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -684,7 +687,7 @@ async def update_view( existing view with values from the new view: ``filter``. Args: - request (:class:`~.logging_config.UpdateViewRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -694,7 +697,7 @@ async def update_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -734,7 +737,7 @@ async def delete_view( r"""Deletes a view from a bucket. Args: - request (:class:`~.logging_config.DeleteViewRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): The request object. The parameters to `DeleteView`. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -778,7 +781,7 @@ async def list_sinks( r"""Lists sinks. Args: - request (:class:`~.logging_config.ListSinksRequest`): + request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): The request object. The parameters to `ListSinks`. parent (:class:`str`): Required. The parent resource whose sinks are to be @@ -789,7 +792,8 @@ async def list_sinks( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -801,8 +805,8 @@ async def list_sinks( sent along with the request as metadata. Returns: - ~.pagers.ListSinksAsyncPager: - Result returned from ``ListSinks``. + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: + Result returned from ListSinks. Iterating over this object will yield results and resolve additional pages automatically. @@ -874,7 +878,7 @@ async def get_sink( r"""Gets a sink. Args: - request (:class:`~.logging_config.GetSinkRequest`): + request (:class:`google.cloud.logging_v2.types.GetSinkRequest`): The request object. The parameters to `GetSink`. sink_name (:class:`str`): Required. The resource name of the sink: @@ -887,6 +891,7 @@ async def get_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -898,7 +903,7 @@ async def get_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -977,7 +982,7 @@ async def create_sink( entries only from the resource owning the sink. Args: - request (:class:`~.logging_config.CreateSinkRequest`): + request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`): The request object. The parameters to `CreateSink`. parent (:class:`str`): Required. The resource in which to create the sink: @@ -991,12 +996,14 @@ async def create_sink( Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - sink (:class:`~.logging_config.LogSink`): + sink (:class:`google.cloud.logging_v2.types.LogSink`): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. + This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1008,7 +1015,7 @@ async def create_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -1079,7 +1086,7 @@ async def update_sink( the ``unique_writer_identity`` field. Args: - request (:class:`~.logging_config.UpdateSinkRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`): The request object. The parameters to `UpdateSink`. sink_name (:class:`str`): Required. 
The full resource name of the sink to update, @@ -1093,16 +1100,18 @@ async def update_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - sink (:class:`~.logging_config.LogSink`): + sink (:class:`google.cloud.logging_v2.types.LogSink`): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. + This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. Field mask that specifies the fields in ``sink`` that need an update. A sink field will be overwritten if, and only if, it is in the update mask. @@ -1118,6 +1127,7 @@ async def update_sink( https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1129,7 +1139,7 @@ async def update_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -1208,7 +1218,7 @@ async def delete_sink( then that service account is also deleted. Args: - request (:class:`~.logging_config.DeleteSinkRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`): The request object. The parameters to `DeleteSink`. sink_name (:class:`str`): Required. The full resource name of the sink to delete, @@ -1222,6 +1232,7 @@ async def delete_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1293,7 +1304,7 @@ async def list_exclusions( r"""Lists all the exclusions in a parent resource. Args: - request (:class:`~.logging_config.ListExclusionsRequest`): + request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`): The request object. The parameters to `ListExclusions`. parent (:class:`str`): Required. The parent resource whose exclusions are to be @@ -1304,7 +1315,8 @@ async def list_exclusions( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1316,8 +1328,8 @@ async def list_exclusions( sent along with the request as metadata. Returns: - ~.pagers.ListExclusionsAsyncPager: - Result returned from ``ListExclusions``. + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: + Result returned from ListExclusions. Iterating over this object will yield results and resolve additional pages automatically. @@ -1389,7 +1401,7 @@ async def get_exclusion( r"""Gets the description of an exclusion. Args: - request (:class:`~.logging_config.GetExclusionRequest`): + request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`): The request object. The parameters to `GetExclusion`. name (:class:`str`): Required. 
The resource name of an existing exclusion: @@ -1403,6 +1415,7 @@ async def get_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1414,7 +1427,7 @@ async def get_exclusion( sent along with the request as metadata. Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1492,7 +1505,7 @@ async def create_exclusion( resource. Args: - request (:class:`~.logging_config.CreateExclusionRequest`): + request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`): The request object. The parameters to `CreateExclusion`. parent (:class:`str`): Required. The parent resource in which to create the @@ -1507,13 +1520,15 @@ async def create_exclusion( Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`~.logging_config.LogExclusion`): + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): Required. The new exclusion, whose ``name`` parameter is an exclusion name that is not already used in the parent resource. + This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1525,7 +1540,7 @@ async def create_exclusion( sent along with the request as metadata. Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1594,7 +1609,7 @@ async def update_exclusion( exclusion. Args: - request (:class:`~.logging_config.UpdateExclusionRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): The request object. The parameters to `UpdateExclusion`. name (:class:`str`): Required. The resource name of the exclusion to update: @@ -1608,16 +1623,18 @@ async def update_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`~.logging_config.LogExclusion`): + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. + This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. A non-empty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the @@ -1628,6 +1645,7 @@ async def update_exclusion( For example, to change the filter and description of an exclusion, specify an ``update_mask`` of ``"filter,description"``. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1639,7 +1657,7 @@ async def update_exclusion( sent along with the request as metadata. 
Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1707,7 +1725,7 @@ async def delete_exclusion( r"""Deletes an exclusion. Args: - request (:class:`~.logging_config.DeleteExclusionRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): The request object. The parameters to `DeleteExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion to @@ -1722,6 +1740,7 @@ async def delete_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1798,7 +1817,7 @@ async def get_cmek_settings( for more information. Args: - request (:class:`~.logging_config.GetCmekSettingsRequest`): + request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs @@ -1812,19 +1831,19 @@ async def get_cmek_settings( sent along with the request as metadata. Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. - See `Enabling CMEK for Logs - Router `__ - for more information. + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. """ # Create or coerce a protobuf request object. @@ -1876,7 +1895,7 @@ async def update_cmek_settings( for more information. Args: - request (:class:`~.logging_config.UpdateCmekSettingsRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs @@ -1890,19 +1909,19 @@ async def update_cmek_settings( sent along with the request as metadata. Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. 
+ + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index a16f5f20b8bf..7d6492ba0eaf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -112,6 +112,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -124,7 +140,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + ConfigServiceV2Client: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -283,10 +299,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ConfigServiceV2Transport]): The + transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -396,9 +412,9 @@ def list_buckets( r"""Lists buckets. Args: - request (:class:`~.logging_config.ListBucketsRequest`): + request (google.cloud.logging_v2.types.ListBucketsRequest): The request object. The parameters to `ListBuckets`. - parent (:class:`str`): + parent (str): Required. The parent resource whose buckets are to be listed: @@ -412,6 +428,7 @@ def list_buckets( Note: The locations portion of the resource must be specified, but supplying the character ``-`` in place of [LOCATION_ID] will return all buckets. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -423,7 +440,7 @@ def list_buckets( sent along with the request as metadata. Returns: - ~.pagers.ListBucketsPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: The response from ListBuckets. Iterating over this object will yield results and resolve additional pages @@ -486,7 +503,7 @@ def get_bucket( r"""Gets a bucket. 
Args: - request (:class:`~.logging_config.GetBucketRequest`): + request (google.cloud.logging_v2.types.GetBucketRequest): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -496,7 +513,7 @@ def get_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -537,7 +554,7 @@ def create_bucket( cannot be changed. Args: - request (:class:`~.logging_config.CreateBucketRequest`): + request (google.cloud.logging_v2.types.CreateBucketRequest): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -547,7 +564,7 @@ def create_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -596,7 +613,7 @@ def update_bucket( A buckets region may not be modified after it is created. Args: - request (:class:`~.logging_config.UpdateBucketRequest`): + request (google.cloud.logging_v2.types.UpdateBucketRequest): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -606,7 +623,7 @@ def update_bucket( sent along with the request as metadata. Returns: - ~.logging_config.LogBucket: + google.cloud.logging_v2.types.LogBucket: Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -647,7 +664,7 @@ def delete_bucket( the bucket will be permanently deleted. Args: - request (:class:`~.logging_config.DeleteBucketRequest`): + request (google.cloud.logging_v2.types.DeleteBucketRequest): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -692,7 +709,7 @@ def undelete_bucket( may be undeleted within the grace period of 7 days. Args: - request (:class:`~.logging_config.UndeleteBucketRequest`): + request (google.cloud.logging_v2.types.UndeleteBucketRequest): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -737,14 +754,15 @@ def list_views( r"""Lists views on a bucket. Args: - request (:class:`~.logging_config.ListViewsRequest`): + request (google.cloud.logging_v2.types.ListViewsRequest): The request object. The parameters to `ListViews`. - parent (:class:`str`): + parent (str): Required. The bucket whose views are to be listed: :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -756,7 +774,7 @@ def list_views( sent along with the request as metadata. Returns: - ~.pagers.ListViewsPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. Iterating over this object will yield results and resolve additional pages @@ -819,7 +837,7 @@ def get_view( r"""Gets a view. Args: - request (:class:`~.logging_config.GetViewRequest`): + request (google.cloud.logging_v2.types.GetViewRequest): The request object. The parameters to `GetView`. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -829,7 +847,7 @@ def get_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -871,7 +889,7 @@ def create_view( contain a maximum of 50 views. Args: - request (:class:`~.logging_config.CreateViewRequest`): + request (google.cloud.logging_v2.types.CreateViewRequest): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -881,7 +899,7 @@ def create_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -923,7 +941,7 @@ def update_view( existing view with values from the new view: ``filter``. Args: - request (:class:`~.logging_config.UpdateViewRequest`): + request (google.cloud.logging_v2.types.UpdateViewRequest): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -933,7 +951,7 @@ def update_view( sent along with the request as metadata. Returns: - ~.logging_config.LogView: + google.cloud.logging_v2.types.LogView: Describes a view over logs in a bucket. @@ -974,7 +992,7 @@ def delete_view( r"""Deletes a view from a bucket. Args: - request (:class:`~.logging_config.DeleteViewRequest`): + request (google.cloud.logging_v2.types.DeleteViewRequest): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1019,9 +1037,9 @@ def list_sinks( r"""Lists sinks. Args: - request (:class:`~.logging_config.ListSinksRequest`): + request (google.cloud.logging_v2.types.ListSinksRequest): The request object. The parameters to `ListSinks`. - parent (:class:`str`): + parent (str): Required. The parent resource whose sinks are to be listed: @@ -1030,7 +1048,8 @@ def list_sinks( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1042,8 +1061,8 @@ def list_sinks( sent along with the request as metadata. Returns: - ~.pagers.ListSinksPager: - Result returned from ``ListSinks``. + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: + Result returned from ListSinks. Iterating over this object will yield results and resolve additional pages automatically. @@ -1106,9 +1125,9 @@ def get_sink( r"""Gets a sink. Args: - request (:class:`~.logging_config.GetSinkRequest`): + request (google.cloud.logging_v2.types.GetSinkRequest): The request object. The parameters to `GetSink`. - sink_name (:class:`str`): + sink_name (str): Required. The resource name of the sink: :: @@ -1119,6 +1138,7 @@ def get_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1130,7 +1150,7 @@ def get_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -1200,9 +1220,9 @@ def create_sink( entries only from the resource owning the sink. 
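The `update_sink` docstrings above describe the `sink_name`/`sink`/`update_mask` flattened parameters and how the `FieldMask` limits which sink fields are overwritten. A hedged sketch of that call shape, with every resource name and filter value a placeholder:

```python
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import LogSink
from google.protobuf import field_mask_pb2

client = ConfigServiceV2Client()  # assumes ambient application-default credentials

# Overwrite only the filter of an existing sink; the FieldMask restricts
# the update to the listed paths, leaving other sink fields untouched.
updated = client.update_sink(
    sink_name="projects/my-project-id/sinks/my-sink-id",
    sink=LogSink(filter="severity>=ERROR"),
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
)
print(updated.writer_identity)
```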
Args: - request (:class:`~.logging_config.CreateSinkRequest`): + request (google.cloud.logging_v2.types.CreateSinkRequest): The request object. The parameters to `CreateSink`. - parent (:class:`str`): + parent (str): Required. The resource in which to create the sink: :: @@ -1214,12 +1234,14 @@ def create_sink( Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - sink (:class:`~.logging_config.LogSink`): + sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. + This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1231,7 +1253,7 @@ def create_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -1303,9 +1325,9 @@ def update_sink( the ``unique_writer_identity`` field. Args: - request (:class:`~.logging_config.UpdateSinkRequest`): + request (google.cloud.logging_v2.types.UpdateSinkRequest): The request object. The parameters to `UpdateSink`. - sink_name (:class:`str`): + sink_name (str): Required. The full resource name of the sink to update, including the parent resource and the sink identifier: @@ -1317,16 +1339,18 @@ def update_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. + This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - sink (:class:`~.logging_config.LogSink`): + sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. + This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``sink`` that need an update. A sink field will be overwritten if, and only if, it is in the update mask. @@ -1342,6 +1366,7 @@ def update_sink( https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask Example: ``updateMask=filter``. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1353,7 +1378,7 @@ def update_sink( sent along with the request as metadata. Returns: - ~.logging_config.LogSink: + google.cloud.logging_v2.types.LogSink: Describes a sink used to export log entries to one of the following destinations in any project: a Cloud @@ -1423,9 +1448,9 @@ def delete_sink( then that service account is also deleted. Args: - request (:class:`~.logging_config.DeleteSinkRequest`): + request (google.cloud.logging_v2.types.DeleteSinkRequest): The request object. The parameters to `DeleteSink`. - sink_name (:class:`str`): + sink_name (str): Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: @@ -1437,6 +1462,7 @@ def delete_sink( "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1499,9 +1525,9 @@ def list_exclusions( r"""Lists all the exclusions in a parent resource. Args: - request (:class:`~.logging_config.ListExclusionsRequest`): + request (google.cloud.logging_v2.types.ListExclusionsRequest): The request object. The parameters to `ListExclusions`. - parent (:class:`str`): + parent (str): Required. The parent resource whose exclusions are to be listed. @@ -1510,7 +1536,8 @@ def list_exclusions( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1522,8 +1549,8 @@ def list_exclusions( sent along with the request as metadata. Returns: - ~.pagers.ListExclusionsPager: - Result returned from ``ListExclusions``. + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. Iterating over this object will yield results and resolve additional pages automatically. @@ -1586,9 +1613,9 @@ def get_exclusion( r"""Gets the description of an exclusion. Args: - request (:class:`~.logging_config.GetExclusionRequest`): + request (google.cloud.logging_v2.types.GetExclusionRequest): The request object. The parameters to `GetExclusion`. - name (:class:`str`): + name (str): Required. The resource name of an existing exclusion: :: @@ -1600,6 +1627,7 @@ def get_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1611,7 +1639,7 @@ def get_exclusion( sent along with the request as metadata. Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1680,9 +1708,9 @@ def create_exclusion( resource. Args: - request (:class:`~.logging_config.CreateExclusionRequest`): + request (google.cloud.logging_v2.types.CreateExclusionRequest): The request object. The parameters to `CreateExclusion`. - parent (:class:`str`): + parent (str): Required. The parent resource in which to create the exclusion: @@ -1695,13 +1723,15 @@ def create_exclusion( Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`~.logging_config.LogExclusion`): + exclusion (google.cloud.logging_v2.types.LogExclusion): Required. The new exclusion, whose ``name`` parameter is an exclusion name that is not already used in the parent resource. + This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1713,7 +1743,7 @@ def create_exclusion( sent along with the request as metadata. Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1783,9 +1813,9 @@ def update_exclusion( exclusion. Args: - request (:class:`~.logging_config.UpdateExclusionRequest`): + request (google.cloud.logging_v2.types.UpdateExclusionRequest): The request object. 
The parameters to `UpdateExclusion`. - name (:class:`str`): + name (str): Required. The resource name of the exclusion to update: :: @@ -1797,16 +1827,18 @@ def update_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - exclusion (:class:`~.logging_config.LogExclusion`): + exclusion (google.cloud.logging_v2.types.LogExclusion): Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. + This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. A non-empty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the @@ -1817,6 +1849,7 @@ def update_exclusion( For example, to change the filter and description of an exclusion, specify an ``update_mask`` of ``"filter,description"``. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1828,7 +1861,7 @@ def update_exclusion( sent along with the request as metadata. Returns: - ~.logging_config.LogExclusion: + google.cloud.logging_v2.types.LogExclusion: Specifies a set of log entries that are not to be stored in Logging. If your GCP resource receives a large volume of @@ -1897,9 +1930,9 @@ def delete_exclusion( r"""Deletes an exclusion. Args: - request (:class:`~.logging_config.DeleteExclusionRequest`): + request (google.cloud.logging_v2.types.DeleteExclusionRequest): The request object. The parameters to `DeleteExclusion`. - name (:class:`str`): + name (str): Required. The resource name of an existing exclusion to delete: @@ -1912,6 +1945,7 @@ def delete_exclusion( Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1979,7 +2013,7 @@ def get_cmek_settings( for more information. Args: - request (:class:`~.logging_config.GetCmekSettingsRequest`): + request (google.cloud.logging_v2.types.GetCmekSettingsRequest): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs @@ -1993,19 +2027,19 @@ def get_cmek_settings( sent along with the request as metadata. Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. - See `Enabling CMEK for Logs - Router `__ - for more information. + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. 
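The CMEK hunks here and above rewrite the `CmekSettings` return-type docs into the markdown-flavored form the new generator emits. For orientation, a hedged sketch of calling the accessor those docs describe; `GetCmekSettingsRequest` and its `name` field are taken from the type references in the diff, and the organization ID is a placeholder:

```python
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import GetCmekSettingsRequest

client = ConfigServiceV2Client()  # assumes ambient application-default credentials

# Per the docstrings, CMEK for the Logs Router is configured at the GCP
# organization level; "123456789" is a placeholder organization ID.
settings = client.get_cmek_settings(
    request=GetCmekSettingsRequest(name="organizations/123456789/cmekSettings")
)
print(settings.kms_key_name)
```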
""" # Create or coerce a protobuf request object. @@ -2058,7 +2092,7 @@ def update_cmek_settings( for more information. Args: - request (:class:`~.logging_config.UpdateCmekSettingsRequest`): + request (google.cloud.logging_v2.types.UpdateCmekSettingsRequest): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs @@ -2072,19 +2106,19 @@ def update_cmek_settings( sent along with the request as metadata. Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 8e1c4ee0d4ce..af5c5faf6af0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -24,7 +24,7 @@ class ListBucketsPager: """A pager for iterating through ``list_buckets`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListBucketsResponse` object, and + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and provides an ``__iter__`` method to iterate through its ``buckets`` field. @@ -33,7 +33,7 @@ class ListBucketsPager: through the ``buckets`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListBucketsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +51,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListBucketsRequest`): + request (google.cloud.logging_v2.types.ListBucketsRequest): The initial request object. - response (:class:`~.logging_config.ListBucketsResponse`): + response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +86,7 @@ class ListBucketsAsyncPager: """A pager for iterating through ``list_buckets`` requests. 
This class thinly wraps an initial - :class:`~.logging_config.ListBucketsResponse` object, and + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and provides an ``__aiter__`` method to iterate through its ``buckets`` field. @@ -95,7 +95,7 @@ class ListBucketsAsyncPager: through the ``buckets`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListBucketsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +113,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListBucketsRequest`): + request (google.cloud.logging_v2.types.ListBucketsRequest): The initial request object. - response (:class:`~.logging_config.ListBucketsResponse`): + response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -152,7 +152,7 @@ class ListViewsPager: """A pager for iterating through ``list_views`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListViewsResponse` object, and + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and provides an ``__iter__`` method to iterate through its ``views`` field. @@ -161,7 +161,7 @@ class ListViewsPager: through the ``views`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListViewsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -179,9 +179,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListViewsRequest`): + request (google.cloud.logging_v2.types.ListViewsRequest): The initial request object. - response (:class:`~.logging_config.ListViewsResponse`): + response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -214,7 +214,7 @@ class ListViewsAsyncPager: """A pager for iterating through ``list_views`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListViewsResponse` object, and + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and provides an ``__aiter__`` method to iterate through its ``views`` field. @@ -223,7 +223,7 @@ class ListViewsAsyncPager: through the ``views`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListViewsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -241,9 +241,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListViewsRequest`): + request (google.cloud.logging_v2.types.ListViewsRequest): The initial request object. 
- response (:class:`~.logging_config.ListViewsResponse`): + response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -280,7 +280,7 @@ class ListSinksPager: """A pager for iterating through ``list_sinks`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListSinksResponse` object, and + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and provides an ``__iter__`` method to iterate through its ``sinks`` field. @@ -289,7 +289,7 @@ class ListSinksPager: through the ``sinks`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListSinksResponse` + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -307,9 +307,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListSinksRequest`): + request (google.cloud.logging_v2.types.ListSinksRequest): The initial request object. - response (:class:`~.logging_config.ListSinksResponse`): + response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -342,7 +342,7 @@ class ListSinksAsyncPager: """A pager for iterating through ``list_sinks`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListSinksResponse` object, and + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and provides an ``__aiter__`` method to iterate through its ``sinks`` field. @@ -351,7 +351,7 @@ class ListSinksAsyncPager: through the ``sinks`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListSinksResponse` + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -369,9 +369,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListSinksRequest`): + request (google.cloud.logging_v2.types.ListSinksRequest): The initial request object. - response (:class:`~.logging_config.ListSinksResponse`): + response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -408,7 +408,7 @@ class ListExclusionsPager: """A pager for iterating through ``list_exclusions`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListExclusionsResponse` object, and + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and provides an ``__iter__`` method to iterate through its ``exclusions`` field. @@ -417,7 +417,7 @@ class ListExclusionsPager: through the ``exclusions`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListExclusionsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
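Each pager docstring in this file makes the same promise: iterating the pager yields items from the wrapped response and resolves additional pages transparently. A minimal sketch of that behavior with the sync buckets pager, assuming ambient credentials; the project ID is a placeholder, and `-` in the location slot returns buckets from all locations, as the `list_buckets` docs note:

```python
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()

# list_buckets returns a ListBucketsPager; iterating it yields LogBucket
# messages and issues follow-up page requests automatically.
pager = client.list_buckets(parent="projects/my-project/locations/-")
for bucket in pager:
    print(bucket.name)
```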
""" @@ -435,9 +435,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListExclusionsRequest`): + request (google.cloud.logging_v2.types.ListExclusionsRequest): The initial request object. - response (:class:`~.logging_config.ListExclusionsResponse`): + response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -470,7 +470,7 @@ class ListExclusionsAsyncPager: """A pager for iterating through ``list_exclusions`` requests. This class thinly wraps an initial - :class:`~.logging_config.ListExclusionsResponse` object, and + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and provides an ``__aiter__`` method to iterate through its ``exclusions`` field. @@ -479,7 +479,7 @@ class ListExclusionsAsyncPager: through the ``exclusions`` field on the corresponding responses. - All the usual :class:`~.logging_config.ListExclusionsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -497,9 +497,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_config.ListExclusionsRequest`): + request (google.cloud.logging_v2.types.ListExclusionsRequest): The initial request object. - response (:class:`~.logging_config.ListExclusionsResponse`): + response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 82ee957a3c4a..0c1ae3fae897 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -87,6 +87,7 @@ class LoggingServiceV2AsyncClient: LoggingServiceV2Client.parse_common_location_path ) + from_service_account_info = LoggingServiceV2Client.from_service_account_info from_service_account_file = LoggingServiceV2Client.from_service_account_file from_service_account_json = from_service_account_file @@ -167,7 +168,7 @@ async def delete_log( with a timestamp before the operation will be deleted. Args: - request (:class:`~.logging.DeleteLogRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): The request object. The parameters to DeleteLog. log_name (:class:`str`): Required. The resource name of the log to delete: @@ -184,6 +185,7 @@ async def delete_log( ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -262,7 +264,7 @@ async def write_log_entries( organizations, billing accounts or folders) Args: - request (:class:`~.logging.WriteLogEntriesRequest`): + request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): The request object. The parameters to WriteLogEntries. log_name (:class:`str`): Optional. A default log resource name that is assigned @@ -288,10 +290,11 @@ async def write_log_entries( folder that is receiving new log entries, whether the resource is specified in ``logName`` or in an individual log entry. + This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.monitored_resource.MonitoredResource`): + resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value for ``resource``. Example: @@ -303,19 +306,21 @@ async def write_log_entries( "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]`): + labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (:class:`Sequence[~.log_entry.LogEntry]`): + entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -345,6 +350,7 @@ async def write_log_entries( for calls to ``entries.write``, you should try to include several log entries in this list, rather than calling this method for each individual log entry. + This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -356,7 +362,7 @@ async def write_log_entries( sent along with the request as metadata. Returns: - ~.logging.WriteLogEntriesResponse: + google.cloud.logging_v2.types.WriteLogEntriesResponse: Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. @@ -426,7 +432,7 @@ async def list_log_entries( Logs `__. Args: - request (:class:`~.logging.ListLogEntriesRequest`): + request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): The request object. The parameters to `ListLogEntries`. resource_names (:class:`Sequence[str]`): Required. Names of one or more parent resources from @@ -447,6 +453,7 @@ async def list_log_entries( Projects listed in the ``project_ids`` field are added to this list. + This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -460,6 +467,7 @@ async def list_log_entries( resource that is not listed in ``resource_names`` will cause the filter to return no results. The maximum length of the filter is 20000 characters. 
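The `list_log_entries` docstrings spell out the flattened `resource_names`, `filter`, and `order_by` parameters. A hedged sketch using the sync client (the async variant shown in this file mirrors it); project ID, filter, and ordering are illustrative values drawn from the documented examples:

```python
from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client

client = LoggingServiceV2Client()  # assumes ambient application-default credentials

# Fetch recent ERROR-and-above entries, newest first; the returned pager
# resolves additional pages as it is iterated.
for entry in client.list_log_entries(
    resource_names=["projects/my-project"],
    filter="severity>=ERROR",
    order_by="timestamp desc",
):
    print(entry.log_name, entry.timestamp)
```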
+ This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -472,6 +480,7 @@ async def list_log_entries( option returns entries in order of decreasing timestamps (newest first). Entries with equal timestamps are returned in order of their ``insert_id`` values. + This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -483,8 +492,8 @@ async def list_log_entries( sent along with the request as metadata. Returns: - ~.pagers.ListLogEntriesAsyncPager: - Result returned from ``ListLogEntries``. + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: + Result returned from ListLogEntries. Iterating over this object will yield results and resolve additional pages automatically. @@ -555,7 +564,7 @@ async def list_monitored_resource_descriptors( used by Logging. Args: - request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): The request object. The parameters to ListMonitoredResourceDescriptors @@ -566,7 +575,7 @@ async def list_monitored_resource_descriptors( sent along with the request as metadata. Returns: - ~.pagers.ListMonitoredResourceDescriptorsAsyncPager: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: Result returned from ListMonitoredResourceDescriptors. Iterating over this object will yield @@ -622,7 +631,7 @@ async def list_logs( listed. Args: - request (:class:`~.logging.ListLogsRequest`): + request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): The request object. The parameters to ListLogs. parent (:class:`str`): Required. The resource name that owns the logs: @@ -632,7 +641,8 @@ async def list_logs( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -644,7 +654,7 @@ async def list_logs( sent along with the request as metadata. Returns: - ~.pagers.ListLogsAsyncPager: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: Result returned from ListLogs. Iterating over this object will yield results and resolve additional pages @@ -718,7 +728,7 @@ def tail_log_entries( logs. Args: - requests (AsyncIterator[`~.logging.TailLogEntriesRequest`]): + requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to `TailLogEntries`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -727,8 +737,8 @@ def tail_log_entries( sent along with the request as metadata. Returns: - AsyncIterable[~.logging.TailLogEntriesResponse]: - Result returned from ``TailLogEntries``. + AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. 
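The `write_log_entries` docstrings above describe defaulting: `log_name`, `resource`, and `labels` supplied at the call level apply to any entry that does not set its own. A hedged end-to-end sketch with the async client; the log name, resource type, and payload are illustrative, and `monitored_resource_pb2` is the `google.api` proto module the docstrings reference:

```python
import asyncio

from google.api import monitored_resource_pb2
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2AsyncClient,
)
from google.cloud.logging_v2.types import LogEntry


async def main():
    client = LoggingServiceV2AsyncClient()  # assumes ambient credentials

    # The entry omits log_name and resource, so the call-level defaults
    # below are applied to it, per the docstring semantics.
    entry = LogEntry(text_payload="hello world")
    response = await client.write_log_entries(
        log_name="projects/my-project/logs/my-log",
        resource=monitored_resource_pb2.MonitoredResource(type="global"),
        entries=[entry],
    )
    return response


asyncio.run(main())
```

As the docs advise, batching several entries per call to `entries.write` is preferred over one call per entry.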
""" # Wrap the RPC method; this adds retry and timeout information, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a54252bf7409..a340eb205996 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -122,6 +122,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -134,7 +150,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + LoggingServiceV2Client: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -237,10 +253,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The + transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -354,9 +370,9 @@ def delete_log( with a timestamp before the operation will be deleted. Args: - request (:class:`~.logging.DeleteLogRequest`): + request (google.cloud.logging_v2.types.DeleteLogRequest): The request object. The parameters to DeleteLog. - log_name (:class:`str`): + log_name (str): Required. The resource name of the log to delete: :: @@ -371,6 +387,7 @@ def delete_log( ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -440,9 +457,9 @@ def write_log_entries( organizations, billing accounts or folders) Args: - request (:class:`~.logging.WriteLogEntriesRequest`): + request (google.cloud.logging_v2.types.WriteLogEntriesRequest): The request object. The parameters to WriteLogEntries. - log_name (:class:`str`): + log_name (str): Optional. 
A default log resource name that is assigned to all log entries in ``entries`` that do not specify a value for ``log_name``: @@ -466,10 +483,11 @@ def write_log_entries( folder that is receiving new log entries, whether the resource is specified in ``logName`` or in an individual log entry. + This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.monitored_resource.MonitoredResource`): + resource (google.api.monitored_resource_pb2.MonitoredResource): Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value for ``resource``. Example: @@ -481,19 +499,21 @@ def write_log_entries( "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]`): + labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. + This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (:class:`Sequence[~.log_entry.LogEntry]`): + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -523,6 +543,7 @@ def write_log_entries( for calls to ``entries.write``, you should try to include several log entries in this list, rather than calling this method for each individual log entry. + This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -534,7 +555,7 @@ def write_log_entries( sent along with the request as metadata. Returns: - ~.logging.WriteLogEntriesResponse: + google.cloud.logging_v2.types.WriteLogEntriesResponse: Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. @@ -595,9 +616,9 @@ def list_log_entries( Logs `__. Args: - request (:class:`~.logging.ListLogEntriesRequest`): + request (google.cloud.logging_v2.types.ListLogEntriesRequest): The request object. The parameters to `ListLogEntries`. - resource_names (:class:`Sequence[str]`): + resource_names (Sequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -616,10 +637,11 @@ def list_log_entries( Projects listed in the ``project_ids`` field are added to this list. + This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - filter (:class:`str`): + filter (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs Queries `__. @@ -629,10 +651,11 @@ def list_log_entries( resource that is not listed in ``resource_names`` will cause the filter to return no results. The maximum length of the filter is 20000 characters. + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - order_by (:class:`str`): + order_by (str): Optional. 
How the results should be sorted. Presently, the only permitted values are ``"timestamp asc"`` (default) and ``"timestamp desc"``. The first option @@ -641,6 +664,7 @@ def list_log_entries( option returns entries in order of decreasing timestamps (newest first). Entries with equal timestamps are returned in order of their ``insert_id`` values. + This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -652,8 +676,8 @@ def list_log_entries( sent along with the request as metadata. Returns: - ~.pagers.ListLogEntriesPager: - Result returned from ``ListLogEntries``. + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: + Result returned from ListLogEntries. Iterating over this object will yield results and resolve additional pages automatically. @@ -715,7 +739,7 @@ def list_monitored_resource_descriptors( used by Logging. Args: - request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The request object. The parameters to ListMonitoredResourceDescriptors @@ -726,7 +750,7 @@ def list_monitored_resource_descriptors( sent along with the request as metadata. Returns: - ~.pagers.ListMonitoredResourceDescriptorsPager: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: Result returned from ListMonitoredResourceDescriptors. Iterating over this object will yield @@ -775,9 +799,9 @@ def list_logs( listed. Args: - request (:class:`~.logging.ListLogsRequest`): + request (google.cloud.logging_v2.types.ListLogsRequest): The request object. The parameters to ListLogs. - parent (:class:`str`): + parent (str): Required. The resource name that owns the logs: :: @@ -785,7 +809,8 @@ def list_logs( "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + "folders/[FOLDER_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -797,7 +822,7 @@ def list_logs( sent along with the request as metadata. Returns: - ~.pagers.ListLogsPager: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: Result returned from ListLogs. Iterating over this object will yield results and resolve additional pages @@ -862,7 +887,7 @@ def tail_log_entries( logs. Args: - requests (Iterator[`~.logging.TailLogEntriesRequest`]): + requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): The request object iterator. The parameters to `TailLogEntries`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -871,8 +896,8 @@ def tail_log_entries( sent along with the request as metadata. Returns: - Iterable[~.logging.TailLogEntriesResponse]: - Result returned from ``TailLogEntries``. + Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. 
""" # Wrap the RPC method; this adds retry and timeout information, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 72bbe8e23aa2..5492a3a30c5a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -26,7 +26,7 @@ class ListLogEntriesPager: """A pager for iterating through ``list_log_entries`` requests. This class thinly wraps an initial - :class:`~.logging.ListLogEntriesResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and provides an ``__iter__`` method to iterate through its ``entries`` field. @@ -35,7 +35,7 @@ class ListLogEntriesPager: through the ``entries`` field on the corresponding responses. - All the usual :class:`~.logging.ListLogEntriesResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -53,9 +53,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging.ListLogEntriesRequest`): + request (google.cloud.logging_v2.types.ListLogEntriesRequest): The initial request object. - response (:class:`~.logging.ListLogEntriesResponse`): + response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -88,7 +88,7 @@ class ListLogEntriesAsyncPager: """A pager for iterating through ``list_log_entries`` requests. This class thinly wraps an initial - :class:`~.logging.ListLogEntriesResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and provides an ``__aiter__`` method to iterate through its ``entries`` field. @@ -97,7 +97,7 @@ class ListLogEntriesAsyncPager: through the ``entries`` field on the corresponding responses. - All the usual :class:`~.logging.ListLogEntriesResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +115,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging.ListLogEntriesRequest`): + request (google.cloud.logging_v2.types.ListLogEntriesRequest): The initial request object. - response (:class:`~.logging.ListLogEntriesResponse`): + response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -154,7 +154,7 @@ class ListMonitoredResourceDescriptorsPager: """A pager for iterating through ``list_monitored_resource_descriptors`` requests. This class thinly wraps an initial - :class:`~.logging.ListMonitoredResourceDescriptorsResponse` object, and + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and provides an ``__iter__`` method to iterate through its ``resource_descriptors`` field. 
@@ -163,7 +163,7 @@ class ListMonitoredResourceDescriptorsPager: through the ``resource_descriptors`` field on the corresponding responses. - All the usual :class:`~.logging.ListMonitoredResourceDescriptorsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -181,9 +181,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The initial request object. - response (:class:`~.logging.ListMonitoredResourceDescriptorsResponse`): + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -216,7 +216,7 @@ class ListMonitoredResourceDescriptorsAsyncPager: """A pager for iterating through ``list_monitored_resource_descriptors`` requests. This class thinly wraps an initial - :class:`~.logging.ListMonitoredResourceDescriptorsResponse` object, and + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and provides an ``__aiter__`` method to iterate through its ``resource_descriptors`` field. @@ -225,7 +225,7 @@ class ListMonitoredResourceDescriptorsAsyncPager: through the ``resource_descriptors`` field on the corresponding responses. - All the usual :class:`~.logging.ListMonitoredResourceDescriptorsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -245,9 +245,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging.ListMonitoredResourceDescriptorsRequest`): + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The initial request object. - response (:class:`~.logging.ListMonitoredResourceDescriptorsResponse`): + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -288,7 +288,7 @@ class ListLogsPager: """A pager for iterating through ``list_logs`` requests. This class thinly wraps an initial - :class:`~.logging.ListLogsResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and provides an ``__iter__`` method to iterate through its ``log_names`` field. @@ -297,7 +297,7 @@ class ListLogsPager: through the ``log_names`` field on the corresponding responses. - All the usual :class:`~.logging.ListLogsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -315,9 +315,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. 
- request (:class:`~.logging.ListLogsRequest`): + request (google.cloud.logging_v2.types.ListLogsRequest): The initial request object. - response (:class:`~.logging.ListLogsResponse`): + response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -350,7 +350,7 @@ class ListLogsAsyncPager: """A pager for iterating through ``list_logs`` requests. This class thinly wraps an initial - :class:`~.logging.ListLogsResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and provides an ``__aiter__`` method to iterate through its ``log_names`` field. @@ -359,7 +359,7 @@ class ListLogsAsyncPager: through the ``log_names`` field on the corresponding responses. - All the usual :class:`~.logging.ListLogsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -377,9 +377,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging.ListLogsRequest`): + request (google.cloud.logging_v2.types.ListLogsRequest): The initial request object. - response (:class:`~.logging.ListLogsResponse`): + response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index bd3c759a1d7e..2c592e6859af 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -80,6 +80,7 @@ class MetricsServiceV2AsyncClient: MetricsServiceV2Client.parse_common_location_path ) + from_service_account_info = MetricsServiceV2Client.from_service_account_info from_service_account_file = MetricsServiceV2Client.from_service_account_file from_service_account_json = from_service_account_file @@ -156,7 +157,7 @@ async def list_log_metrics( r"""Lists logs-based metrics. Args: - request (:class:`~.logging_metrics.ListLogMetricsRequest`): + request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`): The request object. The parameters to ListLogMetrics. parent (:class:`str`): Required. The name of the project containing the @@ -164,7 +165,8 @@ async def list_log_metrics( :: - "projects/[PROJECT_ID]". + "projects/[PROJECT_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -176,7 +178,7 @@ async def list_log_metrics( sent along with the request as metadata. Returns: - ~.pagers.ListLogMetricsAsyncPager: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. Iterating over this object will yield results and resolve additional pages @@ -249,14 +251,15 @@ async def get_log_metric( r"""Gets a logs-based metric. Args: - request (:class:`~.logging_metrics.GetLogMetricRequest`): + request (:class:`google.cloud.logging_v2.types.GetLogMetricRequest`): The request object. 
The parameters to GetLogMetric. metric_name (:class:`str`): Required. The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -268,7 +271,7 @@ async def get_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -345,7 +348,7 @@ async def create_log_metric( r"""Creates a logs-based metric. Args: - request (:class:`~.logging_metrics.CreateLogMetricRequest`): + request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`): The request object. The parameters to CreateLogMetric. parent (:class:`str`): Required. The resource name of the project in which to @@ -356,13 +359,15 @@ async def create_log_metric( "projects/[PROJECT_ID]" The new metric must be provided in the request. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - metric (:class:`~.logging_metrics.LogMetric`): + metric (:class:`google.cloud.logging_v2.types.LogMetric`): Required. The new logs-based metric, which must not have an identifier that already exists. + This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -374,7 +379,7 @@ async def create_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -441,7 +446,7 @@ async def update_log_metric( r"""Creates or updates a logs-based metric. Args: - request (:class:`~.logging_metrics.UpdateLogMetricRequest`): + request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`): The request object. The parameters to UpdateLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to update: @@ -454,10 +459,11 @@ async def update_log_metric( it's ``name`` field must be the same as ``[METRIC_ID]`` If the metric does not exist in ``[PROJECT_ID]``, then a new metric is created. + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - metric (:class:`~.logging_metrics.LogMetric`): + metric (:class:`google.cloud.logging_v2.types.LogMetric`): Required. The updated metric. This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this @@ -470,7 +476,7 @@ async def update_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -548,14 +554,15 @@ async def delete_log_metric( r"""Deletes a logs-based metric. Args: - request (:class:`~.logging_metrics.DeleteLogMetricRequest`): + request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`): The request object. The parameters to DeleteLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". 
+ "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index d03ce86cdc25..cc6e491fcb4b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -114,6 +114,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -126,7 +142,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + MetricsServiceV2Client: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -231,10 +247,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.MetricsServiceV2Transport]): The + transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -344,15 +360,16 @@ def list_log_metrics( r"""Lists logs-based metrics. Args: - request (:class:`~.logging_metrics.ListLogMetricsRequest`): + request (google.cloud.logging_v2.types.ListLogMetricsRequest): The request object. The parameters to ListLogMetrics. - parent (:class:`str`): + parent (str): Required. The name of the project containing the metrics: :: - "projects/[PROJECT_ID]". + "projects/[PROJECT_ID]" + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -364,7 +381,7 @@ def list_log_metrics( sent along with the request as metadata. Returns: - ~.pagers.ListLogMetricsPager: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. Iterating over this object will yield results and resolve additional pages @@ -428,14 +445,15 @@ def get_log_metric( r"""Gets a logs-based metric. 
Args: - request (:class:`~.logging_metrics.GetLogMetricRequest`): + request (google.cloud.logging_v2.types.GetLogMetricRequest): The request object. The parameters to GetLogMetric. - metric_name (:class:`str`): + metric_name (str): Required. The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -447,7 +465,7 @@ def get_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -515,9 +533,9 @@ def create_log_metric( r"""Creates a logs-based metric. Args: - request (:class:`~.logging_metrics.CreateLogMetricRequest`): + request (google.cloud.logging_v2.types.CreateLogMetricRequest): The request object. The parameters to CreateLogMetric. - parent (:class:`str`): + parent (str): Required. The resource name of the project in which to create the metric: @@ -526,13 +544,15 @@ def create_log_metric( "projects/[PROJECT_ID]" The new metric must be provided in the request. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - metric (:class:`~.logging_metrics.LogMetric`): + metric (google.cloud.logging_v2.types.LogMetric): Required. The new logs-based metric, which must not have an identifier that already exists. + This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -544,7 +564,7 @@ def create_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -612,9 +632,9 @@ def update_log_metric( r"""Creates or updates a logs-based metric. Args: - request (:class:`~.logging_metrics.UpdateLogMetricRequest`): + request (google.cloud.logging_v2.types.UpdateLogMetricRequest): The request object. The parameters to UpdateLogMetric. - metric_name (:class:`str`): + metric_name (str): Required. The resource name of the metric to update: :: @@ -625,10 +645,11 @@ def update_log_metric( it's ``name`` field must be the same as ``[METRIC_ID]`` If the metric does not exist in ``[PROJECT_ID]``, then a new metric is created. + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - metric (:class:`~.logging_metrics.LogMetric`): + metric (google.cloud.logging_v2.types.LogMetric): Required. The updated metric. This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this @@ -641,7 +662,7 @@ def update_log_metric( sent along with the request as metadata. Returns: - ~.logging_metrics.LogMetric: + google.cloud.logging_v2.types.LogMetric: Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a @@ -710,14 +731,15 @@ def delete_log_metric( r"""Deletes a logs-based metric. Args: - request (:class:`~.logging_metrics.DeleteLogMetricRequest`): + request (google.cloud.logging_v2.types.DeleteLogMetricRequest): The request object. The parameters to DeleteLogMetric. - metric_name (:class:`str`): + metric_name (str): Required. 
The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 09010a6858b2..51c3985980cd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -24,7 +24,7 @@ class ListLogMetricsPager: """A pager for iterating through ``list_log_metrics`` requests. This class thinly wraps an initial - :class:`~.logging_metrics.ListLogMetricsResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and provides an ``__iter__`` method to iterate through its ``metrics`` field. @@ -33,7 +33,7 @@ class ListLogMetricsPager: through the ``metrics`` field on the corresponding responses. - All the usual :class:`~.logging_metrics.ListLogMetricsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +51,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_metrics.ListLogMetricsRequest`): + request (google.cloud.logging_v2.types.ListLogMetricsRequest): The initial request object. - response (:class:`~.logging_metrics.ListLogMetricsResponse`): + response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +86,7 @@ class ListLogMetricsAsyncPager: """A pager for iterating through ``list_log_metrics`` requests. This class thinly wraps an initial - :class:`~.logging_metrics.ListLogMetricsResponse` object, and + :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and provides an ``__aiter__`` method to iterate through its ``metrics`` field. @@ -95,7 +95,7 @@ class ListLogMetricsAsyncPager: through the ``metrics`` field on the corresponding responses. - All the usual :class:`~.logging_metrics.ListLogMetricsResponse` + All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +113,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.logging_metrics.ListLogMetricsRequest`): + request (google.cloud.logging_v2.types.ListLogMetricsRequest): The initial request object. - response (:class:`~.logging_metrics.ListLogMetricsResponse`): + response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
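NOTE (editorial, not part of the patch): the hunks above add a ``from_service_account_info`` factory to each client and document the pager classes that the list methods return. A minimal usage sketch under assumed names, where the key file path and project ID are illustrative and only ``from_service_account_info`` and ``list_log_metrics`` come from the code this patch touches:

    import json

    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )

    # Hypothetical key material; in practice this dict often comes from a
    # secret manager or an environment variable rather than the filesystem.
    with open("service-account.json") as fh:
        info = json.load(fh)

    # New in this patch: construct a client from an in-memory mapping instead
    # of a filename (compare from_service_account_file).
    client = MetricsServiceV2Client.from_service_account_info(info)

    # list_log_metrics() returns a ListLogMetricsPager; iterating it yields
    # LogMetric messages and resolves additional pages transparently.
    for metric in client.list_log_metrics(parent="projects/my-project"):
        print(metric.name)

The in-memory variant matters when key material never touches disk, which is presumably why it mirrors ``from_service_account_file`` so closely.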
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index a481557fd1fd..e63d6086f95f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -66,14 +66,14 @@ class LogEntry(proto.Message): Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. - resource (~.monitored_resource.MonitoredResource): + resource (google.api.monitored_resource_pb2.MonitoredResource): Required. The monitored resource that produced this log entry. Example: a log entry that reports a database error would be associated with the monitored resource designating the particular database that reported the error. - proto_payload (~.gp_any.Any): + proto_payload (google.protobuf.any_pb2.Any): The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log entry @@ -86,10 +86,10 @@ class LogEntry(proto.Message): text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). - json_payload (~.struct.Struct): + json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. - timestamp (~.gp_timestamp.Timestamp): + timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry occurred. This time is used to compute the log entry's age and to enforce the logs retention period. If this field is @@ -104,10 +104,10 @@ class LogEntry(proto.Message): in the past, and that don't exceed 24 hours in the future. Log entries outside those time boundaries aren't ingested by Logging. - receive_timestamp (~.gp_timestamp.Timestamp): + receive_timestamp (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the log entry was received by Logging. - severity (~.log_severity.LogSeverity): + severity (google.logging.type.log_severity_pb2.LogSeverity): Optional. The severity of the log entry. The default value is ``LogSeverity.DEFAULT``. insert_id (str): @@ -125,14 +125,14 @@ class LogEntry(proto.Message): In queries, the ``insert_id`` is also used to order log entries that have the same ``log_name`` and ``timestamp`` values. - http_request (~.glt_http_request.HttpRequest): + http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Sequence[~.log_entry.LogEntry.LabelsEntry]): + labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): Optional. A set of user-defined (key, value) data that provides additional information about the log entry. - operation (~.log_entry.LogEntryOperation): + operation (google.cloud.logging_v2.types.LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. trace (str): @@ -158,7 +158,7 @@ class LogEntry(proto.Message): log entry was written, or the sampling decision was unknown at the time. A non-sampled ``trace`` value is still useful as a request correlation identifier. The default is False. - source_location (~.log_entry.LogEntrySourceLocation): + source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. 
""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index cec8993f5cc8..ca739c02ce02 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -94,7 +94,7 @@ class WriteLogEntriesRequest(proto.Message): each project, organization, billing account, or folder that is receiving new log entries, whether the resource is specified in ``logName`` or in an individual log entry. - resource (~.monitored_resource.MonitoredResource): + resource (google.api.monitored_resource_pb2.MonitoredResource): Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value for ``resource``. Example: @@ -106,13 +106,13 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Sequence[~.logging.WriteLogEntriesRequest.LabelsEntry]): + labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. - entries (Sequence[~.log_entry.LogEntry]): + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, and ``labels`` @@ -181,7 +181,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. Attributes: - log_entry_errors (Sequence[~.logging.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based @@ -267,7 +267,7 @@ class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. Attributes: - entries (Sequence[~.log_entry.LogEntry]): + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, ``nextPageToken`` may still be returned, indicating that more entries may exist. See ``nextPageToken`` for more @@ -323,7 +323,7 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. Attributes: - resource_descriptors (Sequence[~.monitored_resource.MonitoredResourceDescriptor]): + resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. next_page_token (str): If there might be more results than those appearing in this @@ -442,7 +442,7 @@ class TailLogEntriesRequest(proto.Message): not in ``resource_names`` will cause the filter to return no results. The maximum length of the filter is 20000 characters. - buffer_window (~.duration.Duration): + buffer_window (google.protobuf.duration_pb2.Duration): Optional. 
The amount of time to buffer log entries at the server before being returned to prevent out of order results due to late @@ -462,12 +462,12 @@ class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. Attributes: - entries (Sequence[~.log_entry.LogEntry]): + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will order entries with increasing values of ``LogEntry.timestamp``. Ordering is not guaranteed between separate responses. - suppression_info (Sequence[~.logging.TailLogEntriesResponse.SuppressionInfo]): + suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): If entries that otherwise would have been included in the session were not sent back to the client, counts of relevant entries omitted @@ -482,7 +482,7 @@ class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. Attributes: - reason (~.logging.TailLogEntriesResponse.SuppressionInfo.Reason): + reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the session. suppressed_count (int): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index aaf057acffc8..0d1f896e09f6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -84,11 +84,11 @@ class LogBucket(proto.Message): location can not be changed. description (str): Describes this bucket. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the bucket. This is not set for any of the default buckets. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of the bucket. retention_days (int): @@ -103,7 +103,7 @@ class LogBucket(proto.Message): The retention period on a locked bucket may not be changed. Locked buckets may only be deleted if they are empty. - lifecycle_state (~.logging_config.LifecycleState): + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. """ @@ -133,10 +133,10 @@ class LogView(proto.Message): location/buckets/my-bucket-id/views/my-view description (str): Describes this view. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the view. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of the view. filter (str): @@ -206,12 +206,12 @@ class LogSink(proto.Message): disabled (bool): Optional. If set to True, then this sink is disabled and it does not export any log entries. - exclusions (Sequence[~.logging_config.LogExclusion]): + exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): Optional. Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both ``filter`` and one of ``exclusion_filters`` it will not be exported. - output_version_format (~.logging_config.LogSink.VersionFormat): + output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): Output only. 
An IAM identity—a service account or @@ -248,14 +248,14 @@ class LogSink(proto.Message): logName:("projects/test-project1/" OR "projects/test-project2/") AND resource.type=gce_instance - bigquery_options (~.logging_config.BigQueryOptions): + bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the sink. This field may not be present for older sinks. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of the sink. This field may not be present for older sinks. @@ -366,7 +366,7 @@ class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. Attributes: - buckets (Sequence[~.logging_config.LogBucket]): + buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. next_page_token (str): If there might be more results than appear in this response, @@ -401,7 +401,7 @@ class CreateBucketRequest(proto.Message): ``"my-bucket"``. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. - bucket (~.logging_config.LogBucket): + bucket (google.cloud.logging_v2.types.LogBucket): Required. The new bucket. The region specified in the new bucket must be compliant with any Location Restriction Org Policy. The @@ -434,9 +434,9 @@ class UpdateBucketRequest(proto.Message): Also requires permission "resourcemanager.projects.updateLiens" to set the locked property - bucket (~.logging_config.LogBucket): + bucket (google.cloud.logging_v2.types.LogBucket): Required. The updated bucket. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask that specifies the fields in ``bucket`` that need an update. A bucket field will be overwritten if, and only if, it is in the update mask. ``name`` and output @@ -552,7 +552,7 @@ class ListViewsResponse(proto.Message): r"""The response from ListViews. Attributes: - views (Sequence[~.logging_config.LogView]): + views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. next_page_token (str): If there might be more results than appear in this response, @@ -585,7 +585,7 @@ class CreateViewRequest(proto.Message): ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` view_id (str): Required. The id to use for this view. - view (~.logging_config.LogView): + view (google.cloud.logging_v2.types.LogView): Required. The new view. """ @@ -609,9 +609,9 @@ class UpdateViewRequest(proto.Message): Example: ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. - view (~.logging_config.LogView): + view (google.cloud.logging_v2.types.LogView): Required. The updated view. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``view`` that need an update. A field will be overwritten if, and only if, it is in the update mask. ``name`` and output only @@ -703,7 +703,7 @@ class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. Attributes: - sinks (Sequence[~.logging_config.LogSink]): + sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. 
next_page_token (str): If there might be more results than appear in this response, @@ -757,7 +757,7 @@ class CreateSinkRequest(proto.Message): Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. - sink (~.logging_config.LogSink): + sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. unique_writer_identity (bool): @@ -800,7 +800,7 @@ class UpdateSinkRequest(proto.Message): "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: ``"projects/my-project-id/sinks/my-sink-id"``. - sink (~.logging_config.LogSink): + sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. unique_writer_identity (bool): @@ -819,7 +819,7 @@ class UpdateSinkRequest(proto.Message): account. - It is an error if the old value is true and the new value is set to false or defaulted to false. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``sink`` that need an update. A sink field will be overwritten if, and only if, it is in the update mask. ``name`` and output @@ -901,12 +901,12 @@ class LogExclusion(proto.Message): and it does not exclude any log entries. You can [update an exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion] to change the value of this field. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the exclusion. This field may not be present for older exclusions. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of the exclusion. This field may not be present for older @@ -964,7 +964,7 @@ class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. Attributes: - exclusions (Sequence[~.logging_config.LogExclusion]): + exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. next_page_token (str): If there might be more results than appear in this response, @@ -1020,7 +1020,7 @@ class CreateExclusionRequest(proto.Message): Examples: ``"projects/my-logging-project"``, ``"organizations/123456789"``. - exclusion (~.logging_config.LogExclusion): + exclusion (google.cloud.logging_v2.types.LogExclusion): Required. The new exclusion, whose ``name`` parameter is an exclusion name that is not already used in the parent resource. @@ -1047,10 +1047,10 @@ class UpdateExclusionRequest(proto.Message): Example: ``"projects/my-project-id/exclusions/my-exclusion-id"``. - exclusion (~.logging_config.LogExclusion): + exclusion (google.cloud.logging_v2.types.LogExclusion): Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. A non-empty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the @@ -1145,13 +1145,13 @@ class UpdateCmekSettingsRequest(proto.Message): Note: CMEK for the Logs Router can currently only be configured for GCP organizations. Once configured, it applies to all projects and folders in the GCP organization. - cmek_settings (~.logging_config.CmekSettings): + cmek_settings (google.cloud.logging_v2.types.CmekSettings): Required. 
The CMEK settings to update. See `Enabling CMEK for Logs Router `__ for more information. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask identifying which fields from ``cmek_settings`` should be updated. A field will be overwritten if and only if it is in the update mask. Output diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index a9642d13ba52..c2a8a60072a5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -78,7 +78,7 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. - metric_descriptor (~.ga_metric.MetricDescriptor): + metric_descriptor (google.api.metric_pb2.MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric descriptor with a DELTA metric kind, INT64 value type, with @@ -128,7 +128,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Sequence[~.logging_metrics.LogMetric.LabelExtractorsEntry]): + label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. Each label key @@ -146,20 +146,20 @@ class LogMetric(proto.Message): Note that there are upper bounds on the maximum number of labels and the number of active time series that are allowed in a project. - bucket_options (~.distribution.Distribution.BucketOptions): + bucket_options (google.api.distribution_pb2.BucketOptions): Optional. The ``bucket_options`` are required when the logs-based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to create a histogram of the extracted values. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the metric. This field may not be present for older metrics. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of the metric. This field may not be present for older metrics. - version (~.logging_metrics.LogMetric.ApiVersion): + version (google.cloud.logging_v2.types.LogMetric.ApiVersion): Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be changed. @@ -229,7 +229,7 @@ class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. Attributes: - metrics (Sequence[~.logging_metrics.LogMetric]): + metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. next_page_token (str): If there might be more results than appear in this response, @@ -275,7 +275,7 @@ class CreateLogMetricRequest(proto.Message): "projects/[PROJECT_ID]" The new metric must be provided in the request. - metric (~.logging_metrics.LogMetric): + metric (google.cloud.logging_v2.types.LogMetric): Required. The new logs-based metric, which must not have an identifier that already exists. 
""" @@ -300,7 +300,7 @@ class UpdateLogMetricRequest(proto.Message): ``name`` field must be the same as ``[METRIC_ID]`` If the metric does not exist in ``[PROJECT_ID]``, then a new metric is created. - metric (~.logging_metrics.LogMetric): + metric (google.cloud.logging_v2.types.LogMetric): Required. The updated metric. """ diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index a8052ac3b5f9..a562b73f20a6 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "4da135f3e9708737eb20ed3156e38b17a87e2f61" + "sha": "9611810083e3184073b26ce5a143b70ce1324502" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", - "internalRef": "347055288" + "sha": "520682435235d9c503983a360a2090025aa47cd1", + "internalRef": "350246057" } }, { @@ -42,6 +42,7 @@ } ], "generatedFiles": [ + ".coveragerc", ".flake8", ".github/CONTRIBUTING.md", ".github/ISSUE_TEMPLATE/bug_report.md", diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 47a41f25c3cb..a2685b497c5f 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -88,8 +88,21 @@ def test__get_default_mtls_endpoint(): ) +def test_config_service_v2_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = ConfigServiceV2Client.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -108,7 +121,10 @@ def test_config_service_v2_client_from_service_account_file(client_class): def test_config_service_v2_client_get_transport_class(): transport = ConfigServiceV2Client.get_transport_class() - assert transport == transports.ConfigServiceV2GrpcTransport + available_transports = [ + transports.ConfigServiceV2GrpcTransport, + ] + assert transport in available_transports transport = ConfigServiceV2Client.get_transport_class("grpc") assert transport == transports.ConfigServiceV2GrpcTransport @@ -5311,7 +5327,7 @@ def test_config_service_v2_host_with_port(): def test_config_service_v2_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.ConfigServiceV2GrpcTransport( @@ -5323,7 +5339,7 @@ def test_config_service_v2_grpc_transport_channel(): def test_config_service_v2_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcAsyncIOTransport( @@ -5348,7 +5364,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -5406,7 +5422,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 2b8129f299af..110a383c05fe 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -95,8 +95,21 @@ def test__get_default_mtls_endpoint(): ) +def test_logging_service_v2_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = LoggingServiceV2Client.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -115,7 +128,10 @@ def test_logging_service_v2_client_from_service_account_file(client_class): def test_logging_service_v2_client_get_transport_class(): transport = LoggingServiceV2Client.get_transport_class() - assert transport == transports.LoggingServiceV2GrpcTransport + available_transports = [ + transports.LoggingServiceV2GrpcTransport, + ] + assert transport in available_transports transport = LoggingServiceV2Client.get_transport_class("grpc") assert transport == transports.LoggingServiceV2GrpcTransport @@ -1976,7 +1992,7 @@ def test_logging_service_v2_host_with_port(): def test_logging_service_v2_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -1988,7 +2004,7 @@ def test_logging_service_v2_grpc_transport_channel(): def test_logging_service_v2_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcAsyncIOTransport( @@ -2013,7 +2029,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2072,7 +2088,7 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 0bc10e4bc960..8ae5fdc5481d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,8 +94,21 @@ def test__get_default_mtls_endpoint(): ) +def test_metrics_service_v2_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = MetricsServiceV2Client.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "logging.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) def test_metrics_service_v2_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -114,7 +127,10 @@ def test_metrics_service_v2_client_from_service_account_file(client_class): def test_metrics_service_v2_client_get_transport_class(): transport = MetricsServiceV2Client.get_transport_class() - assert transport == transports.MetricsServiceV2GrpcTransport + available_transports = [ + transports.MetricsServiceV2GrpcTransport, + ] + assert transport in available_transports transport = MetricsServiceV2Client.get_transport_class("grpc") assert transport == transports.MetricsServiceV2GrpcTransport @@ -1921,7 +1937,7 @@ def test_metrics_service_v2_host_with_port(): def test_metrics_service_v2_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.MetricsServiceV2GrpcTransport( @@ -1933,7 +1949,7 @@ def test_metrics_service_v2_grpc_transport_channel(): def test_metrics_service_v2_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcAsyncIOTransport( @@ -1958,7 +1974,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2017,7 +2033,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel From cd76cfca2b338d21bfc69ac3847f3bce7fdb687f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 26 Jan 2021 16:37:20 -0800 Subject: [PATCH 375/855] chore: Re-generated to pick up changes from self (#146) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index a562b73f20a6..28c8b61cc295 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "9611810083e3184073b26ce5a143b70ce1324502" + "sha": "c89dea4899a2fd7175c2849f158fb921fc017a15" } }, { @@ -131,6 +131,7 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", + "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From e17de0395ff995337e69bba88e163daa33b63e04 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 26 Jan 2021 16:59:23 -0800 Subject: [PATCH 376/855] chore: release 2.2.0 (#163) --- packages/google-cloud-logging/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index b5808e4b0b1f..b89976db1bcc 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.2.0](https://www.github.com/googleapis/python-logging/compare/v2.1.1...v2.2.0) (2021-01-27) + + +### Features + +* add 'from_service_account_info' factory to clients ([a9ff2b7](https://www.github.com/googleapis/python-logging/commit/a9ff2b7984a54542963fc8d52864365ef1562f57)) + + +### Bug Fixes + +* 
django content length extraction bug ([#160](https://www.github.com/googleapis/python-logging/issues/160)) ([93eeaef](https://www.github.com/googleapis/python-logging/commit/93eeaef1cce286aa8aa830d2369212b912d184b6)) +* fix sphinx identifiers ([a9ff2b7](https://www.github.com/googleapis/python-logging/commit/a9ff2b7984a54542963fc8d52864365ef1562f57)) + ### [2.1.1](https://www.github.com/googleapis/python-logging/compare/v2.1.0...v2.1.1) (2021-01-14) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 4009bca9a6c4..8ede9877ec2e 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.1.1" +version = "2.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From c2e46689f19629703027224274b45a1d65b34894 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 Jan 2021 19:22:40 +0100 Subject: [PATCH 377/855] chore(deps): update dependency google-cloud-logging to v2.2.0 (#164) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index db786f8893d9..cb5be4aef45e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.1.1 +google-cloud-logging==2.2.0 google-cloud-storage==1.35.0 google-cloud-pubsub==2.2.0 google-cloud-bigquery==2.6.2 From 4a1c8d17ed0d5e52bc7f04f6db15eca821be70c0 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Fri, 29 Jan 2021 17:08:02 -0800 Subject: [PATCH 378/855] build: migrate to flakybot (#170) --- packages/google-cloud-logging/.kokoro/test-samples.sh | 8 ++++---- packages/google-cloud-logging/.kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index ba97b53d500c..e75891832fd4 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh index 719bcd5ba84d..4af6cdc26dbc 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) From c99de593d61c7360268314724b439250f1f1c9fa Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Feb 2021 19:32:33 +0100 Subject: [PATCH 379/855] chore(deps): update dependency google-cloud-bigquery to v2.7.0 (#166) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index cb5be4aef45e..94edb42b3c9c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 google-cloud-storage==1.35.0 google-cloud-pubsub==2.2.0 -google-cloud-bigquery==2.6.2 +google-cloud-bigquery==2.7.0 From 614808af51c6c1b31997c2b7cf4b3da810339844 Mon Sep 17 00:00:00 2001 From: Nicole Zhu <69952136+nicoleczhu@users.noreply.github.com> Date: Thu, 4 Feb 2021 10:59:04 -0800 Subject: [PATCH 380/855] docs: add python std_logging to sample browser (#173) * docs: add existing region tag to python std logging This is so this existing Python example will show up here: https://cloud.google.com/logging/docs/samples/logging-stdlogging --- packages/google-cloud-logging/samples/snippets/handler.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py index 9a63d022f93b..0a708c1383cc 100644 --- a/packages/google-cloud-logging/samples/snippets/handler.py +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -16,6 +16,7 @@ def use_logging_handler(): + # [START logging_stdlogging] # [START logging_handler_setup] # Imports the Cloud Logging client library import google.cloud.logging @@ -43,6 +44,7 @@ def use_logging_handler(): # [END logging_handler_usage] print("Logged: {}".format(text)) + # [END logging_stdlogging] if __name__ == "__main__": From cffaff472ca21b3cca4cb3f30a3dca6469c12b19 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 8 Feb 2021 09:25:39 -0800 Subject: [PATCH 381/855] chore: update templates (#168) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: add 3.9 to noxfile template Since the python-docs-samples noxfile-template doesn't sync with this, I wanted to make sure the noxfile template matched the most recent change [here](https://github.com/GoogleCloudPlatform/python-docs-samples/pull/4968/files) cc @tmatsuo Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Fri Jan 15 17:24:05 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f Source-Link: https://github.com/googleapis/synthtool/commit/56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f * build(python): make `NOX_SESSION` optional I added this accidentally in #889. `NOX_SESSION` should be passed down if it is set but not marked required. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Jan 19 09:38:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: ba960d730416fe05c50547e975ce79fcee52c671 Source-Link: https://github.com/googleapis/synthtool/commit/ba960d730416fe05c50547e975ce79fcee52c671 * chore: Add header checker config to python library synth Now that we have it working in [python-docs-samples](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/.github/header-checker-lint.yml) we should consider adding it to the 🐍 libraries :) Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Mon Jan 25 13:24:08 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 573f7655311b553a937f9123bee17bf78497db95 Source-Link: https://github.com/googleapis/synthtool/commit/573f7655311b553a937f9123bee17bf78497db95 * chore: add noxfile parameters for extra dependencies Also, add tests for some noxfile parameters for assurance that the template generates valid Python. Co-authored-by: Jeffrey Rennie Source-Author: Tim Swast Source-Date: Tue Jan 26 12:26:57 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 778d8beae28d6d87eb01fdc839a4b4d966ed2ebe Source-Link: https://github.com/googleapis/synthtool/commit/778d8beae28d6d87eb01fdc839a4b4d966ed2ebe --- .../.github/header-checker-lint.yml | 15 +++++++++++++++ packages/google-cloud-logging/.trampolinerc | 1 - .../google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/noxfile.py | 1 + .../samples/snippets/noxfile.py | 2 +- packages/google-cloud-logging/synth.metadata | 8 ++++---- 6 files changed, 21 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-logging/.github/header-checker-lint.yml delete mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/.github/header-checker-lint.yml b/packages/google-cloud-logging/.github/header-checker-lint.yml new file mode 100644 index 000000000000..fc281c05bd55 --- /dev/null +++ b/packages/google-cloud-logging/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index c7d663ae9c57..383b6ec89fbc 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -18,7 +18,6 @@ required_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" ) # Add env vars which are passed down into the 
container here. diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index e6873664062c..19eef5d15cfb 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -84,6 +84,7 @@ def default(session): session.install("asyncmock", "pytest-asyncio") session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django") + session.install("-e", ".") # Run py.test against the unit tests. diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index bca0522ec4d9..97bf7da80e39 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 28c8b61cc295..407d059c2489 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "c89dea4899a2fd7175c2849f158fb921fc017a15" + "sha": "3d765ae66979ae066bcea07fe9ca01008ec4e4d7" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" + "sha": "778d8beae28d6d87eb01fdc839a4b4d966ed2ebe" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" + "sha": "778d8beae28d6d87eb01fdc839a4b4d966ed2ebe" } } ], @@ -49,6 +49,7 @@ ".github/ISSUE_TEMPLATE/feature_request.md", ".github/ISSUE_TEMPLATE/support_request.md", ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", ".github/release-please.yml", ".github/snippet-bot.yml", ".gitignore", @@ -131,7 +132,6 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From b1e7c91453e7900bfb2e78acd28678b76b92f12f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 9 Feb 2021 12:07:46 -0800 Subject: [PATCH 382/855] test: added submodule and kokoro configs for environment tests (#175) --- packages/google-cloud-logging/.flake8 | 3 + packages/google-cloud-logging/.gitmodules | 3 + .../appengine_flex_container/common.cfg | 34 +++ .../appengine_flex_container/continuous.cfg | 1 + .../appengine_flex_container/presubmit.cfg | 1 + .../appengine_flex_python/common.cfg | 34 +++ .../appengine_flex_python/continuous.cfg | 1 + .../appengine_flex_python/presubmit.cfg | 1 + .../environment/appengine_standard/common.cfg | 34 +++ .../appengine_standard/continuous.cfg | 1 + .../appengine_standard/presubmit.cfg | 1 + .../.kokoro/environment/cloudrun/common.cfg | 34 +++ .../environment/cloudrun/continuous.cfg | 1 + .../environment/cloudrun/presubmit.cfg | 1 + 
.../.kokoro/environment/compute/common.cfg | 34 +++ .../environment/compute/continuous.cfg | 1 + .../.kokoro/environment/compute/presubmit.cfg | 1 + .../.kokoro/environment/functions/common.cfg | 34 +++ .../environment/functions/continuous.cfg | 1 + .../environment/functions/presubmit.cfg | 1 + .../.kokoro/environment/kubernetes/common.cfg | 34 +++ .../environment/kubernetes/continuous.cfg | 1 + .../environment/kubernetes/presubmit.cfg | 1 + .../.kokoro/environment_tests.sh | 64 ++++++ packages/google-cloud-logging/.trampolinerc | 6 +- packages/google-cloud-logging/synth.py | 21 +- .../tests/environment/.gitignore | 138 ++++++++++++ .../tests/environment/CODE_OF_CONDUCT.md | 93 ++++++++ .../tests/environment/CONTRIBUTING.md | 28 +++ .../tests/environment/LICENSE | 202 ++++++++++++++++++ .../tests/environment/README.md | 66 ++++++ .../deployable/python/.dockerignore | 2 + .../environment/deployable/python/.gitignore | 2 + .../environment/deployable/python/Dockerfile | 37 ++++ .../deployable/python/requirements.txt | 2 + .../environment/deployable/python/router.py | 116 ++++++++++ .../environment/deployable/python/snippets.py | 47 ++++ .../python/appengine_flex_container.sh | 79 +++++++ .../python/appengine_flex_python.sh | 83 +++++++ .../env_scripts/python/appengine_standard.sh | 83 +++++++ .../envctl/env_scripts/python/cloudrun.sh | 87 ++++++++ .../envctl/env_scripts/python/compute.sh | 61 ++++++ .../envctl/env_scripts/python/functions.sh | 76 +++++++ .../envctl/env_scripts/python/kubernetes.sh | 101 +++++++++ .../tests/environment/envctl/envctl | 129 +++++++++++ .../tests/environment/noxfile.py | 161 ++++++++++++++ .../tests/environment/tests/__init__.py | 13 ++ .../environment/tests/common/__init__.py | 13 ++ .../tests/environment/tests/common/common.py | 103 +++++++++ .../environment/tests/common/script_utils.py | 69 ++++++ .../environment/tests/python/__init__.py | 13 ++ .../python/test_appengine_flex_container.py | 26 +++ .../python/test_appengine_flex_python.py | 26 +++ .../tests/python/test_appengine_standard.py | 26 +++ .../environment/tests/python/test_cloudrun.py | 26 +++ .../environment/tests/python/test_compute.py | 26 +++ .../tests/python/test_functions.py | 26 +++ .../tests/python/test_kubernetes.py | 26 +++ tests/environment | 1 + 59 files changed, 2331 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-logging/.gitmodules create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg create mode 100644 
packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg create mode 100755 packages/google-cloud-logging/.kokoro/environment_tests.sh create mode 100644 packages/google-cloud-logging/tests/environment/.gitignore create mode 100644 packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-logging/tests/environment/CONTRIBUTING.md create mode 100644 packages/google-cloud-logging/tests/environment/LICENSE create mode 100644 packages/google-cloud-logging/tests/environment/README.md create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/.dockerignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/.gitignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/router.py create mode 100644 packages/google-cloud-logging/tests/environment/deployable/python/snippets.py create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_container.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/envctl create mode 100644 packages/google-cloud-logging/tests/environment/noxfile.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/common.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/script_utils.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py create mode 100644 
packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_compute.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_functions.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py create mode 160000 tests/environment diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 29227d4cf419..1a7e4989e86a 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -18,6 +18,9 @@ [flake8] ignore = E203, E266, E501, W503 exclude = + # Exclude environment test code. + tests/environment/** + # Exclude generated code. **/proto/** **/gapic/** diff --git a/packages/google-cloud-logging/.gitmodules b/packages/google-cloud-logging/.gitmodules new file mode 100644 index 000000000000..19484c9e8d3e --- /dev/null +++ b/packages/google-cloud-logging/.gitmodules @@ -0,0 +1,3 @@ +[submodule "tests/environment"] + path = tests/environment + url = https://github.com/googleapis/env-tests-logging.git diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg new file mode 100644 index 000000000000..6271ac646573 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "appengine_flex_container" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg new file mode 100644 index 000000000000..2c64e607ecf0 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "appengine_flex_python" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg new file mode 100644 index 000000000000..7cd4797e09b3 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "appengine_standard" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg new file mode 100644 index 000000000000..a54b820a84be --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "cloudrun" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg new file mode 100644 index 000000000000..bebc25341cc4 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "compute" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg new file mode 100644 index 000000000000..8ae854c4662e --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "functions" +} + +# Download trampoline resources. 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg new file mode 100644 index 000000000000..3c9aadb67d92 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "kubernetes" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh new file mode 100755 index 000000000000..528cbc47882e --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eox pipefail + +if [[ -z "${ENVIRONMENT:-}" ]]; then + echo "ENVIRONMENT not set. Exiting" + exit 1 +fi + +cd python-logging/tests/environment + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +gcloud auth activate-service-account --key-file=$GOOGLE_APPLICATION_CREDENTIALS + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +gcloud config set project $PROJECT_ID + +# authenticate docker +gcloud auth configure-docker -q + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +# create a unique id for this run +UUID=$(python -c 'import uuid; print(uuid.uuid1())' | head -c 7) +export ENVCTL_ID=ci-$UUID +echo $ENVCTL_ID + +# Run the specified environment test +set +e +python3.6 -m nox --session "tests(language='python', platform='$ENVIRONMENT')" +TEST_STATUS_CODE=$? + +# destroy resources +echo "cleaning up..." +/workspace/python-logging/tests/environment/envctl/envctl python $ENVIRONMENT destroy + +# exit with proper status code +exit $TEST_STATUS_CODE diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index 383b6ec89fbc..2c845a3de623 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -15,13 +15,11 @@ # Template for .trampolinerc # Add required env vars here. 
-required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" -) +required_envvars+=() # Add env vars which are passed down into the container here. pass_down_envvars+=( + "ENVIRONMENT" "STAGING_BUCKET" "V2_STAGING_BUCKET" "NOX_SESSION" diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 5be81736175d..4ee4bd1df2a2 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -68,6 +68,25 @@ ) s.move(templated_files, excludes=[".coveragerc", "docs/multiprocessing.rst"]) +# adjust .trampolinerc for environment tests +s.replace( + ".trampolinerc", + "required_envvars[^\)]*\)", + "required_envvars+=()" +) +s.replace( + ".trampolinerc", + "pass_down_envvars\+\=\(", + 'pass_down_envvars+=(\n "ENVIRONMENT"' +) + +# don't lint environment tests +s.replace( + ".flake8", + "exclude =", + 'exclude =\n # Exclude environment test code.\n tests/environment/**\n' +) + # -------------------------------------------------------------------------- # Samples templates # -------------------------------------------------------------------------- @@ -80,4 +99,4 @@ python.py_samples() -s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore new file mode 100644 index 000000000000..a81c8ee12195 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -0,0 +1,138 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ diff --git a/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md b/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..dc079b4d66eb --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md @@ -0,0 +1,93 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. 
However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + +Reports should be directed to *[PROJECT STEWARD NAME(s) AND EMAIL(s)]*, the +Project Steward(s) for *[PROJECT NAME]*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html diff --git a/packages/google-cloud-logging/tests/environment/CONTRIBUTING.md b/packages/google-cloud-logging/tests/environment/CONTRIBUTING.md new file mode 100644 index 000000000000..6272489dae31 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to <https://cla.developers.google.com/> to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code Reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google/conduct/).
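Note: for anyone running the new environment suite by hand, a minimal local invocation that mirrors what the .kokoro/environment_tests.sh script above does on CI might look like the sketch below. It assumes the env-tests-logging submodule is checked out at tests/environment, gcloud is already authenticated against a test project, and `kubernetes` (an arbitrary choice here) is the platform under test; ENVCTL_ID just needs to be unique per run.

    cd tests/environment
    # unique suffix so concurrent runs do not collide on GCP resource names
    export ENVCTL_ID=ci-$(python3 -c 'import uuid; print(uuid.uuid1())' | head -c 7)
    # run one platform's tests; the session name matches environment_tests.sh
    python3 -m nox --session "tests(language='python', platform='kubernetes')"
    # tear down whatever the run deployed, as the CI script does on exit
    ./envctl/envctl python kubernetes destroy

The explicit destroy step mirrors the cleanup path in environment_tests.sh, which tears down deployed resources even when the nox session fails.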
diff --git a/packages/google-cloud-logging/tests/environment/LICENSE b/packages/google-cloud-logging/tests/environment/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md new file mode 100644 index 000000000000..97d9a7ab780b --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -0,0 +1,66 @@ +# Logging Environment Tests
+
+This repo was designed to be run as a [submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules) of the GCP Logging repositories.
+It contains common tests to ensure behavioral consistency across languages and GCP platforms.
+
+## Usage
+
+These tests are implemented in [pytest](https://docs.pytest.org/en/stable/) and triggered by [nox](https://nox.thea.codes/en/stable/).
+
+Run the full suite of tests:
+
+```
+nox -s tests
+```
+
+Run a specific test:
+
+```
+nox --session "tests(language='python', platform='cloudrun')"
+```
+
+By default, each test will tear down and re-create the GCP environment to ensure a clean starting state.
+To re-use an existing environment while debugging, set the `NO_CLEAN` flag before running tests:
+
+```
+export NO_CLEAN=true
+```
+
+To allow parallel runs, envctl appends an id to each deployed resource. By default, this will be the runner's `hostname`,
+but it can be customized by setting `ENVCTL_ID`.
+
+## Architecture
+
+### deployable/
+
+The Deployable is a Frankenstein-like package of code that can be deployed to any supported GCP environment.
+It contains a pub/sub listener and a set of snippets that can be triggered by pub/sub messages. In this way,
+it allows us to trigger the same code in every GCP environment for consistency checks.
+
+### envctl/
+
+*envctl* is a command line tool for spinning test environments up and down in a simple, reproducible way.
+It accepts three arguments: a language, a GCP environment name, and an action.
+
+Implementation code for each language/environment pair can be found in `envctl/env_scripts`.
+
+*envctl* exposes the following sub-commands:
+- `envctl deploy`
+  - deploys a fresh environment
+- `envctl verify`
+  - verifies that an environment is up (returns true/false)
+- `envctl destroy`
+  - destroys an existing environment
+- `envctl trigger <snippet>`
+  - sends a pub/sub message to trigger a snippet in an environment
+- `envctl filter-string`
+  - returns a filter that finds logs created by the environment
+- `envctl logs`
+  - returns a list of recent logs from the environment
+
+### tests/
+
+Contains the pytest code that tests each environment. Common logic is stored as abstract super-classes in `tests/common`.
+Concrete implementations for each environment can be found in `tests/<language>/test_<platform>.py`.
+Test files in `tests/` can inherit from the abstract classes in `tests/common` as needed, in order to share test logic between environments.
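+
+For reference, `envctl trigger` is a thin wrapper around a pub/sub publish. Below is a
+minimal sketch of the equivalent raw call in Python; the project id and topic name are
+hypothetical placeholders, and it assumes an environment has already been deployed:
+
+```
+# Hypothetical sketch: publish a trigger message the way `envctl trigger` does.
+# The message body names a snippet in deployable/python/snippets.py, and any
+# attributes are forwarded to that snippet as keyword arguments.
+from google.cloud import pubsub_v1
+
+publisher = pubsub_v1.PublisherClient()
+topic_name = "projects/my-project/topics/logging-test-myhost"  # placeholder
+
+future = publisher.publish(
+    topic_name,
+    b"simple_log",                 # snippet function to run
+    log_text="triggered by hand",  # passed through as a keyword argument
+)
+print(future.result())  # blocks until published; returns the message id
+```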
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/.dockerignore b/packages/google-cloud-logging/tests/environment/deployable/python/.dockerignore new file mode 100644 index 000000000000..8d6fb10f7d61 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/.dockerignore @@ -0,0 +1,2 @@ +Dockerfile +*/.nox diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore new file mode 100644 index 000000000000..e0333f99bd3a --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore @@ -0,0 +1,2 @@ +python-logging +*.tar diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile new file mode 100644 index 000000000000..621f30743dd8 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile @@ -0,0 +1,37 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM python:3.7-slim + +# This Dockerfile must be built at the repo root + +# show python logs as they occur +ENV PYTHONUNBUFFERED=0 + +# Install test code dependencies. +COPY requirements.txt /app/ +RUN pip install -r /app/requirements.txt + +# Add test scripts. +COPY router.py /app/ +COPY snippets.py /app/ + +# install logging from local directory. +WORKDIR /app +ENV PATH="/app:${PATH}" +COPY python-logging /app/python-logging +RUN pip install -e /app/python-logging + +# Start script +CMD python router.py diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt new file mode 100644 index 000000000000..4af7b519062c --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -0,0 +1,2 @@ +flask +google-cloud-pubsub diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/router.py b/packages/google-cloud-logging/tests/environment/deployable/python/router.py new file mode 100644 index 000000000000..705c0daf8939 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/router.py @@ -0,0 +1,116 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
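+
+# router.py is the dispatch layer of the deployable: each pub/sub message's
+# data payload names a snippet function defined in snippets.py, and the
+# message's attributes are forwarded to it as keyword arguments. Three
+# entrypoints cover the supported platforms: pubsub_gcf (Cloud Functions
+# events), pubsub_http (Cloud Run push subscriptions), and pubsub_callback
+# (a direct subscriber, used when the script runs as a process, e.g. on GKE).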
+
+import os
+from google.cloud import pubsub_v1
+import uuid
+import snippets
+from inspect import getmembers, isfunction
+import google.auth
+import logging
+from flask import Flask, request
+import base64
+
+try:
+    import google.cloud.logging
+except ImportError:
+    # import at runtime for GAE environments
+    import pip
+    import importlib
+    import site
+
+    pip.main(["install", "-e", "./python-logging"])
+    importlib.reload(site)
+    import google.cloud.logging
+
+app = Flask(__name__)
+
+_test_functions = {
+    name: func for (name, func) in getmembers(snippets) if isfunction(func)
+}
+
+
+# used in Cloud Functions
+def pubsub_gcf(event, context):
+    client = google.cloud.logging.Client()
+    client.setup_logging()
+
+    if "data" not in event:
+        logging.error("invalid pubsub message")
+        return
+    msg_str = base64.b64decode(event["data"]).decode("utf-8")
+    kwargs = event.get("attributes", {})
+    found_func = _test_functions.get(msg_str, None)
+    if found_func:
+        found_func(**kwargs)
+    else:
+        logging.error(f"function {msg_str} not found")
+
+
+# grabs pubsub message out of request
+# used in Cloud Run
+@app.route("/", methods=["POST"])
+def pubsub_http():
+    envelope = request.get_json()
+    if not envelope or not isinstance(envelope, dict) or "message" not in envelope:
+        return "Bad Request: invalid pub/sub message", 400
+    pubsub_message = envelope["message"]
+    kwargs = (
+        pubsub_message["attributes"] if "attributes" in pubsub_message.keys() else {}
+    )
+    msg_str = base64.b64decode(pubsub_message["data"]).decode("utf-8").strip()
+    found_func = _test_functions.get(msg_str, None)
+    if found_func:
+        found_func(**kwargs)
+        return ("", 200)
+    else:
+        return f"Bad Request: function {msg_str} not found", 400
+
+
+# receives pubsub messages when the script is run directly (GKE)
+def pubsub_callback(message):
+    msg_str = message.data.decode("utf-8")
+    kwargs = message.attributes
+    message.ack()
+    found_func = _test_functions.get(msg_str, None)
+    if found_func:
+        found_func(**kwargs)
+    else:
+        logging.error(f"function {msg_str} not found")
+
+
+if __name__ == "__main__":
+    # set up logging
+    client = google.cloud.logging.Client()
+    client.setup_logging()
+
+    if os.getenv("ENABLE_SUBSCRIBER", None):
+        # set up pubsub listener
+        topic_id = os.getenv("PUBSUB_TOPIC", "logging-test")
+        _, project_id = google.auth.default()
+        subscription_id = f"{topic_id}-subscriber"
+        subscriber = pubsub_v1.SubscriberClient()
+        topic_name = f"projects/{project_id}/topics/{topic_id}"
+        subscription_name = f"projects/{project_id}/subscriptions/{subscription_id}"
+        subscriber.create_subscription(name=subscription_name, topic=topic_name)
+        future = subscriber.subscribe(subscription_name, pubsub_callback)
+        try:
+            print(f"listening for pubsub messages at {topic_id}")
+            future.result()
+        except KeyboardInterrupt:
+            future.cancel()
+
+    # set up flask server
+    if os.getenv("ENABLE_FLASK", None):
+        port = os.getenv("PORT", 8080)
+        app.run(debug=True, host="0.0.0.0", port=port) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py new file mode 100644 index 000000000000..01c30b1db0a8 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -0,0 +1,47 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +try: + import google.cloud.logging +except ImportError: + # import at runtime for GAE environments + import pip + import importlib + import site + + pip.main(["install", "-e", "./python-logging"]) + importlib.reload(site) + import google.cloud.logging + + +def simple_log(log_name=None, log_text="simple_log", **kwargs): + client = google.cloud.logging.Client() + logger = client.logger(log_name) + logger.log_text(log_text) + + +def pylogging(log_text="pylogging", severity="warning", **kwargs): + # allowed severity: debug, info, warning, error, critical + if severity == "debug": + logging.debug(log_text) + elif severity == "info": + logging.info(log_text) + elif severity == "warning": + logging.warning(log_text) + elif severity == "error": + logging.error(log_text) + else: + logging.critical(log_text) diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_container.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_container.sh new file mode 100755 index 000000000000..bef35326ec4f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_container.sh @@ -0,0 +1,79 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-py-flex-con-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud app services delete $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null + if [[ $? 
== 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+
+deploy() {
+  # create pub/sub topic
+  set +e
+  gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+  set -e
+
+  build_container
+
+  cat << EOF > $TMP_DIR/app.yaml
+runtime: custom
+service: $SERVICE_NAME
+env: flex
+manual_scaling:
+  instances: 1
+env_variables:
+  ENABLE_SUBSCRIBER: "true"
+  PUBSUB_TOPIC: $SERVICE_NAME
+EOF
+
+  # deploy
+  pushd $TMP_DIR
+  gcloud app deploy --image-url $GCR_PATH -q
+  popd
+}
+
+filter-string() {
+  echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\""
+}
+
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh new file mode 100755 index 000000000000..87fba162af44 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh @@ -0,0 +1,83 @@ +#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e  # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u  # undefined variables cause exit
+
+SERVICE_NAME="logging-py-flex-$(echo $ENVCTL_ID | head -c 10)"
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service
+  gcloud app services delete $SERVICE_NAME -q
+}
+
+verify() {
+  set +e
+  gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+deploy() {
+  # create pub/sub topic
+  set +e
+  gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+  set -e
+  # set up deployment directory
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT
+  tar -cvf $TMP_DIR/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ .
+  popd
+  mkdir $TMP_DIR/python-logging
+  tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/python-logging
+  # copy test scripts
+  cp $REPO_ROOT/deployable/python/*.py $TMP_DIR
+  cp $REPO_ROOT/deployable/python/requirements.txt $TMP_DIR
+  # build app.yaml
+  cat << EOF > $TMP_DIR/app.yaml
+runtime: python
+service: $SERVICE_NAME
+env: flex
+entrypoint: python router.py
+runtime_config:
+  python_version: 3
+manual_scaling:
+  instances: 1
+env_variables:
+  ENABLE_SUBSCRIBER: "true"
+  PUBSUB_TOPIC: $SERVICE_NAME
+EOF
+  # deploy
+  pushd $TMP_DIR
+  gcloud app deploy -q
+  popd
+}
+
+filter-string() {
+  echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\""
+}
+
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh new file mode 100755 index 000000000000..80334e5e0619 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh @@ -0,0 +1,83 @@ +#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e  # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u  # undefined variables cause exit
+
+
+SERVICE_NAME="logging-py-standard-$(echo $ENVCTL_ID | head -c 10)"
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service
+  gcloud app services delete $SERVICE_NAME -q 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+deploy() {
+  # create pub/sub topic
+  set +e
+  gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+  set -e
+  # set up deployment directory
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT
+  tar -cvf $TMP_DIR/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ .
+  popd
+  mkdir $TMP_DIR/python-logging
+  tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/python-logging
+  # copy test scripts
+  cp $REPO_ROOT/deployable/python/*.py $TMP_DIR
+  echo "-e ./python-logging" | cat $REPO_ROOT/deployable/python/requirements.txt - > $TMP_DIR/requirements.txt
+  # build app.yaml
+  cat << EOF > $TMP_DIR/app.yaml
+runtime: python37
+service: $SERVICE_NAME
+entrypoint: python router.py
+manual_scaling:
+  instances: 1
+env_variables:
+  ENABLE_SUBSCRIBER: "true"
+  PUBSUB_TOPIC: $SERVICE_NAME
+EOF
+  # deploy
+  pushd $TMP_DIR
+  gcloud app deploy -q
+  popd
+}
+
+filter-string() {
+  echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\""
+}
+
+
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh new file mode 100755 index 000000000000..1fe9461e53e3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh @@ -0,0 +1,87 @@ +#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e  # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u  # undefined variables cause exit
+
+
+SERVICE_NAME="log-py-run-$(echo $ENVCTL_ID | head -c 10)"
+SA_NAME=$SERVICE_NAME-invoker
+
+add_service_accounts() {
+  set +e
+  local PROJECT_ID=$(gcloud config list --format 'value(core.project)')
+  local PROJECT_NUMBER=$(gcloud projects list --filter=$PROJECT_ID --format="value(PROJECT_NUMBER)")
+  gcloud projects add-iam-policy-binding $PROJECT_ID \
+    --member=serviceAccount:service-$PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com \
+    --role=roles/iam.serviceAccountTokenCreator 2> /dev/null
+  gcloud iam service-accounts create $SA_NAME \
+    --display-name "Pub/Sub Invoker" 2> /dev/null
+  gcloud run services add-iam-policy-binding $SERVICE_NAME \
+    --member=serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com \
+    --role=roles/run.invoker 2> /dev/null
+  RUN_URL=$(gcloud run services list --filter=$SERVICE_NAME --format="value(URL)")
+  gcloud pubsub subscriptions create $SERVICE_NAME-subscriber --topic $SERVICE_NAME \
+    --push-endpoint=$RUN_URL \
+    --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com 2> /dev/null
+  set -e
+}
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service account
+  gcloud iam service-accounts delete $SA_NAME -q 2> /dev/null
+  # delete container images
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null
+  # delete service
+  gcloud run services delete $SERVICE_NAME -q 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud run services describe $SERVICE_NAME > /dev/null 2> /dev/null
+  if [[ $?
== 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + build_container + gcloud config set run/platform managed + gcloud config set run/region us-west1 + gcloud run deploy \ + --allow-unauthenticated \ + --image $GCR_PATH \ + --update-env-vars ENABLE_FLASK=true \ + $SERVICE_NAME + # create pubsub subscription + add_service_accounts +} + +filter-string() { + echo "resource.type=\"global\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh new file mode 100755 index 000000000000..c1cd01c05e77 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh @@ -0,0 +1,61 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="logging-py-gce-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud compute instances delete $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud compute instances describe $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + build_container + gcloud beta compute instances create-with-container \ + $SERVICE_NAME \ + --container-image $GCR_PATH \ + --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true" +} + +filter-string() { + #INSTANCE_ID=$(gcloud compute instances list --filter="name~^$SERVICE_NAME$" --format="value(ID)") + #echo "resource.type=\"gce_instance\" AND resource.labels.instance_id=\"$INSTANCE_ID\"" + echo "resource.type=\"global\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh new file mode 100755 index 000000000000..89b94b324607 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -0,0 +1,76 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-py-func-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service + gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud functions describe $SERVICE_NAME --region us-west2 + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -e + local RUNTIME="${2:-python37}" + # set up deployment directory + # copy over local copy of library + pushd $SUPERREPO_ROOT + tar -cvf $TMP_DIR/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ . + popd + mkdir $TMP_DIR/python-logging + tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/python-logging + # copy test scripts + cp $REPO_ROOT/deployable/python/router.py $TMP_DIR/main.py + cp $REPO_ROOT/deployable/python/*.py $TMP_DIR/ + echo "-e ./python-logging" | cat $REPO_ROOT/deployable/python/requirements.txt - > $TMP_DIR/requirements.txt + # deploy function + pushd $TMP_DIR + gcloud functions deploy $SERVICE_NAME \ + --entry-point pubsub_gcf \ + --trigger-topic $SERVICE_NAME \ + --runtime $RUNTIME \ + --region us-west2 + popd +} + +filter-string() { + echo "resource.type=\"cloud_function\" AND resource.labels.function_name=\"$SERVICE_NAME\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh new file mode 100755 index 000000000000..7a791456d04c --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh @@ -0,0 +1,101 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="logging-py-gke-$(echo $ENVCTL_ID | head -c 10)" +ZONE=us-central1-a + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete cluster + gcloud container clusters delete --zone $ZONE $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud container clusters describe --zone $ZONE $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? 
== 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+attach_or_create_gke_cluster(){
+  set +e
+  gcloud container clusters get-credentials $SERVICE_NAME
+  if [[ $? -ne 0 ]]; then
+    echo "cluster not found. creating..."
+    gcloud container clusters create $SERVICE_NAME \
+      --zone $ZONE \
+      --scopes "https://www.googleapis.com/auth/pubsub"
+  fi
+  set -e
+}
+
+deploy() {
+  local SCRIPT="${1:-router.py}"
+
+  attach_or_create_gke_cluster
+  build_container
+  cat << EOF > $TMP_DIR/gke.yaml
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: $SERVICE_NAME
+spec:
+  selector:
+    matchLabels:
+      app: $SERVICE_NAME
+  template:
+    metadata:
+      labels:
+        app: $SERVICE_NAME
+    spec:
+      containers:
+      - name: $SERVICE_NAME
+        image: $GCR_PATH
+        env:
+        - name: PUBSUB_TOPIC
+          value: $SERVICE_NAME
+        - name: ENABLE_SUBSCRIBER
+          value: "true"
+EOF
+  # clean cluster
+  set +e
+  kubectl delete deployments --all 2>/dev/null
+  kubectl delete -f $TMP_DIR 2>/dev/null
+  set -e
+  # deploy test container
+  kubectl apply -f $TMP_DIR
+}
+
+filter-string() {
+  echo "resource.type=\"k8s_container\" AND resource.labels.cluster_name=\"$SERVICE_NAME\""
+}
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/envctl b/packages/google-cloud-logging/tests/environment/envctl/envctl new file mode 100755 index 000000000000..961fa9ecdb65 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/envctl @@ -0,0 +1,129 @@ +#!/bin/bash
+set -e  # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u  # undefined variables cause exit
+
+# set an ID to use for future runs
+ENVCTL_ID="${ENVCTL_ID:-$(hostname)}"
+
+# find the associated project
+PROJECT_ID="${PROJECT_ID:-$(gcloud config get-value project)}"
+
+# ensure the working dir is the repo root
+SCRIPT_DIR=$(realpath $(dirname "$0"))
+REPO_ROOT=$(realpath $SCRIPT_DIR/../)
+cd $REPO_ROOT
+# find super-repo path
+set +e
+SUPERREPO_ROOT=$(git -C ../ rev-parse --show-toplevel 2> /dev/null)
+if [[ $? -ne 0 ]]; then
+  echo "ERROR: super-repo not found. envctl expects to be used as a git sub-module"
+  exit 1
+fi
+set -e
+
+# create and destroy temporary dir
+UUID=$(python -c 'import uuid; print(uuid.uuid1())')
+TMP_DIR=$REPO_ROOT/tmp-$UUID
+mkdir $TMP_DIR
+function finish {
+  # clean up the temporary directory on exit
+  rm -rf $TMP_DIR
+}
+trap finish EXIT
+
+# shared logic
+build_container() {
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  # copy super-repo into deployable dir
+  _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"}
+  _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT
+  tar -cvf $_deployable_dir/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ .
+  popd
+  mkdir -p $_deployable_dir/python-logging
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/python-logging
+  # build container
+  docker build -t $GCR_PATH $_deployable_dir
+  docker push $GCR_PATH
+}
+
+logs() {
+  echo "resource filter: \"$(filter-string)\""
+  echo "printing recent logs..."
+  gcloud logging read --freshness "5m" --limit 10 "$(filter-string)"
+}
+
+trigger() {
+  echo publishing to topic: $SERVICE_NAME
+  local FUNCTION="${1-empty}"
+  if [[ $FUNCTION == "empty" ]]; then
+    echo "function not set"
+    exit 1
+  fi
+  shift
+  if [[ -z "$@" ]]; then
+    echo calling $FUNCTION\(\)
+    gcloud pubsub topics publish "$SERVICE_NAME" --message="$FUNCTION"
+  else
+    echo calling $FUNCTION\($@\)
+    eval gcloud pubsub topics publish "$SERVICE_NAME" --message="$FUNCTION" --attribute=$@
+  fi
+}
+
+# entrypoint
+LANGUAGE=${1:-none}
+if [[ "$LANGUAGE" == "help" || "$LANGUAGE" == '-h' ]]; then
+  echo "usage:"
+  echo "$ envctl language environment action "
+  exit 0
+elif [[ ! -d "$SCRIPT_DIR/env_scripts/$LANGUAGE" ]]; then
+  echo Error: not a valid language: $LANGUAGE
+  echo
+  echo "Select a supported language."
+  echo
+  printf "options:\n$(ls $SCRIPT_DIR/env_scripts/)"
+  exit 1
+fi
+shift
+
+ENVIRONMENT=${1:-none}
+if [[ -f "$SCRIPT_DIR/env_scripts/$LANGUAGE/$ENVIRONMENT.sh" ]]; then
+  source "$SCRIPT_DIR/env_scripts/$LANGUAGE/$ENVIRONMENT.sh"
+else
+  echo Error: not a valid environment: $ENVIRONMENT
+  echo
+  echo "Select a supported GCP environment."
+  echo
+  printf "options:\n$(ls $SCRIPT_DIR/env_scripts/$LANGUAGE | sed -e "s/.sh//")"
+  exit 1
+fi
+shift
+
+# create pub/sub topic
+set +e
+gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+set -e
+
+ACTION=${1:-none}
+set -u
+if [[ "$(type -t $ACTION)" == "function" ]]; then
+  shift
+  $ACTION $@
+else
+  echo Error: not a valid command: $ACTION
+  echo
+  echo "Select an action to run for the environment."
+  echo
+  echo "options:"
+  echo "deploy         Spin up an environment."
+  echo "verify         Check if an environment is active."
+  echo "destroy        Destroy an existing environment."
+  echo "trigger        Trigger a function in the environment."
+  echo "logs           Print logs for the environment."
+  echo "filter-string  Print the filter to obtain logs for this environment."
+  echo
+  exit 1
+fi
+finish diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py new file mode 100644 index 000000000000..31ad173635f7 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -0,0 +1,161 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import glob
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, List, Optional
+
+import nox
+
+
+TEST_CONFIG = {
+    # You can opt out from the test for specific Python versions.
+    "ignored_versions": ["2.7"],
+    # Old samples are opted out of enforcing Python type hints
+    # All new samples should feature them
+    "enforce_type_hints": False,
+    # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+    # build specific Cloud project. You can also use your own string
+    # to use your own Cloud project.
+    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+    # A dictionary you want to inject into your test. Don't put any
+    # secrets here. These values will override predefined values.
+    "envs": {},
+}
+
+# Linting with flake8.
+
+
+def _determine_local_import_names(start_dir: str) -> List[str]:
+    """Determines all import names that should be considered "local".
+
+    This is used when running the linter to ensure that import order is
+    properly checked.
+    """
+    file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+    return [
+        basename
+        for basename, extension in file_ext_pairs
+        if extension == ".py"
+        or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__",)
+    ]
+
+
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+
+DEFAULT_PYTHON_VERSION = "3.8"
+BLACK_PATHS = ["./deployable/python"]
+BLACK_VERSION = "black==19.10b0"
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+    """Run linters.
+
+    Returns a failure if the linters find linting errors or sufficiently
+    serious code quality issues.
+    """
+    session.install("flake8", BLACK_VERSION)
+    black_files = []
+    for target in BLACK_PATHS:
+        black_files += [
+            f"{target}/{path}" for path in os.listdir(target) if path.endswith(".py")
+        ]
+    session.run(
+        "black", "--check", *black_files,
+    )
+    session.run(
+        "flake8", "--exclude=deployable/python/python-logging", *BLACK_PATHS,
+    )
+
+
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+
+    session.install("black")
+    black_files = []
+    for target in BLACK_PATHS:
+        black_files += [
+            f"{target}/{path}" for path in os.listdir(target) if path.endswith(".py")
+        ]
+
+    session.run("black", *black_files)
+
+
+# Environment Tests
+
+
+@nox.session(python="3.8")
+@nox.parametrize(
+    "platform",
+    [
+        "compute",
+        "appengine_standard",
+        "appengine_flex_python",
+        "appengine_flex_container",
+        "kubernetes",
+        "cloudrun",
+        "functions",
+    ],
+)
+@nox.parametrize("language", ["python"])
+def tests(session, language, platform):
+    """Run the e2e environment test suite."""
+    if os.environ.get("RUN_ENV_TESTS", "true") == "false":
+        session.skip("RUN_ENV_TESTS is set to false, skipping")
+    # Sanity check: Only run tests if the environment variable is set.
+    if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
+        session.skip("Credentials must be set via environment variable")
+
+    # Use pre-release gRPC for system tests.
+    session.install("--pre", "grpcio")
+
+    # Install all test dependencies, then install this package into the
+    # virtualenv's dist-packages.
+    session.install(
+        "mock",
+        "pytest",
+        "google-cloud-testutils",
+        "google-cloud-bigquery",
+        "google-cloud-pubsub",
+        "google-cloud-storage",
+        "google-cloud-logging",
+    )
+
+    test_path = f"./tests/{language}/test_{platform}.py"
+    if os.path.exists(test_path):
+        session.run("py.test", "-s", test_path, *session.posargs)
+    else:
+        session.skip(f"not yet implemented: {test_path}") diff --git a/packages/google-cloud-logging/tests/environment/tests/__init__.py b/packages/google-cloud-logging/tests/environment/tests/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/common/__init__.py b/packages/google-cloud-logging/tests/environment/tests/common/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py new file mode 100644 index 000000000000..c13b5cba8d1a --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -0,0 +1,103 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
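+
+# common.py holds the shared environment-test logic: Common is a mixin for the
+# per-platform unittest.TestCase subclasses. setUpClass deploys (or reuses) the
+# target environment through envctl via ScriptRunner, _trigger publishes a
+# pub/sub message that runs a snippet remotely, and _get_logs reads back the
+# resulting Cloud Logging entries using the environment's filter string.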
+
+import google.cloud.logging
+from google.cloud._helpers import UTC
+from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler
+from google.cloud.logging_v2.handlers.transports import SyncTransport
+from google.cloud.logging_v2 import Client
+from google.cloud.logging_v2.resource import Resource
+from google.cloud.logging_v2 import entries
+from google.cloud.logging_v2._helpers import LogSeverity
+
+from time import sleep
+from datetime import datetime
+from datetime import timedelta
+from datetime import timezone
+import os
+import sys
+import uuid
+import inspect
+
+from .script_utils import ScriptRunner
+from .script_utils import Command
+
+
+class Common:
+    _client = Client()
+    # environment and language must be set by subclass
+    environment = None
+    language = None
+
+    def _get_logs(self, timestamp=None):
+        time_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+        if not timestamp:
+            timestamp = datetime.now(timezone.utc) - timedelta(minutes=10)
+        _, filter_str = self._script.run_command(Command.GetFilter)
+        filter_str += ' AND timestamp > "%s"' % timestamp.strftime(time_format)
+        iterator = self._client.list_entries(filter_=filter_str)
+        entries = list(iterator)
+        return entries
+
+    def _trigger(self, function, return_logs=True, **kwargs):
+        timestamp = datetime.now(timezone.utc)
+        args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()])
+        self._script.run_command(Command.Trigger, [function, args_str])
+        # give the command time to be received
+        sleep(30)
+        if return_logs:
+            log_list = self._get_logs(timestamp)
+            return log_list
+
+    @classmethod
+    def setUpClass(cls):
+        if not cls.environment:
+            raise NotImplementedError("environment not set by subclass")
+        if not cls.language:
+            raise NotImplementedError("language not set by subclass")
+        cls._script = ScriptRunner(cls.environment, cls.language)
+        # check if already setup
+        status, _ = cls._script.run_command(Command.Verify)
+        if status == 0:
+            if os.getenv("NO_CLEAN"):
+                # ready to go
+                return
+            else:
+                # reset environment
+                status, _ = cls._script.run_command(Command.Destroy)
+                assert status == 0
+        # deploy test code to GCE
+        status, _ = cls._script.run_command(Command.Deploy)
+        # verify code is running
+        status, _ = cls._script.run_command(Command.Verify)
+        assert status == 0
+
+    @classmethod
+    def tearDownClass(cls):
+        # by default, destroy environment on each run
+        # allow skipping deletion for development
+        if not os.getenv("NO_CLEAN"):
+            cls._script.run_command(Command.Destroy)
+
+    def test_receive_log(self):
+        log_text = f"{inspect.currentframe().f_code.co_name}: {uuid.uuid1()}"
+        log_list = self._trigger("pylogging", log_text=log_text)
+        found_log = None
+        for log in log_list:
+            message = (
+                log.payload.get("message", None)
+                if isinstance(log.payload, dict)
+                else str(log.payload)
+            )
+            if message and log_text in message:
+                found_log = log
+        self.assertIsNotNone(found_log, "expected log text not found") diff --git a/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py new file mode 100644 index 000000000000..800198d2b141 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py @@ -0,0 +1,69 @@ +# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from time import sleep +from datetime import datetime +from datetime import timezone +import os +import sys +from shlex import split +import subprocess +import signal +from enum import Enum +from pathlib import Path + + +class Command(Enum): + Deploy = "deploy" + Destroy = "destroy" + Verify = "verify" + GetFilter = "filter-string" + Trigger = "trigger" + + +class ScriptRunner: + def __init__(self, environment, language): + run_dir = os.path.dirname(os.path.realpath(__file__)) + repo_root = Path(run_dir).parent.parent + self.script_path = os.path.join(repo_root, "envctl/envctl") + self.environment = environment + self.language = language + env_path = os.path.join( + repo_root, f"envctl/env_scripts/{language}/{environment}.sh" + ) + if not os.path.exists(env_path): + raise RuntimeError(f"{env_path} does not exist") + + def run_command(self, command, args=[]): + if not command or not isinstance(command, Command): + raise RuntimeError(f"unknown command: {command}") + os.setpgrp() + complete = False + try: + full_command = [self.script_path, self.language, self.environment] + split( + command.value + ) + for arg in args: + full_command.append(arg) + print(full_command) + result = subprocess.run(full_command, capture_output=True) + complete = True + return result.returncode, result.stdout.decode("utf-8") + except Exception as e: + print(e) + finally: + if not complete: + # kill background process if script is terminated + # os.killpg(0, signal.SIGTERM) + return 1, None diff --git a/packages/google-cloud-logging/tests/environment/tests/python/__init__.py b/packages/google-cloud-logging/tests/environment/tests/python/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py new file mode 100644 index 000000000000..16bb2eae1f19 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineFlexContainer(Common, unittest.TestCase): + + environment = "appengine_flex_container" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py new file mode 100644 index 000000000000..5c8fdacc9d04 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineFlex(Common, unittest.TestCase): + + environment = "appengine_flex_python" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py new file mode 100644 index 000000000000..e4bb999b02a3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineStandard(Common, unittest.TestCase): + + environment = "appengine_standard" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py new file mode 100644 index 000000000000..1ee08268bfe3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudRun(Common, unittest.TestCase): + + environment = "cloudrun" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py new file mode 100644 index 000000000000..4b71761a961f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestComputeEngine(Common, unittest.TestCase): + + environment = "compute" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py new file mode 100644 index 000000000000..b3bb7557f247 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions" + language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py new file mode 100644 index 000000000000..e4e5bf59076c --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestKubernetesEngine(Common, unittest.TestCase): + + environment = "kubernetes" + language = "python" diff --git a/tests/environment b/tests/environment new file mode 160000 index 000000000000..aec50a9122b0 --- /dev/null +++ b/tests/environment @@ -0,0 +1 @@ +Subproject commit aec50a9122b0ff3a41c20925d171d51d948f4b80 From 97abf76e960f3f783fed3c7c8dfe192973514587 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Feb 2021 23:25:41 +0100 Subject: [PATCH 383/855] chore(deps): update dependency google-cloud-storage to v1.35.1 (#172) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 94edb42b3c9c..ce47db3a9270 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 -google-cloud-storage==1.35.0 +google-cloud-storage==1.35.1 google-cloud-pubsub==2.2.0 google-cloud-bigquery==2.7.0 From c5d98921618051fc506f810166c129b6a22bc434 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 9 Feb 2021 15:55:18 -0800 Subject: [PATCH 384/855] tests: update environment test config (#178) --- .../environment/appengine_flex_container/common.cfg | 2 +- .../.kokoro/environment/appengine_flex_python/common.cfg | 2 +- .../.kokoro/environment/appengine_standard/common.cfg | 2 +- .../.kokoro/environment/cloudrun/common.cfg | 2 +- .../.kokoro/environment/compute/common.cfg | 2 +- .../.kokoro/environment/functions/common.cfg | 2 +- .../.kokoro/environment/kubernetes/common.cfg | 2 +- .../google-cloud-logging/.kokoro/environment_tests.sh | 9 ++++++++- 8 files changed, 15 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg index 6271ac646573..1555bf28f46c 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg index 2c64e607ecf0..9d3506cb97cf 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: 
"github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg index 7cd4797e09b3..07242418acce 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg index a54b820a84be..a9a26f468471 100644 --- a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg index bebc25341cc4..ac601ef09d22 100644 --- a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg index 8ae854c4662e..96b0940fe842 100644 --- a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg index 3c9aadb67d92..a9fcc33e349f 100644 --- a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg @@ -30,5 +30,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "python-logging/.kokoro/environment_tests.sh" + value: "github/python-logging/.kokoro/environment_tests.sh" } diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index 528cbc47882e..6c7b3677dd27 100755 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -20,7 +20,14 @@ if [[ -z "${ENVIRONMENT:-}" ]]; then exit 1 fi -cd python-logging/tests/environment +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-logging" +fi + +# make sure submodule is up to date +git submodule update --init --recursive + +cd "${PROJECT_ROOT}/tests/environment" # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 From 48cc7c2117fb592b48a9cd40270c1c1e9a667889 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 9 Feb 2021 16:19:14 -0800 Subject: [PATCH 385/855] tests: clean up properly after completion (#179) --- packages/google-cloud-logging/.kokoro/environment_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index 6c7b3677dd27..b7b96ce9de7a 100755 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -65,7 +65,7 @@ TEST_STATUS_CODE=$? # destroy resources echo "cleaning up..." -/workspace/python-logging/tests/environment/envctl/envctl python $ENVIRONMENT destroy +${PROJECT_ROOT}/tests/environment/envctl/envctl python $ENVIRONMENT destroy # exit with proper status code exit $TEST_STATUS_CODE From 0bdc10c62fc7b1c531d95851c55d9ef9dd7e2779 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 10 Feb 2021 02:42:24 +0100 Subject: [PATCH 386/855] chore(deps): update dependency google-cloud-pubsub to v2.3.0 (#176) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index ce47db3a9270..45b03d5fc125 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 google-cloud-storage==1.35.1 -google-cloud-pubsub==2.2.0 +google-cloud-pubsub==2.3.0 google-cloud-bigquery==2.7.0 From 66c8a911be1b8be158b00ca8d720f2ab82394cfb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Feb 2021 09:37:29 -0800 Subject: [PATCH 387/855] tests: fix environment test issues (#181) --- .../.kokoro/environment_tests.sh | 12 ++++++++++++ .../env_scripts/python/appengine_standard.sh | 8 ++++++++ .../envctl/env_scripts/python/compute.sh | 11 ++++++++++- .../envctl/env_scripts/python/kubernetes.sh | 15 +++++++++++++++ .../tests/environment/noxfile.py | 2 +- tests/environment | 2 +- 6 files changed, 47 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index b7b96ce9de7a..f8f138ea173e 100755 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -43,6 +43,9 @@ gcloud auth activate-service-account --key-file=$GOOGLE_APPLICATION_CREDENTIALS export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") gcloud config set project $PROJECT_ID +# set a default zone. 
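+# (zonal gcloud commands further down, e.g. GKE cluster creation, fall back
+# to this default when no --zone flag is given)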
+gcloud config set compute/zone us-central1-b + # authenticate docker gcloud auth configure-docker -q @@ -53,6 +56,15 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version +# Install kubectl +if [[ "${ENVIRONMENT}" == "kubernetes" ]]; then + curl -LO https://dl.k8s.io/release/v1.20.0/bin/linux/amd64/kubectl + chmod +x kubectl + mkdir -p ~/.local/bin + mv ./kubectl ~/.local/bin + export PATH=$PATH:~/.local/bin +fi + # create a unique id for this run UUID=$(python -c 'import uuid; print(uuid.uuid1())' | head -c 7) export ENVCTL_ID=ci-$UUID diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh index 80334e5e0619..3b92e2b68c11 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh @@ -73,6 +73,14 @@ EOF pushd $TMP_DIR gcloud app deploy -q popd + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done } filter-string() { diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh index c1cd01c05e77..84b9982726a6 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh @@ -47,10 +47,19 @@ verify() { deploy() { build_container - gcloud beta compute instances create-with-container \ + gcloud compute instances create-with-container \ $SERVICE_NAME \ --container-image $GCR_PATH \ --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true" + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done + } filter-string() { diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh index 7a791456d04c..74881a3dc6d5 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh @@ -35,6 +35,11 @@ destroy() { verify() { set +e + gcloud pubsub subscriptions describe $SERVICE_NAME-subscriber 2> /dev/null + if [[ $? != 0 ]]; then + echo "FALSE" + exit 1 + fi gcloud container clusters describe --zone $ZONE $SERVICE_NAME > /dev/null 2> /dev/null if [[ $? 
== 0 ]]; then echo "TRUE" @@ -93,6 +98,16 @@ EOF set -e # deploy test container kubectl apply -f $TMP_DIR + # wait for pod to spin up + kubectl wait --for=condition=ready pod -l app=$SERVICE_NAME + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done } filter-string() { diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 31ad173635f7..d62450baf0a6 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -124,7 +124,7 @@ def blacken(session: nox.sessions.Session) -> None: "appengine_standard", "appengine_flex_python", "appengine_flex_container", - "kubernetesengine", + "kubernetes", "cloudrun", "functions", ], diff --git a/tests/environment b/tests/environment index aec50a9122b0..e4cab8acdbf0 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit aec50a9122b0ff3a41c20925d171d51d948f4b80 +Subproject commit e4cab8acdbf0054a9e4b5b3b3a2bbc68f108d982 From 7c6b03bc4560917afff6c6b47b81642d1ce3d5bd Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 23 Feb 2021 20:23:51 -0800 Subject: [PATCH 388/855] tests: fix failing kubernetes test (#194) --- .../environment/deployable/python/snippets.py | 24 ++++++++++++++++ .../envctl/env_scripts/python/functions.sh | 2 +- .../envctl/env_scripts/python/kubernetes.sh | 3 +- .../tests/environment/tests/common/common.py | 28 ++++++++++++------- tests/environment | 2 +- 5 files changed, 46 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 01c30b1db0a8..443da3529942 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -13,6 +13,7 @@ # limitations under the License. 
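# (these snippet functions are triggered by name from the environment tests;
# the dispatcher -- presumably something along the lines of
# getattr(snippets, function_name)(**kwargs) -- lives in the deployable's
# server code and is not part of this patch)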
import logging +import os try: import google.cloud.logging @@ -45,3 +46,26 @@ def pylogging(log_text="pylogging", severity="warning", **kwargs): logging.error(log_text) else: logging.critical(log_text) + +def print_handlers(**kwargs): + root_logger = logging.getLogger() + handlers_str = ', '.join([type(h).__name__ for h in root_logger.handlers]) + logging.info(handlers_str) + +def remove_stream_handlers(**kwargs): + logger = logging.getLogger() + for handler in logger.handlers: + if isinstance(handler, logging.StreamHandler): + logging.error(handler) + logger.removeHandler(handler) + +def print_env_vars(env_var=None, **kwargs): + if env_var: + value = os.environ.get(env_var, None) + if value: + logging.error(value) + else: + logging.error(f"{env_var}: not found") + else: + logging.error(os.environ) + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh index 89b94b324607..36fcb6e08855 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -48,7 +48,7 @@ deploy() { set +e gcloud pubsub topics create $SERVICE_NAME 2>/dev/null set -e - local RUNTIME="${2:-python37}" + local RUNTIME="${2:-python38}" # set up deployment directory # copy over local copy of library pushd $SUPERREPO_ROOT diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh index 74881a3dc6d5..7c85eb197596 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh @@ -58,7 +58,7 @@ attach_or_create_gke_cluster(){ echo "cluster not found. creating..." 
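        # (gke-default keeps the standard node scopes; pubsub is added so the
        # workload can pull trigger messages from the test topic)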
gcloud container clusters create $SERVICE_NAME \ --zone $ZONE \ - --scopes "https://www.googleapis.com/auth/pubsub" + --scopes=gke-default,pubsub fi set -e } @@ -98,6 +98,7 @@ EOF set -e # deploy test container kubectl apply -f $TMP_DIR + sleep 60 # wait for pod to spin up kubectl wait --for=condition=ready pod -l app=$SERVICE_NAME # wait for the pub/sub subscriber to start diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index c13b5cba8d1a..ad62cea056b6 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -24,6 +24,7 @@ from time import sleep from datetime import datetime from datetime import timezone +from datetime import timedelta import os import sys import uuid @@ -38,25 +39,24 @@ class Common: # environment name must be set by subclass environment = None - def _get_logs(self, timestamp=None): + def _add_time_condition_to_filter(self, filter_str, timestamp=None): time_format = "%Y-%m-%dT%H:%M:%S.%f%z" if not timestamp: timestamp = datetime.now(timezone.utc) - timedelta(minutes=10) - _, filter_str = self._script.run_command(Command.GetFilter) - filter_str += ' AND timestamp > "%s"' % timestamp.strftime(time_format) + return f'"{filter_str}" AND timestamp > "{timestamp.strftime(time_format)}"' + + + def _get_logs(self, filter_str=None): + if not filter_str: + _, filter_str = self._script.run_command(Command.GetFilter) iterator = self._client.list_entries(filter_=filter_str) entries = list(iterator) return entries - def _trigger(self, function, return_logs=True, **kwargs): + def _trigger(self, function, **kwargs): timestamp = datetime.now(timezone.utc) args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()]) self._script.run_command(Command.Trigger, [function, args_str]) - # give the command time to be received - sleep(30) - if return_logs: - log_list = self._get_logs(timestamp) - return log_list @classmethod def setUpClass(cls): @@ -90,7 +90,15 @@ def tearDown_class(cls): def test_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}: {uuid.uuid1()}" - log_list = self._trigger("pylogging", log_text=log_text) + self._trigger("pylogging", log_text=log_text) + # give the command time to be received + sleep(30) + filter_str = self._add_time_condition_to_filter(log_text) + # retrieve resulting logs + log_list = self._get_logs(filter_str) + + self.assertEqual(len(log_list), 1) + found_log = None for log in log_list: message = ( diff --git a/tests/environment b/tests/environment index e4cab8acdbf0..46658b49b65d 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit e4cab8acdbf0054a9e4b5b3b3a2bbc68f108d982 +Subproject commit 46658b49b65d40a1311704b3fe898f8737bb4137 From 5e4b26c59b3d44b67c52067ae9cded7013745602 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 25 Feb 2021 15:44:06 -0800 Subject: [PATCH 389/855] tests: retry flaky environment tests (#196) --- .../tests/environment/tests/common/common.py | 10 ++++++++-- .../google-cloud-logging/tests/system/test_system.py | 4 ++-- tests/environment | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index ad62cea056b6..9d3f501f388d 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py 
+++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -30,9 +30,14 @@ import uuid import inspect +from test_utils.retry import RetryErrors + from .script_utils import ScriptRunner from .script_utils import Command +class LogsNotFound(RuntimeError): + """raised when filter returns no logs.""" + pass class Common: _client = Client() @@ -51,6 +56,8 @@ def _get_logs(self, filter_str=None): _, filter_str = self._script.run_command(Command.GetFilter) iterator = self._client.list_entries(filter_=filter_str) entries = list(iterator) + if not entries: + raise LogsNotFound return entries def _trigger(self, function, **kwargs): @@ -88,6 +95,7 @@ def tearDown_class(cls): if not os.getenv("NO_CLEAN"): cls._script.run_command(Command.Destroy) + @RetryErrors(exception=LogsNotFound) def test_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}: {uuid.uuid1()}" self._trigger("pylogging", log_text=log_text) @@ -97,8 +105,6 @@ def test_receive_log(self): # retrieve resulting logs log_list = self._get_logs(filter_str) - self.assertEqual(len(log_list), 1) - found_log = None for log in log_list: message = ( diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 9cd0ac25340f..e6f5aa7cf520 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -72,10 +72,10 @@ def _list_entries(logger): :rtype: list :returns: List of all entries consumed. """ - inner = RetryResult(_has_entries, delay=1, backoff=2, max_tries=6)(_consume_entries) + inner = RetryResult(_has_entries, delay=2, backoff=2, max_tries=6)(_consume_entries) outer = RetryErrors( (ServiceUnavailable, ResourceExhausted, InternalServerError), - delay=1, + delay=2, backoff=2, max_tries=6, )(inner) diff --git a/tests/environment b/tests/environment index 46658b49b65d..f35514893542 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 46658b49b65d40a1311704b3fe898f8737bb4137 +Subproject commit f35514893542dfa29f65214eea96b490f04f3d72 From 0cabf7881802db775b3828203dd7cb04a582dc9f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 25 Feb 2021 16:12:40 -0800 Subject: [PATCH 390/855] chore: Re-generated to pick up changes from synthtool. (#177) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * build: migrate to flakybot Source-Author: Justin Beckwith Source-Date: Thu Jan 28 22:22:38 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: d1bb9173100f62c0cfc8f3138b62241e7f47ca6a Source-Link: https://github.com/googleapis/synthtool/commit/d1bb9173100f62c0cfc8f3138b62241e7f47ca6a * chore(python): include py.typed files in release A py.typed file must be included in the released package for it to be considered typed by type checkers. https://www.python.org/dev/peps/pep-0561/#packaging-type-information. 
See https://github.com/googleapis/python-secret-manager/issues/79 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Feb 5 17:32:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 33366574ffb9e11737b3547eb6f020ecae0536e8 Source-Link: https://github.com/googleapis/synthtool/commit/33366574ffb9e11737b3547eb6f020ecae0536e8 * docs: update python contributing guide Adds details about blacken, updates version for system tests, and shows how to pass through pytest arguments. Source-Author: Chris Cotter Source-Date: Mon Feb 8 17:13:36 2021 -0500 Source-Repo: googleapis/synthtool Source-Sha: 4679e7e415221f03ff2a71e3ffad75b9ec41d87e Source-Link: https://github.com/googleapis/synthtool/commit/4679e7e415221f03ff2a71e3ffad75b9ec41d87e --- .../google-cloud-logging/CONTRIBUTING.rst | 22 +++++++++++++++---- packages/google-cloud-logging/MANIFEST.in | 4 ++-- packages/google-cloud-logging/synth.metadata | 6 ++--- 3 files changed, 23 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index cd48664d8ac0..f6ddd72684f0 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index e9e29d12033d..e783f4c6209b 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! 
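# (py.typed is the PEP 561 marker file; it must ship in the distribution for
# type checkers to treat the installed package as typed, hence the change below)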
include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 407d059c2489..9e8a2ffc4246 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "3d765ae66979ae066bcea07fe9ca01008ec4e4d7" + "sha": "11976b9f3834e4ab976940fe88c2f5784d1b1795" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "778d8beae28d6d87eb01fdc839a4b4d966ed2ebe" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "778d8beae28d6d87eb01fdc839a4b4d966ed2ebe" + "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" } } ], From 4dbeee786006a3705cf4aedd6ba398633447b78c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 26 Feb 2021 01:12:59 +0100 Subject: [PATCH 391/855] chore(deps): update dependency google-cloud-pubsub to v2.4.0 (#191) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 45b03d5fc125..83dc20e1a7a9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 google-cloud-storage==1.35.1 -google-cloud-pubsub==2.3.0 +google-cloud-pubsub==2.4.0 google-cloud-bigquery==2.7.0 From 125f3b768c2cff490ba0b04a3b129ad8098bddbb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 26 Feb 2021 01:47:49 +0100 Subject: [PATCH 392/855] chore(deps): update dependency google-cloud-storage to v1.36.1 (#182) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 83dc20e1a7a9..d85a198523bf 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 -google-cloud-storage==1.35.1 +google-cloud-storage==1.36.1 google-cloud-pubsub==2.4.0 google-cloud-bigquery==2.7.0 From 42abc294a4e59d201b9a7bd6b674d82b51ab6679 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 25 Feb 2021 16:48:44 -0800 Subject: [PATCH 393/855] chore: Re-generated to pick up changes from googleapis. (#171) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: update Go generator, rules_go, and protobuf PiperOrigin-RevId: 352816749 Source-Author: Google APIs Source-Date: Wed Jan 20 10:06:23 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: ceaaf31b3d13badab7cf9d3b570f5639db5593d9 Source-Link: https://github.com/googleapis/googleapis/commit/ceaaf31b3d13badab7cf9d3b570f5639db5593d9 * chore: upgrade gapic-generator-python to 0.40.5 PiperOrigin-RevId: 354996675 Source-Author: Google APIs Source-Date: Mon Feb 1 12:11:49 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 20712b8fe95001b312f62c6c5f33e3e3ec92cfaf Source-Link: https://github.com/googleapis/googleapis/commit/20712b8fe95001b312f62c6c5f33e3e3ec92cfaf Co-authored-by: Daniel Sanche --- .../.kokoro/test-samples.sh | 8 +- .../.kokoro/trampoline_v2.sh | 2 +- .../services/config_service_v2/client.py | 18 +- .../config_service_v2/transports/grpc.py | 23 ++- .../transports/grpc_asyncio.py | 23 ++- .../services/logging_service_v2/client.py | 18 +- .../logging_service_v2/transports/grpc.py | 23 ++- .../transports/grpc_asyncio.py | 23 ++- .../services/metrics_service_v2/client.py | 18 +- .../metrics_service_v2/transports/grpc.py | 23 ++- .../transports/grpc_asyncio.py | 23 ++- packages/google-cloud-logging/synth.metadata | 6 +- .../logging_v2/test_config_service_v2.py | 187 ++++++++++------- .../logging_v2/test_logging_service_v2.py | 188 +++++++++++------- .../logging_v2/test_metrics_service_v2.py | 188 +++++++++++------- 15 files changed, 469 insertions(+), 302 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index e75891832fd4..ba97b53d500c 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. 
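+  # (periodic builds hand their logs to the bot so flaky-test issues get
+  # filed and triaged automatically)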
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop fi if [[ $EXIT -ne 0 ]]; then diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..719bcd5ba84d 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot + # For Build Cop Bot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 7d6492ba0eaf..37a28d7a2b0a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -338,21 +338,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -395,7 +391,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index a64405fba954..b749eb5d3f33 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -88,6 +89,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
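        # (an explicit api_endpoint always wins; otherwise the client picks the
        # mTLS endpoint when a client certificate is in play, subject to the
        # GOOGLE_API_USE_MTLS_ENDPOINT setting of auto, never or always)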
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,6 +109,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -113,11 +123,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -161,12 +166,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index aa094ea0ee9c..f0f1ca07063c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -102,6 +102,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -133,6 +134,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -149,6 +154,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -158,11 +168,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -206,12 +211,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a340eb205996..0e046c957313 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -292,21 +292,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -349,7 +345,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index f8007bb0d06b..e90b2a5fec13 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -88,6 +89,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,6 +109,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -113,11 +123,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -161,12 +166,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
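            # (ssl_credentials below now carries either the explicitly passed
            # channel credentials or ones just derived from
            # client_cert_source_for_mtls)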
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 6adea9ca5bdc..40037da25ed7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -102,6 +102,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -133,6 +134,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -149,6 +154,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -158,11 +168,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -206,12 +211,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index cc6e491fcb4b..850236a573dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -286,21 +286,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -343,7 +339,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 1cb9262abaa5..e55bf32e5cb7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -88,6 +89,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,6 +109,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -113,11 +123,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -161,12 +166,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index ddbd16da633c..ec93d3850280 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -102,6 +102,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -133,6 +134,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -149,6 +154,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
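The transport and client hunks above implement one pattern in two halves: the client now selects a certificate callback (either the explicit one from client options, or the ADC default from google.auth), and the transport converts that callback into channel credentials only when no explicit ``ssl_channel_credentials`` was passed. A condensed, hedged sketch of the combined flow (the helper name here is hypothetical; ``mtls.has_default_client_cert_source``, ``mtls.default_client_cert_source``, and ``grpc.ssl_channel_credentials`` are the real APIs):

    import grpc
    from google.auth.transport import mtls

    def pick_client_cert_source(explicit_source=None):
        # Hypothetical helper: prefer an explicit callback, otherwise fall
        # back to the ADC default client certificate if one is available.
        if explicit_source is not None:
            return explicit_source
        if mtls.has_default_client_cert_source():
            return mtls.default_client_cert_source()
        return None

    cert_source = pick_client_cert_source()
    if cert_source is not None:
        # What the transports above do when ssl_channel_credentials is absent:
        cert, key = cert_source()  # PEM-encoded (cert chain, private key) bytes
        channel_credentials = grpc.ssl_channel_credentials(
            certificate_chain=cert, private_key=key
        )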
@@ -158,11 +168,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -206,12 +211,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 9e8a2ffc4246..9ca738bf8df5 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "11976b9f3834e4ab976940fe88c2f5784d1b1795" + "sha": "18a0bb7fd8d158baef532b1c715524e5f6303311" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "520682435235d9c503983a360a2090025aa47cd1", - "internalRef": "350246057" + "sha": "20712b8fe95001b312f62c6c5f33e3e3ec92cfaf", + "internalRef": "354996675" } }, { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index a2685b497c5f..c819769ee03b 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -175,7 +175,7 @@ def test_config_service_v2_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -191,7 +191,7 @@ def test_config_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -207,7 +207,7 @@ def test_config_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -235,7 +235,7 @@ def test_config_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -296,29 +296,25 @@ def test_config_service_v2_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = 
client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -327,66 +323,53 @@ def test_config_service_v2_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -412,7 +395,7 @@ def test_config_service_v2_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -442,7 +425,7 @@ def test_config_service_v2_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -461,7 +444,7 @@ def test_config_service_v2_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -5306,6 +5289,56 @@ def test_config_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_config_service_v2_host_no_port(): client = ConfigServiceV2Client( credentials=credentials.AnonymousCredentials(), @@ -5350,6 +5383,8 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -5407,6 +5442,8 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 110a383c05fe..c811acac5374 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -182,7 +182,7 @@ def test_logging_service_v2_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -198,7 +198,7 @@ def test_logging_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -214,7 +214,7 @@ def test_logging_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -242,7 +242,7 @@ def test_logging_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -303,29 +303,25 @@ def test_logging_service_v2_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
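The parametrized tests in these files toggle mutual TLS through the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable. For reference, the gate reduces to the line below, mirroring the ``util.strtobool`` call visible in the metrics client hunk earlier in this patch (the ``distutils`` import is an assumption about how the generated clients spell it):

    import os
    from distutils import util

    # mTLS is only attempted when the variable is explicitly "true";
    # it defaults to "false", so a plain TLS channel is used otherwise.
    use_client_cert = bool(
        util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
    )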
@@ -334,66 +330,53 @@ def test_logging_service_v2_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -419,7 +402,7 @@ def test_logging_service_v2_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -449,7 +432,7 @@ def test_logging_service_v2_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -468,7 +451,7 @@ def test_logging_service_v2_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1971,6 +1954,57 @@ def test_logging_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_logging_service_v2_host_no_port(): client = LoggingServiceV2Client( credentials=credentials.AnonymousCredentials(), @@ -2015,6 +2049,8 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2073,6 +2109,8 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 8ae5fdc5481d..7230b0d870d2 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -181,7 +181,7 @@ def test_metrics_service_v2_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -197,7 +197,7 @@ def test_metrics_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -213,7 +213,7 @@ def test_metrics_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -241,7 +241,7 @@ def test_metrics_service_v2_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -302,29 +302,25 @@ def test_metrics_service_v2_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = 
client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -333,66 +329,53 @@ def test_metrics_service_v2_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -418,7 +401,7 @@ def test_metrics_service_v2_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -448,7 +431,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -467,7 +450,7 @@ def test_metrics_service_v2_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1916,6 +1899,57 @@ def test_metrics_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_metrics_service_v2_host_no_port(): client = MetricsServiceV2Client( credentials=credentials.AnonymousCredentials(), @@ -1960,6 +1994,8 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2018,6 +2054,8 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ From 24e5e2efd6a897737fd922ace31ce6eb938b85c7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 26 Feb 2021 20:00:36 +0100 Subject: [PATCH 394/855] chore(deps): update dependency google-cloud-bigquery to v2.10.0 (#180) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index d85a198523bf..131f3bbeeeb6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 +google-cloud-bigquery==2.10.0 google-cloud-storage==1.36.1 google-cloud-pubsub==2.4.0 -google-cloud-bigquery==2.7.0 From 06cbcedf1cbf81a20bee11e250454a0909c8f99d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 5 Mar 2021 14:17:46 -0800 Subject: [PATCH 395/855] feat: detect monitored resources on all GCP environments (#200) --- .../google/cloud/logging_v2/client.py | 27 +- .../handlers/_monitored_resources.py | 195 ++++++++ .../cloud/logging_v2/handlers/app_engine.py | 14 +- .../cloud/logging_v2/handlers/handlers.py | 9 +- .../tests/environment/.gitignore | 4 + .../tests/environment/README.md | 10 +- .../environment/deployable/go/.dockerignore | 2 + .../environment/deployable/go/Dockerfile | 42 ++ .../tests/environment/deployable/go/go.mod | 11 + .../tests/environment/deployable/go/go.sum | 441 ++++++++++++++++++ .../tests/environment/deployable/go/main.go | 111 +++++ .../environment/deployable/python/snippets.py | 24 +- .../envctl/env_scripts/go/cloudrun.sh | 105 +++++ .../envctl/env_scripts/python/cloudrun.sh | 11 +- .../tests/environment/noxfile.py | 2 +- .../tests/environment/renovate.json | 5 + .../environment/tests/common/appengine.py | 34 ++ .../tests/environment/tests/common/common.py | 33 +- .../tests/environment/tests/go/__init__.py | 13 + .../tests/go/test_appengine_flex_container.py | 26 ++ .../tests/go/test_appengine_standard.py | 26 ++ .../environment/tests/go/test_cloudrun.py | 26 ++ .../environment/tests/go/test_compute.py | 26 ++ .../environment/tests/go/test_functions.py | 26 ++ 
.../environment/tests/go/test_kubernetes.py | 26 ++ .../python/test_appengine_flex_container.py | 3 +- .../python/test_appengine_flex_python.py | 3 +- .../tests/python/test_appengine_standard.py | 3 +- .../environment/tests/python/test_cloudrun.py | 15 + .../environment/tests/python/test_compute.py | 10 + .../tests/python/test_functions.py | 10 + .../tests/python/test_kubernetes.py | 14 + .../handlers/test__monitored_resources.py | 249 ++++++++++ .../tests/unit/handlers/test_handlers.py | 31 +- .../tests/unit/test_client.py | 8 +- tests/environment | 2 +- 36 files changed, 1529 insertions(+), 68 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py create mode 100644 packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile create mode 100644 packages/google-cloud-logging/tests/environment/deployable/go/go.mod create mode 100644 packages/google-cloud-logging/tests/environment/deployable/go/go.sum create mode 100644 packages/google-cloud-logging/tests/environment/deployable/go/main.go create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh create mode 100644 packages/google-cloud-logging/tests/environment/renovate.json create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/appengine.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_compute.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_functions.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py create mode 100644 packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index ee65d288a093..f196f443a27d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -29,7 +29,6 @@ from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.logging_v2._helpers import _add_defaults_to_filter -from google.cloud.logging_v2._helpers import retrieve_metadata_server from google.cloud.logging_v2._http import Connection from google.cloud.logging_v2._http import _LoggingAPI as JSONLoggingAPI from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI @@ -39,6 +38,9 @@ from google.cloud.logging_v2.handlers import ContainerEngineHandler from google.cloud.logging_v2.handlers import setup_logging from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS +from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource + from google.cloud.logging_v2.logger import Logger from google.cloud.logging_v2.metric import Metric @@ -48,14 +50,8 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) _USE_GRPC = 
_HAVE_GRPC and not _DISABLE_GRPC -_APPENGINE_FLEXIBLE_ENV_VM = "GAE_APPENGINE_HOSTNAME" -"""Environment variable set in App Engine when vm:true is set.""" - -_APPENGINE_INSTANCE_ID = "GAE_INSTANCE" -"""Environment variable set in App Engine standard and flexible environment.""" - -_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" -"""Attribute in metadata server when in GKE environment.""" +_GAE_RESOURCE_TYPE = "gae_app" +_GKE_RESOURCE_TYPE = "k8s_container" class Client(ClientWithProject): @@ -348,17 +344,20 @@ def get_default_handler(self, **kw): Returns: logging.Handler: The default log handler based on the environment """ - gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) + monitored_resource = kw.pop("resource", detect_resource(self.project)) if ( - _APPENGINE_FLEXIBLE_ENV_VM in os.environ - or _APPENGINE_INSTANCE_ID in os.environ + isinstance(monitored_resource, Resource) + and monitored_resource.type == _GAE_RESOURCE_TYPE ): return AppEngineHandler(self, **kw) - elif gke_cluster_name is not None: + elif ( + isinstance(monitored_resource, Resource) + and monitored_resource.type == _GKE_RESOURCE_TYPE + ): return ContainerEngineHandler(**kw) else: - return CloudLoggingHandler(self, **kw) + return CloudLoggingHandler(self, resource=monitored_resource, **kw) def setup_logging( self, *, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py new file mode 100644 index 000000000000..4bc30d4fb361 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -0,0 +1,195 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2._helpers import retrieve_metadata_server + +_GAE_SERVICE_ENV = "GAE_SERVICE" +_GAE_VERSION_ENV = "GAE_VERSION" +_GAE_INSTANCE_ENV = "GAE_INSTANCE" +_GAE_ENV_VARS = [_GAE_SERVICE_ENV, _GAE_VERSION_ENV, _GAE_INSTANCE_ENV] +"""Environment variables set in App Engine environment.""" + +_CLOUD_RUN_SERVICE_ID = "K_SERVICE" +_CLOUD_RUN_REVISION_ID = "K_REVISION" +_CLOUD_RUN_CONFIGURATION_ID = "K_CONFIGURATION" +_CLOUD_RUN_ENV_VARS = [ + _CLOUD_RUN_SERVICE_ID, + _CLOUD_RUN_REVISION_ID, + _CLOUD_RUN_CONFIGURATION_ID, +] +"""Environment variables set in Cloud Run environment.""" + +_FUNCTION_TARGET = "FUNCTION_TARGET" +_FUNCTION_SIGNATURE = "FUNCTION_SIGNATURE_TYPE" +_FUNCTION_NAME = "FUNCTION_NAME" +_FUNCTION_REGION = "FUNCTION_REGION" +_FUNCTION_ENTRY = "ENTRY_POINT" +_FUNCTION_ENV_VARS = [_FUNCTION_TARGET, _FUNCTION_SIGNATURE, _CLOUD_RUN_SERVICE_ID] +_LEGACY_FUNCTION_ENV_VARS = [_FUNCTION_NAME, _FUNCTION_REGION, _FUNCTION_ENTRY] +"""Environment variables set in Cloud Functions environments.""" + + +_REGION_ID = "instance/region" +_ZONE_ID = "instance/zone" +_GCE_INSTANCE_ID = "instance/id" +"""Attribute in metadata server for compute region and instance.""" + +_GKE_CLUSTER_NAME = "instance/attributes/cluster-name" +"""Attribute in metadata server when in GKE environment.""" + + +def _create_functions_resource(project): + """Create a standardized Cloud Functions resource. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ + region = retrieve_metadata_server(_REGION_ID) + if _FUNCTION_NAME in os.environ: + function_name = os.environ.get(_FUNCTION_NAME) + elif _CLOUD_RUN_SERVICE_ID in os.environ: + function_name = os.environ.get(_CLOUD_RUN_SERVICE_ID) + else: + function_name = "" + resource = Resource( + type="cloud_function", + labels={ + "project_id": project, + "function_name": function_name, + "region": region if region else "", + }, + ) + return resource + + +def _create_kubernetes_resource(project): + """Create a standardized Kubernetes resource. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ + zone = retrieve_metadata_server(_ZONE_ID) + cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) + + resource = Resource( + type="k8s_container", + labels={ + "project_id": project, + "location": zone if zone else "", + "cluster_name": cluster_name if cluster_name else "", + }, + ) + return resource + + +def _create_compute_resource(project): + """Create a standardized Compute Engine resource. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ + instance = retrieve_metadata_server(_GCE_INSTANCE_ID) + zone = retrieve_metadata_server(_ZONE_ID) + resource = Resource( + type="gce_instance", + labels={ + "project_id": project, + "instance_id": instance if instance else "", + "zone": zone if zone else "", + }, + ) + return resource + + +def _create_cloud_run_resource(project): + """Create a standardized Cloud Run resource. 
+ Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ + region = retrieve_metadata_server(_REGION_ID) + resource = Resource( + type="cloud_run_revision", + labels={ + "project_id": project, + "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""), + "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""), + "location": region if region else "", + "configuration_name": os.environ.get(_CLOUD_RUN_CONFIGURATION_ID, ""), + }, + ) + return resource + + +def _create_app_engine_resource(project): + """Create a standardized App Engine resource. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ + zone = retrieve_metadata_server(_ZONE_ID) + resource = Resource( + type="gae_app", + labels={ + "project_id": project, + "module_id": os.environ.get(_GAE_SERVICE_ENV, ""), + "version_id": os.environ.get(_GAE_VERSION_ENV, ""), + "zone": zone if zone else "", + }, + ) + return resource + + +def _create_global_resource(project): + return Resource(type="global", labels={"project_id": project}) + + +def detect_resource(project): + """Return the default monitored resource based on the local environment. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource: The default resource based on the environment + """ + gke_cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) + gce_instance_name = retrieve_metadata_server(_GCE_INSTANCE_ID) + + if all([env in os.environ for env in _GAE_ENV_VARS]): + # App Engine Flex or Standard + return _create_app_engine_resource(project) + elif gke_cluster_name is not None: + # Kubernetes Engine + return _create_kubernetes_resource(project) + elif all([env in os.environ for env in _LEGACY_FUNCTION_ENV_VARS]) or all( + [env in os.environ for env in _FUNCTION_ENV_VARS] + ): + # Cloud Functions + return _create_functions_resource(project) + elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]): + # Cloud Run + return _create_cloud_run_resource(project) + elif gce_instance_name is not None: + # Compute Engine + return _create_compute_resource(project) + else: + # use generic global resource + return _create_global_resource(project) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index a5d57c53e375..7d16ab07a6f4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -22,8 +22,10 @@ import os from google.cloud.logging_v2.handlers._helpers import get_request_data +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_app_engine_resource, +) from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport -from google.cloud.logging_v2.resource import Resource _DEFAULT_GAE_LOGGER_NAME = "app" @@ -75,15 +77,7 @@ def get_gae_resource(self): Returns: google.cloud.logging_v2.resource.Resource: Monitored resource for GAE. """ - gae_resource = Resource( - type="gae_app", - labels={ - "project_id": self.project_id, - "module_id": self.module_id, - "version_id": self.version_id, - }, - ) - return gae_resource + return _create_app_engine_resource(self.project_id) def get_gae_labels(self): """Return the labels for GAE app. 
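Before the handler changes that follow, a hedged sketch of what the new ``detect_resource`` entry point returns. The environment variable values below are hypothetical, and the sketch assumes it runs off GCP, where the metadata server is unreachable, so the App Engine, GKE, and Functions checks fall through and metadata-derived labels default to empty strings:

    import os
    from google.cloud.logging_v2.handlers._monitored_resources import detect_resource

    # Hypothetical Cloud Run environment (the K_* variables are set by the platform):
    os.environ["K_SERVICE"] = "my-service"
    os.environ["K_REVISION"] = "my-service-00001-abc"
    os.environ["K_CONFIGURATION"] = "my-service"

    resource = detect_resource("my-project")
    print(resource.type)    # "cloud_run_revision"
    print(resource.labels)  # service_name/revision_name populated; location "" off GCP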
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index fd99f7adc865..c2ad6f35511a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -17,7 +17,7 @@ import logging from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport -from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource DEFAULT_LOGGER_NAME = "python" @@ -59,7 +59,7 @@ def __init__( *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, + resource=None, labels=None, stream=None, ): @@ -78,12 +78,15 @@ def __init__( :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): - Resource for this Handler. Defaults to ``GLOBAL_RESOURCE``. + Resource for this Handler. If not given, it will be inferred from the environment. labels (Optional[dict]): Additional custom labels attached to each log entry. stream (Optional[IO]): Stream to be used by the handler. """ super(CloudLoggingHandler, self).__init__(stream) + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self.client = client self.transport = transport(client, name) diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore index a81c8ee12195..676ba64117a3 100644 --- a/packages/google-cloud-logging/tests/environment/.gitignore +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -136,3 +136,7 @@ dmypy.json # Cython debug symbols cython_debug/ + +# Go files +deployable/go/google-cloud-go +deployable/go/lib.tar \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md index 97d9a7ab780b..c8b8b90ac854 100644 --- a/packages/google-cloud-logging/tests/environment/README.md +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -54,7 +54,8 @@ Implementation code for each language/environment pair can be found in `envctl/e - destroys an existing environment - `envctl trigger ` - sends a pub/sub message to trigger a snippet in an environment -- `envctl filter_string` + - optional arguments are embedded as [custom attributes](https://cloud.google.com/pubsub/docs/publisher#using_attributes) in Pub/Sub messages +- `envctl filter-string` - returns a filter that finds logs created by the environment - `envctl logs` - returns a list of recent logs from the environment @@ -64,3 +65,10 @@ Implementation code for each language/environment pair can be found in `envctl/e Contains the pytest code that tests each environment. Common logic is stored as abstract super-classes in `tests/common`. Concrete implementations for each environment can be found in `tests/<language>/test_<environment>.py`. Test files in `tests/` can inherit logic from any file in `tests/common` as needed, in order to share test logic between environments.
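Returning to the handlers.py hunk earlier in this commit: omitting ``resource`` now means environment-based inference rather than always attaching the global resource. A hedged usage sketch (project and credentials come from whatever the ambient environment provides):

    import google.cloud.logging
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.resource import Resource

    client = google.cloud.logging.Client()

    # resource omitted: the handler calls detect_resource(client.project) and
    # attaches e.g. a cloud_run_revision or gce_instance resource automatically.
    inferred = CloudLoggingHandler(client)

    # Passing an explicit resource still bypasses detection entirely.
    pinned = CloudLoggingHandler(
        client,
        resource=Resource(type="global", labels={"project_id": client.project}),
    )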
+ +### Shared Tests + +| Test Name | Optional Input | Description | +| -------------- | ---------------- | -------------------------------- | +| `simplelog` | `logname`, `logtext` | Logs a simple text payload | +| `standardlog` | `logname`, `logtext` | Logs a simple text payload using a standard library wrapper | diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore b/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore new file mode 100644 index 000000000000..8d6fb10f7d61 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore @@ -0,0 +1,2 @@ +Dockerfile +*/.nox diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile new file mode 100644 index 000000000000..6efa3a24f003 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile @@ -0,0 +1,42 @@ +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This Dockerfile is moved to the repo root dir upon build. +# Use the official golang image to create a binary. +# This is based on Debian and sets the GOPATH to /go. +# https://hub.docker.com/_/golang +FROM golang:1.15-buster as builder + +# Create and change to the app directory. +WORKDIR /app + +# Copy contents of current directory into app/ +COPY . ./ + +# Build the binary +RUN go mod download +RUN go build -v -o server + +# Use the official Debian slim image for a lean production container. +# REQUIRED as Cloud Run is capped at 256M memory limit +FROM debian:buster-slim +RUN set -x && apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ + ca-certificates && \ + rm -rf /var/lib/apt/lists/* + +# Copy the binary to the production image from the builder stage. +COPY --from=builder /app/server /app/server + +# Run the web service on container startup. +CMD ["/app/server"] \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod new file mode 100644 index 000000000000..c90011c2c812 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod @@ -0,0 +1,11 @@ +module github.com/googleapis/env-tests-logging/deployable/go/main + +go 1.15 + +require ( + cloud.google.com/go v0.78.0 + cloud.google.com/go/logging v1.3.0 +) + +replace cloud.google.com/go => ./google-cloud-go/.
+replace cloud.google.com/go/logging => ./google-cloud-go/logging \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum new file mode 100644 index 000000000000..dbd6eeaaca20 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum @@ -0,0 +1,441 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0 h1:oKpsiyKMfVpwR3zSAkQixGzlVE5ovitBuO0qSmCf0bI= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/logging v1.3.0 h1:Ncg2sBr+SUtdqSmwRJ5WwuFG8Um3p/aZmRp6EtD9kPw= +cloud.google.com/go/logging v1.3.0/go.mod h1:pA7Kcvk7H0jQMY5WV03GwzOt07Yjw9xVsQMQC5NUZQQ= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= 
+cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock 
v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod 
h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= 
+go.opencensus.io v0.22.5 h1:dntmOdLpSpHlVqbW5Eay97DelsZHe+55D+xC6i0dDS0= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= 
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net 
v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99 h1:5vD4XjIc0X5+kHZjx4UecYdjA6mJo+XXNoaW0EjU5Os= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210223095934-7937bea0104d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text 
v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= 
+google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0 h1:uWrpz12dpVPn7cojP82mk02XDgTJLDPc2KbVTxrWb4A= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
+google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c h1:7A9LQhrZmuCPI79/sYSbscFqBp4XFYf6oaIQuV1xji4= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210224155714-063164c882e6 h1:bXUwz2WkXXrXgiLxww3vWmoSHLOGv4ipdPdTvKymcKw= +google.golang.org/genproto v0.0.0-20210224155714-063164c882e6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod 
h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0 h1:TwIQcH3es+MojMVojxxfQ3l3OF2KzlRxML2xZq0kRo8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go new file mode 100644 index 000000000000..85da8c741dee --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"context"
+	"encoding/json"
+	"io/ioutil"
+	"log"
+	"net/http"
+	"os"
+
+	// This is replaced by the local version of cloud logging
+	"cloud.google.com/go/compute/metadata"
+	"cloud.google.com/go/logging"
+)
+
+// pubSubMessage is the payload of a Pub/Sub event.
+type pubSubMessage struct {
+	Message struct {
+		Data       []byte            `json:"data,omitempty"`
+		Attributes map[string]string `json:"attributes,omitempty"`
+		ID         string            `json:"id"`
+	} `json:"message"`
+	Subscription string `json:"subscription"`
+}
+
+// pubsubHTTP processes a Pub/Sub push message delivered over HTTP (Cloud Run).
+func pubsubHTTP(w http.ResponseWriter, r *http.Request) {
+	var m pubSubMessage
+	body, err := ioutil.ReadAll(r.Body)
+	if err != nil {
+		log.Printf("ioutil.ReadAll: %v", err)
+		http.Error(w, "Bad Request", http.StatusBadRequest)
+		return
+	}
+	if err := json.Unmarshal(body, &m); err != nil {
+		log.Printf("json.Unmarshal: %v", err)
+		http.Error(w, "Bad Request", http.StatusBadRequest)
+		return
+	}
+
+	msg := string(m.Message.Data)
+	args := m.Message.Attributes
+
+	switch msg {
+	case "simplelog":
+		simplelog(args)
+	case "stdLog":
+		break
+	default:
+		break
+	}
+}
+
+func main() {
+	if os.Getenv("ENABLE_SUBSCRIBER") == "" {
+
+		// Set up Pub/Sub push handling for Cloud Run
+		http.HandleFunc("/", pubsubHTTP)
+
+		port := os.Getenv("PORT")
+		if port == "" {
+			port = "8080"
+			log.Printf("Defaulting to port %s", port)
+		}
+		log.Printf("Listening on port %s", port)
+		if err := http.ListenAndServe(":"+port, nil); err != nil {
+			log.Fatal(err)
+		}
+	}
+}
+
+// [Optional] envctl go trigger simplelog logname=foo,logtext=bar
+func simplelog(args map[string]string) {
+	ctx := context.Background()
+	projectID, err := metadata.ProjectID()
+	if err != nil {
+		log.Fatalf("metadata.ProjectID: %v", err)
+	}
+	client, err := logging.NewClient(ctx, projectID)
+	if err != nil {
+		log.Fatalf("Failed to create client: %v", err)
+	}
+	defer client.Close()
+
+	logname := "my-log"
+	if val, ok := args["logname"]; ok {
+		logname = val
+	}
+
+	logtext := "hello world"
+	if val, ok := args["logtext"]; ok {
+		logtext = val
+	}
+
+	logger := client.Logger(logname).StandardLogger(logging.Info)
+	logger.Println(logtext)
+}
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
index 443da3529942..4c7695012c51 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
+++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
@@ -15,8 +15,10 @@
 import logging
 import os
 
+
 try:
     import google.cloud.logging
+    from google.cloud.logging_v2._helpers import retrieve_metadata_server
 except ImportError:
     # import at runtime for GAE environments
     import pip
@@ -26,15 +28,16 @@
     pip.main(["install", "-e", "./python-logging"])
     importlib.reload(site)
     import google.cloud.logging
+    from google.cloud.logging_v2._helpers import retrieve_metadata_server
 
 
-def simple_log(log_name=None, log_text="simple_log", **kwargs):
-    client = google.cloud.logging.Client()
-    logger = client.logger(log_name)
-    logger.log_text(log_text)
+# def simple_log(log_name=None, log_text="simple_log", **kwargs):
+#     client = google.cloud.logging.Client()
+#     logger = client.logger(log_name)
+#     logger.log_text(log_text)
 
 
-def pylogging(log_text="pylogging", severity="warning", **kwargs):
+def simplelog(log_text="pylogging", severity="warning", **kwargs):
     # allowed severity: debug, info, warning, error, critical
     if severity == "debug":
         logging.debug(log_text)
@@ -47,11 +50,13 @@
     else:
         logging.critical(log_text)
 
+
 def print_handlers(**kwargs):
     root_logger = logging.getLogger()
-    handlers_str = ', '.join([type(h).__name__ for h in root_logger.handlers])
+    handlers_str = ", ".join([type(h).__name__ for h in root_logger.handlers])
     logging.info(handlers_str)
 
+
 def remove_stream_handlers(**kwargs):
     logger = logging.getLogger()
     for handler in logger.handlers:
@@ -59,6 +64,7 @@
         logging.error(handler)
         logger.removeHandler(handler)
 
+
 def print_env_vars(env_var=None, **kwargs):
     if env_var:
         value = os.environ.get(env_var, None)
@@ -69,3 +75,9 @@
     else:
         logging.error(os.environ)
 
+
+def get_metadata_server(metadata_key=None, **kwargs):
+    if metadata_key is None:
+        metadata_key = ""
+    data = retrieve_metadata_server(metadata_key)
+    logging.error(f"key: {metadata_key}, data:{data}")
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh
new file mode 100644
index 000000000000..a0a2331e7a37
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # fail if any step in a pipe fails
+set -u # exit on undefined variables
+
+# Note: service names have a max character count, so only the first 9 characters of ENVCTL_ID are used
+SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 9)"
+SA_NAME=$SERVICE_NAME-invoker
+LIBRARY_NAME="google-cloud-go"
+
+add_service_accounts() {
+  set +e
+  local PROJECT_ID=$(gcloud config list --format 'value(core.project)')
+  local PROJECT_NUMBER=$(gcloud projects list --filter=$PROJECT_ID --format="value(PROJECT_NUMBER)")
+  gcloud projects add-iam-policy-binding $PROJECT_ID \
+    --member=serviceAccount:service-$PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com \
+    --role=roles/iam.serviceAccountTokenCreator 2> /dev/null
+  gcloud iam service-accounts create $SA_NAME \
+    --display-name "Pub/Sub Invoker" 2> /dev/null
+  gcloud run services add-iam-policy-binding $SERVICE_NAME \
+    --member=serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com \
+    --role=roles/run.invoker 2> /dev/null
+  RUN_URL=$(gcloud run services list --filter=$SERVICE_NAME --format="value(URL)")
+  gcloud pubsub subscriptions create $SERVICE_NAME-subscriber --topic $SERVICE_NAME \
+    --push-endpoint=$RUN_URL \
+    --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com 2> /dev/null
+  set -e
+}
+
+build_go_container() {
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  # copy super-repo into deployable dir
+  _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"}
+  _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
+
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT
+  tar -cvf $_deployable_dir/lib.tar --exclude internal/logging --exclude .nox --exclude docs --exclude __pycache__ .
+  popd
+  mkdir -p $_deployable_dir/$LIBRARY_NAME
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME
+  # build container
+  docker build -t $GCR_PATH $_deployable_dir
+  docker push $GCR_PATH
+}
+
+deploy() {
+  build_go_container
+
+  gcloud config set run/platform managed
+  gcloud config set run/region us-west1
+  gcloud run deploy \
+    --allow-unauthenticated \
+    --image $GCR_PATH \
+    $SERVICE_NAME
+
+  # Create the Pub/Sub subscription and invoker service account
+  add_service_accounts
+}
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service account
+  gcloud iam service-accounts delete $SA_NAME@$PROJECT_ID.iam.gserviceaccount.com -q 2> /dev/null
+  # delete container images
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null
+  # delete service
+  gcloud run services delete $SERVICE_NAME -q 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud run services describe $SERVICE_NAME > /dev/null 2> /dev/null
+  if [[ $?
== 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +filter-string() { + echo "resource.type=\"global\"" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh index 1fe9461e53e3..4da3dc154099 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh @@ -27,16 +27,16 @@ add_service_accounts() { local PROJECT_NUMBER=$(gcloud projects list --filter=$PROJECT_ID --format="value(PROJECT_NUMBER)") gcloud projects add-iam-policy-binding $PROJECT_ID \ --member=serviceAccount:service-$PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com \ - --role=roles/iam.serviceAccountTokenCreator 2> /dev/null + --role=roles/iam.serviceAccountTokenCreator gcloud iam service-accounts create $SA_NAME \ - --display-name "Pub/Sub Invoker" 2> /dev/null + --display-name "Pub/Sub Invoker" gcloud run services add-iam-policy-binding $SERVICE_NAME \ --member=serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com \ - --role=roles/run.invoker 2> /dev/null + --role=roles/run.invoker RUN_URL=$(gcloud run services list --filter=$SERVICE_NAME --format="value(URL)") gcloud pubsub subscriptions create $SERVICE_NAME-subscriber --topic $SERVICE_NAME \ --push-endpoint=$RUN_URL \ - --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com 2> /dev/null + --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com set -e } @@ -46,7 +46,7 @@ destroy() { gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null # delete service account - gcloud iam service-accounts delete $SA_NAME -q 2> /dev/null + gcloud iam service-accounts delete $SA_NAME@$PROJECT_ID.iam.gserviceaccount.com -q 2> /dev/null # delete container images export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null @@ -73,7 +73,6 @@ deploy() { gcloud config set run/platform managed gcloud config set run/region us-west1 gcloud run deploy \ - --allow-unauthenticated \ --image $GCR_PATH \ --update-env-vars ENABLE_FLASK=true \ $SERVICE_NAME diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index d62450baf0a6..9aca74865f7f 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -129,7 +129,7 @@ def blacken(session: nox.sessions.Session) -> None: "functions", ], ) -@nox.parametrize("language", ["python"]) +@nox.parametrize("language", ["python", "go"]) def tests(session, language, platform): """Run the e2e environment test suite.""" if os.environ.get("RUN_ENV_TESTS", "true") == "false": diff --git a/packages/google-cloud-logging/tests/environment/renovate.json b/packages/google-cloud-logging/tests/environment/renovate.json new file mode 100644 index 000000000000..f45d8f110c30 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base" + ] +} diff --git a/packages/google-cloud-logging/tests/environment/tests/common/appengine.py b/packages/google-cloud-logging/tests/environment/tests/common/appengine.py new file mode 100644 index 
000000000000..a062af90756e --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/appengine.py @@ -0,0 +1,34 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class CommonAppEngine: + def test_monitored_resource(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[-1].resource + + self.assertEqual(found_resource.type, "gae_app") + self.assertTrue(found_resource.labels["project_id"]) + self.assertTrue(found_resource.labels["module_id"]) + self.assertTrue(found_resource.labels["version_id"]) + self.assertTrue(found_resource.labels["zone"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 9d3f501f388d..e26c83272d1f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -35,10 +35,13 @@ from .script_utils import ScriptRunner from .script_utils import Command + class LogsNotFound(RuntimeError): """raised when filter returns no logs.""" + pass + class Common: _client = Client() # environment name must be set by subclass @@ -50,7 +53,6 @@ def _add_time_condition_to_filter(self, filter_str, timestamp=None): timestamp = datetime.now(timezone.utc) - timedelta(minutes=10) return f'"{filter_str}" AND timestamp > "{timestamp.strftime(time_format)}"' - def _get_logs(self, filter_str=None): if not filter_str: _, filter_str = self._script.run_command(Command.GetFilter) @@ -65,6 +67,25 @@ def _trigger(self, function, **kwargs): args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()]) self._script.run_command(Command.Trigger, [function, args_str]) + @RetryErrors(exception=LogsNotFound, delay=2) + def trigger_and_retrieve(self, log_text, append_uuid=True, max_tries=6): + if append_uuid: + log_text = f"{log_text} - {uuid.uuid1()}" + self._trigger("simplelog", log_text=log_text) + filter_str = self._add_time_condition_to_filter(log_text) + # give the command time to be received + tries = 0 + while tries < max_tries: + # retrieve resulting logs + try: + log_list = self._get_logs(filter_str) + return log_list + except LogsNotFound: + sleep(10) + tries += 1 + # log not found + raise LogsNotFound + @classmethod def setUpClass(cls): if not cls.environment: @@ -95,15 +116,9 @@ def tearDown_class(cls): if not os.getenv("NO_CLEAN"): cls._script.run_command(Command.Destroy) - @RetryErrors(exception=LogsNotFound) def test_receive_log(self): - log_text = f"{inspect.currentframe().f_code.co_name}: {uuid.uuid1()}" - self._trigger("pylogging", log_text=log_text) - # give the command time to be received - sleep(30) - filter_str = self._add_time_condition_to_filter(log_text) - # retrieve resulting logs - log_list = self._get_logs(filter_str) 
+ log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) found_log = None for log in log_list: diff --git a/packages/google-cloud-logging/tests/environment/tests/go/__init__.py b/packages/google-cloud-logging/tests/environment/tests/go/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py new file mode 100644 index 000000000000..56f629dd7ec3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineFlexContainer(Common, unittest.TestCase): + + environment = "appengine_flex_container" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py new file mode 100644 index 000000000000..d5f892d93835 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineStandard(Common, unittest.TestCase): + + environment = "appengine_standard" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py new file mode 100644 index 000000000000..44c9d097b60a --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudRun(Common, unittest.TestCase): + + environment = "cloudrun" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py new file mode 100644 index 000000000000..96ce5786eeaa --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestComputeEngine(Common, unittest.TestCase): + + environment = "compute" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py new file mode 100644 index 000000000000..81ee601ec34f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
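
[Editor's note: the Go test modules being added in this patch are deliberately near-empty; all
behavior lives in Common (plus mixins such as CommonAppEngine), and each concrete class only
pins the environment and language attributes that setUpClass reads. A stripped-down sketch of
that composition; the assertion bodies here are placeholders, not the real checks:

    import unittest

    class Common:
        environment = None  # must be set by subclass
        language = None

        @classmethod
        def setUpClass(cls):
            if not cls.environment:
                raise NotImplementedError("environment must be set by subclass")

        def test_receive_log(self):
            self.assertTrue(self.language)  # real logic lives in common.py

    class CommonAppEngine:
        """Mixin layering GAE-specific checks on top of Common."""

        def test_monitored_resource(self):
            self.assertTrue(self.environment.startswith("appengine"))

    class TestAppEngineStandard(Common, CommonAppEngine, unittest.TestCase):
        environment = "appengine_standard"
        language = "python"

Because unittest collects inherited test_* methods, adding a mixin to a subclass's bases is all
it takes to run an extra suite against that environment.]
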
+ +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py new file mode 100644 index 000000000000..85f7104555f0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py @@ -0,0 +1,26 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestKubernetesEngine(Common, unittest.TestCase): + + environment = "kubernetes" + language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py index 16bb2eae1f19..e8a55ad5341a 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from ..common.appengine import CommonAppEngine -class TestAppEngineFlexContainer(Common, unittest.TestCase): +class TestAppEngineFlexContainer(Common, CommonAppEngine, unittest.TestCase): environment = "appengine_flex_container" language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py index 5c8fdacc9d04..23d1e30c48a4 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from ..common.appengine import CommonAppEngine -class TestAppEngineFlex(Common, unittest.TestCase): +class TestAppEngineFlex(Common, CommonAppEngine, unittest.TestCase): environment = "appengine_flex_python" language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py index e4bb999b02a3..bb93ab8f00b0 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from ..common.appengine import CommonAppEngine -class TestAppEngineStandard(Common, unittest.TestCase): +class TestAppEngineStandard(Common, CommonAppEngine, unittest.TestCase): environment = 
"appengine_standard" language = "python" diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index 1ee08268bfe3..7c83c1179a17 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -14,8 +14,11 @@ import logging import unittest +import inspect +import uuid import google.cloud.logging +from google.cloud.logging_v2.resource import Resource from ..common.common import Common @@ -24,3 +27,15 @@ class TestCloudRun(Common, unittest.TestCase): environment = "cloudrun" language = "python" + + def test_monitored_resource(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[0].resource + + self.assertEqual(found_resource.type, "cloud_run_revision") + self.assertTrue(found_resource.labels["project_id"]) + self.assertTrue(found_resource.labels["service_name"]) + self.assertTrue(found_resource.labels["revision_name"]) + self.assertTrue(found_resource.labels["location"]) + self.assertTrue(found_resource.labels["configuration_name"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py index 4b71761a961f..81b7e3a20d20 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py @@ -14,6 +14,7 @@ import logging import unittest +import inspect import google.cloud.logging @@ -24,3 +25,12 @@ class TestComputeEngine(Common, unittest.TestCase): environment = "compute" language = "python" + + def test_monitored_resource(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[0].resource + + self.assertEqual(found_resource.type, "gce_instance") + self.assertTrue(found_resource.labels["zone"]) + self.assertTrue(found_resource.labels["instance_id"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index b3bb7557f247..de65169913e5 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -14,6 +14,7 @@ import logging import unittest +import inspect import google.cloud.logging @@ -24,3 +25,12 @@ class TestCloudFunctions(Common, unittest.TestCase): environment = "functions" language = "python" + + def test_monitored_resource(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[0].resource + + self.assertEqual(found_resource.type, "cloud_function") + self.assertTrue(found_resource.labels["region"]) + self.assertTrue(found_resource.labels["function_name"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py index e4e5bf59076c..77f4d38cbe22 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py @@ -14,6 +14,7 @@ import 
logging import unittest +import inspect import google.cloud.logging @@ -24,3 +25,16 @@ class TestKubernetesEngine(Common, unittest.TestCase): environment = "kubernetes" language = "python" + + def test_monitored_resource(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[0].resource + + self.assertEqual(found_resource.type, "k8s_container") + self.assertTrue(found_resource.labels["project_id"]) + self.assertTrue(found_resource.labels["location"]) + self.assertTrue(found_resource.labels["cluster_name"]) + self.assertTrue(found_resource.labels["namespace_name"]) + self.assertTrue(found_resource.labels["pod_name"]) + self.assertTrue(found_resource.labels["container_name"]) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py new file mode 100644 index 000000000000..00fade39cc25 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -0,0 +1,249 @@ +# Copyright 2021 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock +import os + + +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_functions_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_app_engine_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_kubernetes_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_cloud_run_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_compute_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_global_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +from google.cloud.logging_v2.handlers import _monitored_resources +from google.cloud.logging_v2.resource import Resource + + +class Test_Create_Resources(unittest.TestCase): + + PROJECT = "test-project" + LOCATION = "test-location" + NAME = "test-name" + CLUSTER = "test-cluster" + VERSION = "1" + CONFIG = "test-config" + + def _mock_metadata(self, endpoint): + if ( + endpoint == _monitored_resources._ZONE_ID + or endpoint == _monitored_resources._REGION_ID + ): + return self.LOCATION + elif ( + endpoint == _monitored_resources._GKE_CLUSTER_NAME + or endpoint == _monitored_resources._GCE_INSTANCE_ID + ): + return self.NAME + else: + return None + + def setUp(self): + os.environ.clear() + + def test_create_legacy_functions_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + + os.environ[_monitored_resources._CLOUD_RUN_SERVICE_ID] = self.NAME + with patch: + legacy_func_resource = _create_functions_resource(self.PROJECT) + + self.assertIsInstance(legacy_func_resource, Resource) + 
self.assertEqual(legacy_func_resource.type, "cloud_function") + self.assertEqual(legacy_func_resource.labels["project_id"], self.PROJECT) + self.assertEqual(legacy_func_resource.labels["function_name"], self.NAME) + self.assertEqual(legacy_func_resource.labels["region"], self.LOCATION) + + def test_create_modern_functions_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + os.environ[_monitored_resources._FUNCTION_NAME] = self.NAME + with patch: + func_resource = _create_functions_resource(self.PROJECT) + + self.assertIsInstance(func_resource, Resource) + self.assertEqual(func_resource.type, "cloud_function") + self.assertEqual(func_resource.labels["project_id"], self.PROJECT) + self.assertEqual(func_resource.labels["function_name"], self.NAME) + self.assertEqual(func_resource.labels["region"], self.LOCATION) + + def test_create_kubernetes_resource(self): + + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + with patch: + resource = _create_kubernetes_resource(self.PROJECT) + + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "k8s_container") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["cluster_name"], self.NAME) + self.assertEqual(resource.labels["location"], self.LOCATION) + + def test_compute_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + + with patch: + resource = _create_compute_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "gce_instance") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["instance_id"], self.NAME) + self.assertEqual(resource.labels["zone"], self.LOCATION) + + def test_cloud_run_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + os.environ[_monitored_resources._CLOUD_RUN_SERVICE_ID] = self.NAME + os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION + os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG + with patch: + resource = _create_cloud_run_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_run_revision") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["service_name"], self.NAME) + self.assertEqual(resource.labels["revision_name"], self.VERSION) + self.assertEqual(resource.labels["configuration_name"], self.CONFIG) + self.assertEqual(resource.labels["location"], self.LOCATION) + + def test_app_engine_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + os.environ[_monitored_resources._GAE_SERVICE_ENV] = self.NAME + os.environ[_monitored_resources._GAE_VERSION_ENV] = self.VERSION + with patch: + resource = _create_app_engine_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "gae_app") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["module_id"], self.NAME) + self.assertEqual(resource.labels["version_id"], self.VERSION) + 
self.assertEqual(resource.labels["zone"], self.LOCATION) + + def test_global_resource(self): + resource = _create_global_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "global") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + + +class Test_Resource_Detection(unittest.TestCase): + + PROJECT = "test-project" + + def _mock_k8s_metadata(self, endpoint): + if ( + endpoint == _monitored_resources._GKE_CLUSTER_NAME + or endpoint == _monitored_resources._GCE_INSTANCE_ID + ): + return "TRUE" + else: + return None + + def _mock_gce_metadata(self, endpoint): + if endpoint == _monitored_resources._GCE_INSTANCE_ID: + return "TRUE" + else: + return None + + def setUp(self): + os.environ.clear() + + def test_detect_appengine(self): + for env in _monitored_resources._GAE_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "gae_app") + + def test_detect_kubernetes(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_k8s_metadata, + ) + with patch: + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "k8s_container") + + def test_detect_functions(self): + for env in _monitored_resources._FUNCTION_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_function") + + def test_detect_legacy_functions(self): + for env in _monitored_resources._LEGACY_FUNCTION_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_function") + + def test_detect_cloud_run(self): + for env in _monitored_resources._CLOUD_RUN_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_run_revision") + + def test_detect_compute_engine(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_gce_metadata, + ) + with patch: + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "gce_instance") + + def test_detection_unknown(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + return_value=None, + ) + with patch: + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "global") diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index d84c1963505d..0e7c63cc409b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -14,6 +14,7 @@ import logging import unittest +import mock class TestCloudLoggingHandler(unittest.TestCase): @@ -31,19 +32,27 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_global_resource, + ) from google.cloud.logging_v2.handlers.handlers import 
DEFAULT_LOGGER_NAME - client = _Client(self.PROJECT) - handler = self._make_one(client, transport=_Transport) - self.assertEqual(handler.name, DEFAULT_LOGGER_NAME) - self.assertIs(handler.client, client) - self.assertIsInstance(handler.transport, _Transport) - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - self.assertIs(handler.resource, _GLOBAL_RESOURCE) - self.assertIsNone(handler.labels) - self.assertIs(handler.stream, sys.stderr) + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + return_value=None, + ) + with patch: + client = _Client(self.PROJECT) + handler = self._make_one(client, transport=_Transport) + self.assertEqual(handler.name, DEFAULT_LOGGER_NAME) + self.assertIs(handler.client, client) + self.assertIsInstance(handler.transport, _Transport) + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) + global_resource = _create_global_resource(self.PROJECT) + self.assertEqual(handler.resource, global_resource) + self.assertIsNone(handler.labels) + self.assertIs(handler.stream, sys.stderr) def test_ctor_explicit(self): import io diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 8083e3c56182..f33f1cbdc861 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -718,7 +718,7 @@ def test_list_metrics_with_paging(self): def test_get_default_handler_app_engine(self): import os from google.cloud._testing import _Monkey - from google.cloud.logging_v2.client import _APPENGINE_FLEXIBLE_ENV_VM + from google.cloud.logging_v2.handlers._monitored_resources import _GAE_ENV_VARS from google.cloud.logging.handlers import AppEngineHandler credentials = _make_credentials() @@ -726,7 +726,9 @@ def test_get_default_handler_app_engine(self): project=self.PROJECT, credentials=credentials, _use_grpc=False ) - with _Monkey(os, environ={_APPENGINE_FLEXIBLE_ENV_VM: "True"}): + gae_env_vars = {var: "TRUE" for var in _GAE_ENV_VARS} + + with _Monkey(os, environ=gae_env_vars): handler = client.get_default_handler() handler.transport.worker.stop() @@ -742,7 +744,7 @@ def test_get_default_handler_container_engine(self): ) patch = mock.patch( - "google.cloud.logging_v2.client.retrieve_metadata_server", + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value="test-gke-cluster", ) diff --git a/tests/environment b/tests/environment index f35514893542..0e331115867c 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit f35514893542dfa29f65214eea96b490f04f3d72 +Subproject commit 0e331115867ca5a26b1efd9d99c43fbb1cb9363b From a85cb680318306377ee029b645c9bcb2286f95b6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 5 Mar 2021 14:39:52 -0800 Subject: [PATCH 396/855] test: add functions_37 environment test config (#204) --- .../environment/functions_37/common.cfg | 39 +++++++++++++++++++ .../environment/functions_37/continuous.cfg | 1 + .../environment/functions_37/presubmit.cfg | 1 + packages/google-cloud-logging/.trampolinerc | 1 + packages/google-cloud-logging/synth.py | 2 +- .../envctl/env_scripts/python/functions.sh | 2 +- tests/environment | 2 +- 7 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg create mode 
100644 packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg new file mode 100644 index 000000000000..4daa52db767b --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg @@ -0,0 +1,39 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + + +# Specify which tests to run +env_vars: { + key: "ENVIRONMENT" + value: "functions" +} + +env_vars: { + key: "RUNTIME" + value: "python37" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/environment_tests.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index 2c845a3de623..6f984309b209 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -20,6 +20,7 @@ required_envvars+=() # Add env vars which are passed down into the container here. 
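
[Editor's note: the functions_37 job above selects its target purely through the ENVIRONMENT and
RUNTIME env_vars, and the .trampolinerc and synth.py hunks just below keep RUNTIME on the list of
variables passed down into the Docker container. synthtool's s.replace, used in the synth.py hunk
below, is in essence a regex substitution over a file; a standard-library sketch of the same
edit, assuming .trampolinerc exists in the working directory:

    import re
    from pathlib import Path

    def replace_in_file(path, pattern, repl):
        # Single-pass regex substitution, like synthtool's s.replace.
        text = Path(path).read_text()
        Path(path).write_text(re.sub(pattern, repl, text))

    # Mirrors the synth.py call in this patch: re-inject ENVIRONMENT and
    # RUNTIME at the head of the pass_down_envvars array after regeneration.
    replace_in_file(
        ".trampolinerc",
        r"pass_down_envvars\+\=\(",
        'pass_down_envvars+=(\n    "ENVIRONMENT"\n    "RUNTIME"',
    )

Recording the edit in synth.py matters because autosynth would otherwise overwrite the
hand-edited .trampolinerc on the next regeneration.]
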
pass_down_envvars+=( "ENVIRONMENT" + "RUNTIME" "STAGING_BUCKET" "V2_STAGING_BUCKET" "NOX_SESSION" diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/synth.py index 4ee4bd1df2a2..7f7008a39cbd 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/synth.py @@ -77,7 +77,7 @@ s.replace( ".trampolinerc", "pass_down_envvars\+\=\(", - 'pass_down_envvars+=(\n "ENVIRONMENT"' + 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"' ) # don't lint environment tests diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh index 36fcb6e08855..9e902a2bbf7b 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -48,7 +48,7 @@ deploy() { set +e gcloud pubsub topics create $SERVICE_NAME 2>/dev/null set -e - local RUNTIME="${2:-python38}" + RUNTIME="${RUNTIME:-python38}" # set up deployment directory # copy over local copy of library pushd $SUPERREPO_ROOT diff --git a/tests/environment b/tests/environment index 0e331115867c..74542ed04061 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 0e331115867ca5a26b1efd9d99c43fbb1cb9363b +Subproject commit 74542ed0406167670870b7db1b170562973ede06 From c19616007d581663012d3112af04ca37f9dd7807 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 5 Mar 2021 14:42:24 -0800 Subject: [PATCH 397/855] chore: report library test failures to flakybot (#199) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * build(python): enable flakybot on library unit and system tests Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Feb 17 14:10:46 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: d17674372e27fb8f23013935e794aa37502071aa Source-Link: https://github.com/googleapis/synthtool/commit/d17674372e27fb8f23013935e794aa37502071aa --- packages/google-cloud-logging/.gitignore | 4 +++- packages/google-cloud-logging/.kokoro/build.sh | 10 ++++++++++ .../.kokoro/test-samples.sh | 8 ++++---- .../.kokoro/trampoline_v2.sh | 2 +- packages/google-cloud-logging/noxfile.py | 17 +++++++++++++++-- packages/google-cloud-logging/synth.metadata | 6 +++--- 6 files changed, 36 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore index b9daa52f118d..b4243ced74e4 100644 --- a/packages/google-cloud-logging/.gitignore +++ b/packages/google-cloud-logging/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index 7145c57b0bf4..cb17176f08ef 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
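
[Editor's note: the build.sh hunk continuing below wires FlakyBot reporting in via a shell EXIT
trap, so continuous builds upload their sponge logs even when the nox run fails. The same
run-on-exit idea expressed in Python for comparison; this is a sketch, not the CI code, and it
assumes the Kokoro environment variables shown in the diff are present:

    import atexit
    import os
    import subprocess

    def report_to_flakybot():
        # Mirrors the cleanup() trap: mark the binary executable, then run it.
        flakybot = os.path.join(
            os.environ["KOKORO_GFILE_DIR"], "linux_amd64", "flakybot"
        )
        os.chmod(flakybot, 0o755)
        subprocess.run([flakybot], check=False)  # best effort; never fail the build

    if "continuous" in os.environ.get("KOKORO_BUILD_ARTIFACTS_SUBDIR", ""):
        atexit.register(report_to_flakybot)

Registering the reporter up front, rather than calling it after the tests, is what guarantees it
runs on failure paths too.]
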
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index ba97b53d500c..e75891832fd4 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh index 719bcd5ba84d..4af6cdc26dbc 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 19eef5d15cfb..e29c56e4210d 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -91,6 +91,7 @@ def default(session): session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -145,9 +146,21 @@ def system(session): # Run py.test against the system tests. 
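
[Editor's note: the noxfile.py hunks around this point thread a --junitxml flag through every
pytest invocation so each session emits a *sponge_log.xml that FlakyBot (and the new .gitignore
rule) expect. A minimal nox session showing the shape of that invocation; the session name and
test path are illustrative:

    import nox

    @nox.session(python="3.8")
    def unit(session):
        session.install("pytest")
        session.run(
            "pytest",
            "--quiet",
            # One XML per interpreter, matching the *sponge_log.xml ignore rule.
            f"--junitxml=unit_{session.python}_sponge_log.xml",
            "tests/unit",
            *session.posargs,
        )

Keying the file name on session.python keeps parallel interpreter runs from clobbering each
other's reports.]
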
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 9ca738bf8df5..98c65e06acd1 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "18a0bb7fd8d158baef532b1c715524e5f6303311" + "sha": "de9c4c8bca265f6ecf98ecb9aa145891586232c8" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "d17674372e27fb8f23013935e794aa37502071aa" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "d17674372e27fb8f23013935e794aa37502071aa" } } ], From c962e2f2395727f465b2a6a5cc4da68ea7aba930 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 8 Mar 2021 11:15:39 -0800 Subject: [PATCH 398/855] test: install pyopenssl for mtls testing (#206) --- packages/google-cloud-logging/noxfile.py | 3 +++ packages/google-cloud-logging/synth.metadata | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index e29c56e4210d..1183ca5fbd0b 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -121,6 +121,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 98c65e06acd1..725861a9049f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "de9c4c8bca265f6ecf98ecb9aa145891586232c8" + "sha": "867362e803e7c11677df39f600d69d9506b3f0f8" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d17674372e27fb8f23013935e794aa37502071aa" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d17674372e27fb8f23013935e794aa37502071aa" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } } ], From f9a1bfcf26e16c3c29a84bd27bad69fab90022ac Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 8 Mar 2021 11:16:04 -0800 Subject: [PATCH 399/855] chore: Re-generated to pick up changes from googleapis. (#198) * chore: update gapic-generator-python PiperOrigin-RevId: 355923884 Source-Author: Google APIs Source-Date: Fri Feb 5 14:04:52 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 5e3dacee19405529b841b53797df799c2383536c Source-Link: https://github.com/googleapis/googleapis/commit/5e3dacee19405529b841b53797df799c2383536c * chore: update gapic-generator-python to 0.40.11 PiperOrigin-RevId: 359562873 Source-Author: Google APIs Source-Date: Thu Feb 25 10:52:32 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 07932bb995e7dc91b43620ea8402c6668c7d102c Source-Link: https://github.com/googleapis/googleapis/commit/07932bb995e7dc91b43620ea8402c6668c7d102c --- .../config_service_v2/async_client.py | 32 +- .../services/config_service_v2/pagers.py | 11 +- .../logging_service_v2/async_client.py | 32 +- .../services/logging_service_v2/client.py | 15 +- .../services/logging_service_v2/pagers.py | 11 +- .../metrics_service_v2/async_client.py | 32 +- .../services/metrics_service_v2/pagers.py | 11 +- packages/google-cloud-logging/synth.metadata | 4 +- .../tests/unit/gapic/logging_v2/__init__.py | 15 + .../logging_v2/test_config_service_v2.py | 382 +++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 94 ++++- .../logging_v2/test_metrics_service_v2.py | 96 ++++- 12 files changed, 709 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 9603b3754c8f..a82d99b6e8e8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -90,8 +90,36 @@ class ConfigServiceV2AsyncClient: ConfigServiceV2Client.parse_common_location_path ) - from_service_account_info = ConfigServiceV2Client.from_service_account_info - from_service_account_file = ConfigServiceV2Client.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. + """ + return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. + """ + return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index af5c5faf6af0..f656fef0d8b0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.logging_v2.types import logging_config diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 0c1ae3fae897..5afd77be56fd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -87,8 +87,36 @@ class LoggingServiceV2AsyncClient: LoggingServiceV2Client.parse_common_location_path ) - from_service_account_info = LoggingServiceV2Client.from_service_account_info - from_service_account_file = LoggingServiceV2Client.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. + """ + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. 
+ """ + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 0e046c957313..00d758ab55b1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -578,12 +578,10 @@ def write_log_entries( request.log_name = log_name if resource is not None: request.resource = resource - - if labels: - request.labels.update(labels) - - if entries: - request.entries.extend(entries) + if labels is not None: + request.labels = labels + if entries is not None: + request.entries = entries # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -699,14 +697,13 @@ def list_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. + if resource_names is not None: + request.resource_names = resource_names if filter is not None: request.filter = filter if order_by is not None: request.order_by = order_by - if resource_names: - request.resource_names.extend(resource_names) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_entries] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 5492a3a30c5a..7ab8ac8d27f6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.api import monitored_resource_pb2 as monitored_resource # type: ignore from google.cloud.logging_v2.types import log_entry diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 2c592e6859af..09b2c317326c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -80,8 +80,36 @@ class MetricsServiceV2AsyncClient: MetricsServiceV2Client.parse_common_location_path ) - from_service_account_info = MetricsServiceV2Client.from_service_account_info - from_service_account_file = MetricsServiceV2Client.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + MetricsServiceV2AsyncClient: The constructed client. + """ + return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2AsyncClient: The constructed client. + """ + return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 51c3985980cd..15134ac578fb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.logging_v2.types import logging_metrics diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 725861a9049f..d039bf17693b 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "20712b8fe95001b312f62c6c5f33e3e3ec92cfaf", - "internalRef": "354996675" + "sha": "07932bb995e7dc91b43620ea8402c6668c7d102c", + "internalRef": "359562873" } }, { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py index 8b137891791f..42ffdf2bc43d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
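
[Editor's note: the regenerated async clients in this commit stop aliasing the sync classmethods
directly and instead call the sync implementation through .__func__, passing the async class
explicitly. That lets each wrapper carry its own docstring and return-type annotation while
sharing one implementation. The trick in miniature, with illustrative class names:

    class Base:
        def __init__(self, info):
            self.info = info

        @classmethod
        def from_info(cls, info):
            """Sync docstring."""
            return cls(info)

    class AsyncVariant(Base):
        @classmethod
        def from_info(cls, info):
            """Async-specific docstring, distinct from Base's."""
            # __func__ unwraps the classmethod so the subclass can be bound
            # explicitly as cls; the wrapper keeps its own documentation.
            return Base.from_info.__func__(AsyncVariant, info)

    assert isinstance(AsyncVariant.from_info({"valid": True}), AsyncVariant)

A plain alias would also bind cls correctly when called on the subclass, but it would expose the
sync class's docstring, which is why the generator emits the explicit wrappers above.]
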
+# diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index c819769ee03b..d6a2f3983293 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -88,15 +88,19 @@ def test__get_default_mtls_endpoint(): ) -def test_config_service_v2_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] +) +def test_config_service_v2_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = ConfigServiceV2Client.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -112,9 +116,11 @@ def test_config_service_v2_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -487,6 +493,22 @@ def test_list_buckets_from_dict(): test_list_buckets(request_type=dict) +def test_list_buckets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + client.list_buckets() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListBucketsRequest() + + @pytest.mark.asyncio async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest @@ -830,6 +852,22 @@ def test_get_bucket_from_dict(): test_get_bucket(request_type=dict) +def test_get_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + client.get_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetBucketRequest() + + @pytest.mark.asyncio async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest @@ -982,6 +1020,22 @@ def test_create_bucket_from_dict(): test_create_bucket(request_type=dict) +def test_create_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + client.create_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateBucketRequest() + + @pytest.mark.asyncio async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -1134,6 +1188,22 @@ def test_update_bucket_from_dict(): test_update_bucket(request_type=dict) +def test_update_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + client.update_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + @pytest.mark.asyncio async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -1269,6 +1339,22 @@ def test_delete_bucket_from_dict(): test_delete_bucket(request_type=dict) +def test_delete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + client.delete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteBucketRequest() + + @pytest.mark.asyncio async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest @@ -1384,6 +1470,22 @@ def test_undelete_bucket_from_dict(): test_undelete_bucket(request_type=dict) +def test_undelete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UndeleteBucketRequest() + + @pytest.mark.asyncio async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest @@ -1504,6 +1606,22 @@ def test_list_views_from_dict(): test_list_views(request_type=dict) +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call: + client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListViewsRequest() + + @pytest.mark.asyncio async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest @@ -1837,6 +1955,22 @@ def test_get_view_from_dict(): test_get_view(request_type=dict) +def test_get_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetViewRequest() + + @pytest.mark.asyncio async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest @@ -1975,6 +2109,22 @@ def test_create_view_from_dict(): test_create_view(request_type=dict) +def test_create_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + client.create_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateViewRequest() + + @pytest.mark.asyncio async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest @@ -2113,6 +2263,22 @@ def test_update_view_from_dict(): test_update_view(request_type=dict) +def test_update_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateViewRequest() + + @pytest.mark.asyncio async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest @@ -2242,6 +2408,22 @@ def test_delete_view_from_dict(): test_delete_view(request_type=dict) +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteViewRequest() + + @pytest.mark.asyncio async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest @@ -2362,6 +2544,22 @@ def test_list_sinks_from_dict(): test_list_sinks(request_type=dict) +def test_list_sinks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + client.list_sinks() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListSinksRequest() + + @pytest.mark.asyncio async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest @@ -2715,6 +2913,22 @@ def test_get_sink_from_dict(): test_get_sink(request_type=dict) +def test_get_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + client.get_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetSinkRequest() + + @pytest.mark.asyncio async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest @@ -2955,6 +3169,22 @@ def test_create_sink_from_dict(): test_create_sink(request_type=dict) +def test_create_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + client.create_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateSinkRequest() + + @pytest.mark.asyncio async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest @@ -3207,6 +3437,22 @@ def test_update_sink_from_dict(): test_update_sink(request_type=dict) +def test_update_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + client.update_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateSinkRequest() + + @pytest.mark.asyncio async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest @@ -3440,6 +3686,22 @@ def test_delete_sink_from_dict(): test_delete_sink(request_type=dict) +def test_delete_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteSinkRequest() + + @pytest.mark.asyncio async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest @@ -3625,6 +3887,22 @@ def test_list_exclusions_from_dict(): test_list_exclusions(request_type=dict) +def test_list_exclusions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + client.list_exclusions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListExclusionsRequest() + + @pytest.mark.asyncio async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest @@ -3987,6 +4265,22 @@ def test_get_exclusion_from_dict(): test_get_exclusion(request_type=dict) +def test_get_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + client.get_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetExclusionRequest() + + @pytest.mark.asyncio async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest @@ -4200,6 +4494,22 @@ def test_create_exclusion_from_dict(): test_create_exclusion(request_type=dict) +def test_create_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + client.create_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateExclusionRequest() + + @pytest.mark.asyncio async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest @@ -4427,6 +4737,22 @@ def test_update_exclusion_from_dict(): test_update_exclusion(request_type=dict) +def test_update_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateExclusionRequest() + + @pytest.mark.asyncio async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest @@ -4648,6 +4974,22 @@ def test_delete_exclusion_from_dict(): test_delete_exclusion(request_type=dict) +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteExclusionRequest() + + @pytest.mark.asyncio async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest @@ -4841,6 +5183,24 @@ def test_get_cmek_settings_from_dict(): test_get_cmek_settings(request_type=dict) +def test_get_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetCmekSettingsRequest() + + @pytest.mark.asyncio async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest @@ -4989,6 +5349,24 @@ def test_update_cmek_settings_from_dict(): test_update_cmek_settings(request_type=dict) +def test_update_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + @pytest.mark.asyncio async def test_update_cmek_settings_async( transport: str = "grpc_asyncio", diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index c811acac5374..66f22621cf9d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -95,15 +95,19 @@ def test__get_default_mtls_endpoint(): ) -def test_logging_service_v2_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] +) +def test_logging_service_v2_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = LoggingServiceV2Client.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -119,9 +123,11 @@ def test_logging_service_v2_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -487,6 +493,22 @@ def test_delete_log_from_dict(): test_delete_log(request_type=dict) +def test_delete_log_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + client.delete_log() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.DeleteLogRequest() + + @pytest.mark.asyncio async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest @@ -676,6 +698,24 @@ def test_write_log_entries_from_dict(): test_write_log_entries(request_type=dict) +def test_write_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + client.write_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.WriteLogEntriesRequest() + + @pytest.mark.asyncio async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest @@ -860,6 +900,22 @@ def test_list_log_entries_from_dict(): test_list_log_entries(request_type=dict) +def test_list_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + client.list_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogEntriesRequest() + + @pytest.mark.asyncio async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest @@ -1166,6 +1222,24 @@ def test_list_monitored_resource_descriptors_from_dict(): test_list_monitored_resource_descriptors(request_type=dict) +def test_list_monitored_resource_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + client.list_monitored_resource_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async( transport: str = "grpc_asyncio", @@ -1429,6 +1503,22 @@ def test_list_logs_from_dict(): test_list_logs(request_type=dict) +def test_list_logs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + client.list_logs() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging.ListLogsRequest() + + @pytest.mark.asyncio async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7230b0d870d2..6faec201e7bc 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,15 +94,19 @@ def test__get_default_mtls_endpoint(): ) -def test_metrics_service_v2_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] +) +def test_metrics_service_v2_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = MetricsServiceV2Client.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -118,9 +122,11 @@ def test_metrics_service_v2_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "logging.googleapis.com:443" @@ -493,6 +499,22 @@ def test_list_log_metrics_from_dict(): test_list_log_metrics(request_type=dict) +def test_list_log_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + client.list_log_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.ListLogMetricsRequest() + + @pytest.mark.asyncio async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest @@ -844,6 +866,22 @@ def test_get_log_metric_from_dict(): test_get_log_metric(request_type=dict) +def test_get_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + client.get_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.GetLogMetricRequest() + + @pytest.mark.asyncio async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest @@ -1071,6 +1109,24 @@ def test_create_log_metric_from_dict(): test_create_log_metric(request_type=dict) +def test_create_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + client.create_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.CreateLogMetricRequest() + + @pytest.mark.asyncio async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest @@ -1320,6 +1376,24 @@ def test_update_log_metric_from_dict(): test_update_log_metric(request_type=dict) +def test_update_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + client.update_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + @pytest.mark.asyncio async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest @@ -1554,6 +1628,24 @@ def test_delete_log_metric_from_dict(): test_delete_log_metric(request_type=dict) +def test_delete_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + client.delete_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + @pytest.mark.asyncio async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest From 87b29f0f8abcef8679443f1ab08d91932bcf7a2f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 9 Mar 2021 11:45:34 -0800 Subject: [PATCH 400/855] fix: logger uses default resource (#207) --- .../google/cloud/logging_v2/logger.py | 4 +- .../tests/environment/.gitignore | 3 +- .../tests/environment/README.md | 1 - .../tests/environment/deployable/go/go.mod | 3 +- .../tests/environment/deployable/go/go.sum | 148 ++---------------- .../tests/environment/deployable/go/main.go | 25 ++- .../environment/deployable/python/snippets.py | 10 +- .../envctl/env_scripts/go/cloudrun.sh | 2 +- .../envctl/env_scripts/go/functions.sh | 90 +++++++++++ .../envctl/env_scripts/python/cloudrun.sh | 1 + .../environment/tests/common/appengine.py | 34 ---- .../tests/environment/tests/common/common.py | 24 ++- .../tests/environment/tests/common/python.py | 51 ++++++ .../python/test_appengine_flex_container.py | 7 +- .../python/test_appengine_flex_python.py | 7 +- .../tests/python/test_appengine_standard.py | 7 +- .../environment/tests/python/test_cloudrun.py | 22 ++- .../environment/tests/python/test_compute.py | 13 +- .../tests/python/test_functions.py | 13 +- .../tests/python/test_kubernetes.py | 17 +- .../tests/unit/test_logger.py | 44 +++++- tests/environment | 2 +- 22 files changed, 283 insertions(+), 245 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh delete mode 100644 packages/google-cloud-logging/tests/environment/tests/common/appengine.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/python.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 6e9c5f00da95..124c33934908 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -20,6 +20,7 @@ from google.cloud.logging_v2.entries import StructEntry from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -62,6 +63,7 @@ def __init__(self, name, client, *, labels=None): self.name = name self._client = client self.labels = labels + self.default_resource = detect_resource(client.project) @property def client(self): @@ -120,7 +122,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): # Apply defaults kw["log_name"] = kw.pop("log_name", self.full_name) kw["labels"] = kw.pop("labels", self.labels) - kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE) + kw["resource"] = kw.pop("resource", self.default_resource) if payload is not None: entry = _entry_class(payload=payload, **kw) diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore index 676ba64117a3..1936b00310cd 100644 --- a/packages/google-cloud-logging/tests/environment/.gitignore +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -139,4 
+139,5 @@ cython_debug/ # Go files deployable/go/google-cloud-go -deployable/go/lib.tar \ No newline at end of file +deployable/go/lib.tar +deployable/go/vendor \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md index c8b8b90ac854..d1e1e8e9548a 100644 --- a/packages/google-cloud-logging/tests/environment/README.md +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -71,4 +71,3 @@ Test files in `tests/` can inherit from any file in `tests/common` log | Test Name | Optional Input | Description | | -------------- | ---------------- | -------------------------------- | | `simplelog` | `logname`, `logtext` | Logs a simple text payload | -| `standardlog` | `logname`, `logtext` | Logs a simple text payload using a standard library wrapper | diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod index c90011c2c812..61bddf0997c7 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod @@ -8,4 +8,5 @@ require ( ) replace cloud.google.com/go => ./google-cloud-go/. -replace cloud.google.com/go/logging => ./google-cloud-go/logging \ No newline at end of file + +replace cloud.google.com/go/logging => ./google-cloud-go/logging diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum index dbd6eeaaca20..7914d2b6b54c 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum @@ -1,34 +1,9 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.78.0 h1:oKpsiyKMfVpwR3zSAkQixGzlVE5ovitBuO0qSmCf0bI= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go/bigquery v1.0.1/go.mod 
h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/logging v1.3.0 h1:Ncg2sBr+SUtdqSmwRJ5WwuFG8Um3p/aZmRp6EtD9kPw= -cloud.google.com/go/logging v1.3.0/go.mod h1:pA7Kcvk7H0jQMY5WV03GwzOt07Yjw9xVsQMQC5NUZQQ= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= @@ -36,6 +11,7 @@ cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiy cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09bA= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -64,18 +40,12 @@ github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= 
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -85,42 +55,25 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= 
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -134,26 +87,20 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5 h1:dntmOdLpSpHlVqbW5Eay97DelsZHe+55D+xC6i0dDS0= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp 
v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= @@ -162,16 +109,13 @@ golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMx golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= @@ -179,7 +123,7 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -192,34 +136,22 @@ golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99 h1:5vD4XjIc0X5+kHZjx4UecYdjA6mJo+XXNoaW0EjU5Os= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= @@ -230,7 +162,6 @@ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -240,69 +171,46 @@ golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210223095934-7937bea0104d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073 h1:8qxJSnu+7dRq6upnbntrmriWByIakBuct5OM/MdQC1M= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -310,30 +218,20 @@ golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -345,38 +243,29 @@ google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/ google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0 h1:uWrpz12dpVPn7cojP82mk02XDgTJLDPc2KbVTxrWb4A= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto 
v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -384,17 +273,7 @@ google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c h1:7A9LQhrZmuCPI79/sYSbscFqBp4XFYf6oaIQuV1xji4= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210224155714-063164c882e6 h1:bXUwz2WkXXrXgiLxww3vWmoSHLOGv4ipdPdTvKymcKw= google.golang.org/genproto v0.0.0-20210224155714-063164c882e6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= @@ -407,10 +286,6 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0 h1:TwIQcH3es+MojMVojxxfQ3l3OF2KzlRxML2xZq0kRo8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= @@ -430,12 +305,7 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go index 85da8c741dee..5e38c7704cf8 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/main.go +++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go @@ -27,7 +27,7 @@ import ( "cloud.google.com/go/logging" ) -// PubSubMessage is the logtext of a Pub/Sub event. 
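For context on the two message shapes touched in this hunk: Pub/Sub push delivers an HTTP envelope of the form {"message": {"data": <base64>, ...}, "subscription": ...}, while the background Cloud Function added below receives the inner message struct directly. A minimal sketch of decoding the push envelope (sample payload assumed):

    import base64
    import json

    # hypothetical envelope, as Pub/Sub would POST it to the pubsubHTTP handler
    envelope = json.loads('{"message": {"data": "c2ltcGxlbG9n"}, "subscription": "sub"}')
    data = base64.b64decode(envelope["message"]["data"])
    print(data.decode())  # -> simplelog, the trigger command name

Go's encoding/json performs the same base64 decoding automatically for []byte fields, which is why both structs declare Data []byte.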
+// pubSubMessage is the message format received over HTTP type pubSubMessage struct { Message struct { Data []byte `json:"data,omitempty"` } `json:"message"` Subscription string `json:"subscription"` } @@ -65,6 +65,27 @@ func pubsubHTTP(w http.ResponseWriter, r *http.Request) { } } +// PubSubMessage is the message format received by Cloud Functions +type PubSubMessage struct { + Data []byte `json:"data"` + Attributes map[string]string `json:"attributes"` +} + +// PubsubFunction is a background Cloud Function triggered by Pub/Sub +func PubsubFunction(ctx context.Context, m PubSubMessage) error { + log.Printf("Data is: %v", string(m.Data)) + switch string(m.Data) { + case "simplelog": + simplelog(m.Attributes) + break + case "stdlog": + break + default: + break + } + return nil +} + func main() { if os.Getenv("ENABLE_SUBSCRIBER") == "" { @@ -83,7 +104,7 @@ func main() { } } -// [Optional] envctl go trigger simpleLog logname=foo,logtext=bar +// [Optional] envctl go trigger simplelog logname=foo,logtext=bar func simplelog(args map[string]string) { ctx := context.Background() projectID, err := metadata.ProjectID() diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 4c7695012c51..959131c5654f 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -31,13 +31,13 @@ from google.cloud.logging_v2._helpers import retrieve_metadata_server -# def simple_log(log_name=None, log_text="simple_log", **kwargs): -# client = google.cloud.logging.Client() -# logger = client.logger(log_name) -# logger.log_text(log_text) +def simplelog(log_name=None, log_text="simple_log", **kwargs): + client = google.cloud.logging.Client() + logger = client.logger(log_name) + logger.log_text(log_text) -def simplelog(log_text="pylogging", severity="warning", **kwargs): +def pylogging(log_text="pylogging", severity="warning", **kwargs): # allowed severity: debug, info, warning, error, critical if severity == "debug": logging.debug(log_text) diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh index a0a2331e7a37..561ae9b5f7fe 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh @@ -18,7 +18,7 @@ set -o pipefail # any step in pipe caused failure set -u # undefined variables cause exit # Note: there is a max character count constraint -SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 9)" +SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 8)x" SA_NAME=$SERVICE_NAME-invoker LIBRARY_NAME="google-cloud-go" diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh new file mode 100644 index 000000000000..6125d5ab4830 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh @@ -0,0 +1,90 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="log-go-func-$(echo $ENVCTL_ID | head -c 8)x" +LIBRARY_NAME="google-cloud-go" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service + gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud functions describe $SERVICE_NAME --region us-west2 + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -e + # Note: functions only supports go111 and go113 at the moment + local RUNTIME="go113" + + # Copy over local copy of library to use as dependency + _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + pushd $SUPERREPO_ROOT + tar -cvf $_deployable_dir/lib.tar --exclude internal/logging --exclude .nox --exclude docs --exclude __pycache__ . + popd + mkdir -p $_deployable_dir/google-cloud-go + tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/google-cloud-go + + # Create vendor folder based on local dependency + pushd $REPO_ROOT/deployable/go + go mod vendor + popd + + # move code into a temp directory used to deploy the cloud function + cp -rf $REPO_ROOT/deployable/go/vendor $TMP_DIR/vendor + + # Renames package as Cloud Functions cannot be 'main' packages. 
+ sed 's/package main.*/package function/g' $REPO_ROOT/deployable/go/main.go > $TMP_DIR/main.go + + # clean up vendor folder + pushd $REPO_ROOT/deployable/go + rm -rf vendor/ + popd + + # deploy function + pushd $TMP_DIR + gcloud functions deploy $SERVICE_NAME \ + --entry-point PubsubFunction \ + --trigger-topic $SERVICE_NAME \ + --runtime $RUNTIME \ + --region us-west2 + popd +} + +filter-string() { + echo "resource.type=\"cloud_function\" AND resource.labels.function_name=\"$SERVICE_NAME\"" +} diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh index 4da3dc154099..d68f8a4fbccc 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/cloudrun.sh @@ -75,6 +75,7 @@ deploy() { gcloud run deploy \ --image $GCR_PATH \ --update-env-vars ENABLE_FLASK=true \ + --no-allow-unauthenticated \ $SERVICE_NAME # create pubsub subscription add_service_accounts diff --git a/packages/google-cloud-logging/tests/environment/tests/common/appengine.py b/packages/google-cloud-logging/tests/environment/tests/common/appengine.py deleted file mode 100644 index a062af90756e..000000000000 --- a/packages/google-cloud-logging/tests/environment/tests/common/appengine.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
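The filter-string helper above produces the advanced-logs query that the test harness later uses to fetch entries written by the deployed function. A rough sketch of that consumer side with the Python client (the function name below is a made-up example):

    from google.cloud import logging

    client = logging.Client()
    # same shape as the output of filter-string(); the name is hypothetical
    filter_str = (
        'resource.type="cloud_function" '
        'AND resource.labels.function_name="log-go-func-abcdefghx"'
    )
    entries = list(client.list_entries(filter_=filter_str))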
- -import logging -import unittest -import inspect - -import google.cloud.logging - -from ..common.common import Common - - -class CommonAppEngine: - def test_monitored_resource(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[-1].resource - - self.assertEqual(found_resource.type, "gae_app") - self.assertTrue(found_resource.labels["project_id"]) - self.assertTrue(found_resource.labels["module_id"]) - self.assertTrue(found_resource.labels["version_id"]) - self.assertTrue(found_resource.labels["zone"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index e26c83272d1f..828c778ef171 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -44,8 +44,10 @@ class LogsNotFound(RuntimeError): class Common: _client = Client() - # environment name must be set by subclass + # environment name and monitored resource values must be set by subclass environment = None + monitored_resource_name = None + monitored_resource_labels = None def _add_time_condition_to_filter(self, filter_str, timestamp=None): time_format = "%Y-%m-%dT%H:%M:%S.%f%z" @@ -68,10 +70,10 @@ def _trigger(self, function, **kwargs): self._script.run_command(Command.Trigger, [function, args_str]) @RetryErrors(exception=LogsNotFound, delay=2) - def trigger_and_retrieve(self, log_text, append_uuid=True, max_tries=6): + def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, max_tries=6): if append_uuid: log_text = f"{log_text} - {uuid.uuid1()}" - self._trigger("simplelog", log_text=log_text) + self._trigger(function, log_text=log_text) filter_str = self._add_time_condition_to_filter(log_text) # give the command time to be received tries = 0 @@ -130,3 +132,19 @@ def test_receive_log(self): if message and log_text in message: found_log = log self.assertIsNotNone(found_log, "expected log text not found") + + def test_monitored_resource(self): + if self.language != "python": + # to do: add monitored resource info to go + return True + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + found_resource = log_list[-1].resource + + self.assertIsNotNone(self.monitored_resource_name) + self.assertIsNotNone(self.monitored_resource_labels) + + self.assertEqual(found_resource.type, self.monitored_resource_name) + for label in self.monitored_resource_labels: + self.assertTrue(found_resource.labels[label], + f'resource.labels[{label}] is not set') diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py new file mode 100644 index 000000000000..963eb0a18c8c --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -0,0 +1,51 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
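The trigger_and_retrieve change above layers two retries: the @RetryErrors decorator re-runs the whole trigger-and-poll cycle, while the inner loop waits for entries to land. A simplified standalone sketch of that inner loop (not the exact harness code):

    import time

    class LogsNotFound(RuntimeError):
        """Raised while no entries match the filter yet."""

    def poll_for_logs(get_logs, filter_str, max_tries=6, delay=10):
        for _ in range(max_tries):
            try:
                return get_logs(filter_str)  # harness calls self._get_logs(...)
            except LogsNotFound:
                time.sleep(delay)  # give the trigger time to be received
        raise LogsNotFound  # surfaces to @RetryErrors, which re-runs the cycle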
+# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class CommonPython: + def pylogging_test_receive_log(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + + found_log = None + for log in log_list: + message = ( + log.payload.get("message", None) + if isinstance(log.payload, dict) + else str(log.payload) + ) + if message and log_text in message: + found_log = log + self.assertIsNotNone(found_log, "expected log text not found") + + def test_monitored_resource_pylogging(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + found_resource = log_list[-1].resource + + self.assertIsNotNone(self.monitored_resource_name) + self.assertIsNotNone(self.monitored_resource_labels) + + self.assertEqual(found_resource.type, self.monitored_resource_name) + for label in self.monitored_resource_labels: + self.assertTrue(found_resource.labels[label], + f'resource.labels[{label}] is not set') diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py index e8a55ad5341a..a3b4dcac9a61 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py @@ -18,10 +18,13 @@ import google.cloud.logging from ..common.common import Common -from ..common.appengine import CommonAppEngine +from ..common.python import CommonPython -class TestAppEngineFlexContainer(Common, CommonAppEngine, unittest.TestCase): +class TestAppEngineFlexContainer(Common, CommonPython, unittest.TestCase): environment = "appengine_flex_container" language = "python" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py index 23d1e30c48a4..8619f77a2e8e 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py @@ -18,10 +18,13 @@ import google.cloud.logging from ..common.common import Common -from ..common.appengine import CommonAppEngine +from ..common.python import CommonPython -class TestAppEngineFlex(Common, CommonAppEngine, unittest.TestCase): +class TestAppEngineFlex(Common, CommonPython, unittest.TestCase): environment = "appengine_flex_python" language = "python" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py index bb93ab8f00b0..a633a9770626 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py @@ -18,10 +18,13 @@ import 
google.cloud.logging from ..common.common import Common -from ..common.appengine import CommonAppEngine +from ..common.python import CommonPython -class TestAppEngineStandard(Common, CommonAppEngine, unittest.TestCase): +class TestAppEngineStandard(Common, CommonPython, unittest.TestCase): environment = "appengine_standard" language = "python" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index 7c83c1179a17..e68578bf05b2 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -21,21 +21,19 @@ from google.cloud.logging_v2.resource import Resource from ..common.common import Common +from ..common.python import CommonPython -class TestCloudRun(Common, unittest.TestCase): +class TestCloudRun(Common, CommonPython, unittest.TestCase): environment = "cloudrun" language = "python" - def test_monitored_resource(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[0].resource - - self.assertEqual(found_resource.type, "cloud_run_revision") - self.assertTrue(found_resource.labels["project_id"]) - self.assertTrue(found_resource.labels["service_name"]) - self.assertTrue(found_resource.labels["revision_name"]) - self.assertTrue(found_resource.labels["location"]) - self.assertTrue(found_resource.labels["configuration_name"]) + monitored_resource_name = "cloud_run_revision" + monitored_resource_labels = [ + "project_id", + "service_name", + "revision_name", + "location", + "configuration_name", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py index 81b7e3a20d20..fac1e4db1e82 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py @@ -19,18 +19,13 @@ import google.cloud.logging from ..common.common import Common +from ..common.python import CommonPython -class TestComputeEngine(Common, unittest.TestCase): +class TestComputeEngine(Common, CommonPython, unittest.TestCase): environment = "compute" language = "python" - def test_monitored_resource(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[0].resource - - self.assertEqual(found_resource.type, "gce_instance") - self.assertTrue(found_resource.labels["zone"]) - self.assertTrue(found_resource.labels["instance_id"]) + monitored_resource_name = "gce_instance" + monitored_resource_labels = ["instance_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index de65169913e5..8402823ea6b8 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -19,18 +19,13 @@ import google.cloud.logging from ..common.common import Common +from ..common.python import CommonPython -class TestCloudFunctions(Common, unittest.TestCase): +class 
TestCloudFunctions(Common, CommonPython, unittest.TestCase): environment = "functions" language = "python" - def test_monitored_resource(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[0].resource - - self.assertEqual(found_resource.type, "cloud_function") - self.assertTrue(found_resource.labels["region"]) - self.assertTrue(found_resource.labels["function_name"]) + monitored_resource_name = "cloud_function" + monitored_resource_labels = ["region", "function_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py index 77f4d38cbe22..89e7e45f96b0 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py @@ -19,22 +19,13 @@ import google.cloud.logging from ..common.common import Common +from ..common.python import CommonPython -class TestKubernetesEngine(Common, unittest.TestCase): +class TestKubernetesEngine(Common, CommonPython, unittest.TestCase): environment = "kubernetes" language = "python" - def test_monitored_resource(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[0].resource - - self.assertEqual(found_resource.type, "k8s_container") - self.assertTrue(found_resource.labels["project_id"]) - self.assertTrue(found_resource.labels["location"]) - self.assertTrue(found_resource.labels["cluster_name"]) - self.assertTrue(found_resource.labels["namespace_name"]) - self.assertTrue(found_resource.labels["pod_name"]) - self.assertTrue(found_resource.labels["container_name"]) + monitored_resource_name = "k8s_container" + monitored_resource_labels = ["project_id", "location", "cluster_name"] diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 8693306332f7..53ecac8f58f5 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -99,11 +99,15 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.client, client2) def test_log_empty_defaults_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -170,12 +174,17 @@ def test_log_empty_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + RESOURCE = detect_resource(self.PROJECT)._to_dict() TEXT = "TEXT" ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "textPayload": TEXT, - "resource": {"type": "global", "labels": {}}, + "resource": RESOURCE, } ] client = _Client(self.PROJECT) @@ -187,13 +196,18 @@ def test_log_text_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + 
TEXT = "TEXT" + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "textPayload": TEXT, - "resource": {"type": "global", "labels": {}}, + "resource": RESOURCE, "labels": DEFAULT_LABELS, } ] @@ -263,12 +277,17 @@ def test_log_text_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + RESOURCE = detect_resource(self.PROJECT)._to_dict() ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "jsonPayload": STRUCT, - "resource": {"type": "global", "labels": {}}, + "resource": RESOURCE, } ] client = _Client(self.PROJECT) @@ -280,13 +299,18 @@ def test_log_struct_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "jsonPayload": STRUCT, - "resource": {"type": "global", "labels": {}}, + "resource": RESOURCE, "labels": DEFAULT_LABELS, } ] @@ -359,13 +383,16 @@ def test_log_proto_defaults(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) message = Struct(fields={"foo": Value(bool_value=True)}) ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -380,6 +407,9 @@ def test_log_proto_w_default_labels(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) message = Struct(fields={"foo": Value(bool_value=True)}) DEFAULT_LABELS = {"foo": "spam"} @@ -387,7 +417,7 @@ def test_log_proto_w_default_labels(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] diff --git a/tests/environment b/tests/environment index 74542ed04061..a7dd027166b8 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 74542ed0406167670870b7db1b170562973ede06 +Subproject commit a7dd027166b8df1980881a94ba8473065497701d From 157a8ef488e9eccf400cccde597b71347aa60bb4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 11 Mar 2021 15:58:31 -0800 Subject: [PATCH 401/855] chore: upgrade gapic-generator-python to 0.42.2 (#213) PiperOrigin-RevId: 361662015 Source-Author: Google APIs Source-Date: Mon Mar 8 14:47:18 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 28a591963253d52ce3a25a918cafbdd9928de8cf Source-Link: 
https://github.com/googleapis/googleapis/commit/28a591963253d52ce3a25a918cafbdd9928de8cf --- .../google/cloud/logging_v2/types/__init__.py | 168 +++++++++--------- packages/google-cloud-logging/synth.metadata | 6 +- 2 files changed, 87 insertions(+), 87 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index dce385af3cda..9519c0777f43 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -20,121 +20,121 @@ LogEntryOperation, LogEntrySourceLocation, ) +from .logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) from .logging_config import ( - LogBucket, - LogView, - LogSink, BigQueryOptions, - ListBucketsRequest, - ListBucketsResponse, + CmekSettings, CreateBucketRequest, - UpdateBucketRequest, - GetBucketRequest, - DeleteBucketRequest, - UndeleteBucketRequest, - ListViewsRequest, - ListViewsResponse, + CreateExclusionRequest, + CreateSinkRequest, CreateViewRequest, - UpdateViewRequest, - GetViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteSinkRequest, DeleteViewRequest, - ListSinksRequest, - ListSinksResponse, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, GetSinkRequest, - CreateSinkRequest, - UpdateSinkRequest, - DeleteSinkRequest, - LogExclusion, + GetViewRequest, + ListBucketsRequest, + ListBucketsResponse, ListExclusionsRequest, ListExclusionsResponse, - GetExclusionRequest, - CreateExclusionRequest, - UpdateExclusionRequest, - DeleteExclusionRequest, - GetCmekSettingsRequest, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LogBucket, + LogExclusion, + LogSink, + LogView, + UndeleteBucketRequest, + UpdateBucketRequest, UpdateCmekSettingsRequest, - CmekSettings, + UpdateExclusionRequest, + UpdateSinkRequest, + UpdateViewRequest, LifecycleState, ) -from .logging import ( - DeleteLogRequest, - WriteLogEntriesRequest, - WriteLogEntriesResponse, - WriteLogEntriesPartialErrors, - ListLogEntriesRequest, - ListLogEntriesResponse, - ListMonitoredResourceDescriptorsRequest, - ListMonitoredResourceDescriptorsResponse, - ListLogsRequest, - ListLogsResponse, - TailLogEntriesRequest, - TailLogEntriesResponse, -) from .logging_metrics import ( - LogMetric, + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, ListLogMetricsRequest, ListLogMetricsResponse, - GetLogMetricRequest, - CreateLogMetricRequest, + LogMetric, UpdateLogMetricRequest, - DeleteLogMetricRequest, ) __all__ = ( "LogEntry", "LogEntryOperation", "LogEntrySourceLocation", - "LogBucket", - "LogView", - "LogSink", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", "BigQueryOptions", - "ListBucketsRequest", - "ListBucketsResponse", + "CmekSettings", "CreateBucketRequest", - "UpdateBucketRequest", - 
"GetBucketRequest", - "DeleteBucketRequest", - "UndeleteBucketRequest", - "ListViewsRequest", - "ListViewsResponse", + "CreateExclusionRequest", + "CreateSinkRequest", "CreateViewRequest", - "UpdateViewRequest", - "GetViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteSinkRequest", "DeleteViewRequest", - "ListSinksRequest", - "ListSinksResponse", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", "GetSinkRequest", - "CreateSinkRequest", - "UpdateSinkRequest", - "DeleteSinkRequest", - "LogExclusion", + "GetViewRequest", + "ListBucketsRequest", + "ListBucketsResponse", "ListExclusionsRequest", "ListExclusionsResponse", - "GetExclusionRequest", - "CreateExclusionRequest", - "UpdateExclusionRequest", - "DeleteExclusionRequest", - "GetCmekSettingsRequest", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "UndeleteBucketRequest", + "UpdateBucketRequest", "UpdateCmekSettingsRequest", - "CmekSettings", + "UpdateExclusionRequest", + "UpdateSinkRequest", + "UpdateViewRequest", "LifecycleState", - "DeleteLogRequest", - "WriteLogEntriesRequest", - "WriteLogEntriesResponse", - "WriteLogEntriesPartialErrors", - "ListLogEntriesRequest", - "ListLogEntriesResponse", - "ListMonitoredResourceDescriptorsRequest", - "ListMonitoredResourceDescriptorsResponse", - "ListLogsRequest", - "ListLogsResponse", - "TailLogEntriesRequest", - "TailLogEntriesResponse", - "LogMetric", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", "ListLogMetricsRequest", "ListLogMetricsResponse", - "GetLogMetricRequest", - "CreateLogMetricRequest", + "LogMetric", "UpdateLogMetricRequest", - "DeleteLogMetricRequest", ) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index d039bf17693b..dfee48d55b4f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "867362e803e7c11677df39f600d69d9506b3f0f8" + "sha": "0f90a79d165314d261413cc369408e15f711129f" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "07932bb995e7dc91b43620ea8402c6668c7d102c", - "internalRef": "359562873" + "sha": "28a591963253d52ce3a25a918cafbdd9928de8cf", + "internalRef": "361662015" } }, { From 39865dbe0f3a3136972c69916568b356290f6fd5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Mar 2021 00:58:48 +0100 Subject: [PATCH 402/855] chore(deps): update dependency google-cloud-storage to v1.36.2 (#215) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 131f3bbeeeb6..d87af05fad26 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 google-cloud-bigquery==2.10.0 -google-cloud-storage==1.36.1 +google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From 5ee91d945fb364d8875d5b978c7111104fa91900 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 11 Mar 2021 15:59:25 -0800 Subject: [PATCH 403/855] chore: update gapic-generator-python to 0.40.11 (#214) 
PiperOrigin-RevId: 359562873 Source-Author: Google APIs Source-Date: Thu Feb 25 10:52:32 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 07932bb995e7dc91b43620ea8402c6668c7d102c Source-Link: https://github.com/googleapis/googleapis/commit/07932bb995e7dc91b43620ea8402c6668c7d102c --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 1 + 2 files changed, 1 insertion(+) create mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index dfee48d55b4f..611106822fb3 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -132,6 +132,7 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", + "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From adeaf848a1aa64f80dc8d371b193a46a2c92ea71 Mon Sep 17 00:00:00 2001 From: ArthurYueh Date: Tue, 16 Mar 2021 00:33:27 +0800 Subject: feat: Add json setting to allow unicodes to show in log instead of ascii ch… (#193) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../google/cloud/logging_v2/handlers/_helpers.py | 2 +- .../tests/unit/handlers/test_container_engine.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index fff1e9a892d0..88eba07a67a2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -48,7 +48,7 @@ def format_stackdriver_json(record, message): "severity": record.levelname, } - return json.dumps(payload) + return json.dumps(payload, ensure_ascii=False) def get_request_data_from_flask(): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py index d2ae838811a0..280ab9cf0037 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py @@ -40,7 +40,7 @@ def test_format(self): handler = self._make_one() logname = "loggername" - message = "hello world" + message = "hello world,嗨 世界" record = logging.LogRecord( logname, logging.INFO, None, None, message, None, None ) @@ -53,4 +53,4 @@ } payload = handler.format(record) - self.assertEqual(payload, json.dumps(expected_payload)) + self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False)) From b714aebc95df3ca29e90192955c863bb51944bd0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 15 Mar 2021 17:34:05 +0100 Subject: chore(deps): update dependency google-cloud-bigquery to v2.11.0 (#212) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt
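The ensure_ascii change in the commit above matters for non-Latin log text: json.dumps escapes anything outside ASCII by default, which is exactly what the new unit-test string exercises. A quick demonstration:

    import json

    payload = {"message": "hello world,嗨 世界"}
    print(json.dumps(payload))                      # {"message": "hello world,\u55e8 \u4e16\u754c"}
    print(json.dumps(payload, ensure_ascii=False))  # {"message": "hello world,嗨 世界"}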
b/packages/google-cloud-logging/samples/snippets/requirements.txt index d87af05fad26..cc757aabcac4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.2.0 -google-cloud-bigquery==2.10.0 +google-cloud-bigquery==2.11.0 google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From eb7e03af4afa901122a6816d150b8e71a2441cc8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 15 Mar 2021 09:53:13 -0700 Subject: [PATCH 406/855] fix: no duplicate logs on GCF or GAE (#209) --- .../google/cloud/logging_v2/client.py | 1 + .../handlers/_monitored_resources.py | 2 +- .../cloud/logging_v2/handlers/handlers.py | 7 +++ .../tests/environment/README.md | 2 +- .../tests/environment/deployable/go/go.sum | 32 +++++++----- .../tests/environment/deployable/go/main.go | 6 +-- .../envctl/env_scripts/go/functions.sh | 2 +- .../tests/environment/noxfile.py | 4 +- .../tests/environment/tests/common/common.py | 11 ++++- .../tests/environment/tests/common/python.py | 6 +++ .../environment/tests/go/test_functions.py | 3 ++ .../tests/unit/handlers/test_handlers.py | 49 +++++++++++++++++++ tests/environment | 2 +- 13 files changed, 103 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index f196f443a27d..17d8534010dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -52,6 +52,7 @@ _GAE_RESOURCE_TYPE = "gae_app" _GKE_RESOURCE_TYPE = "k8s_container" +_GCF_RESOURCE_TYPE = "cloud_function" class Client(ClientWithProject): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index 4bc30d4fb361..bd05c252239f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -163,7 +163,7 @@ def _create_global_resource(project): return Resource(type="global", labels={"project_id": project}) -def detect_resource(project): +def detect_resource(project=""): """Return the default monitored resource based on the local environment. Args: project (str): The project ID to pass on to the resource diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index c2ad6f35511a..feeac9171f1a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -23,6 +23,8 @@ EXCLUDED_LOGGER_DEFAULTS = ("google.cloud", "google.auth", "google_auth_httplib2") +_CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") + class CloudLoggingHandler(logging.StreamHandler): """Handler that directly makes Cloud Logging API calls. 
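Background for the setup_logging change that follows: on App Engine and Cloud Functions the runtime pre-installs a stream handler on the root logger, and the platform already ingests that handler's stdout/stderr output as log entries, so attaching a CloudLoggingHandler on top used to emit every record twice. A small probe of that pre-existing state (what you would observe on those runtimes):

    import logging

    root = logging.getLogger()
    # on GCF/GAE this list is non-empty before any user configuration; the
    # fix below clears it before the Cloud Logging handler is attached
    print(root.handlers)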
@@ -160,6 +162,11 @@ def setup_logging( """ all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) logger = logging.getLogger() + + # remove built-in handlers on App Engine or Cloud Functions environments + if detect_resource().type in _CLEAR_HANDLER_RESOURCE_TYPES: + logger.handlers.clear() + logger.setLevel(log_level) logger.addHandler(handler) for logger_name in all_excluded_loggers: diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md index d1e1e8e9548a..6d97b83c6303 100644 --- a/packages/google-cloud-logging/tests/environment/README.md +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -70,4 +70,4 @@ Test files in `tests/` can inherit from any file in `tests/common` log | Test Name | Optional Input | Description | | -------------- | ---------------- | -------------------------------- | -| `simplelog` | `logname`, `logtext` | Logs a simple text payload | +| `simplelog` | `log_name`, `log_text` | Logs a simple text payload | diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum index 7914d2b6b54c..71e5458c47c9 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum @@ -40,7 +40,7 @@ github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -61,11 +61,13 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid 
v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= @@ -85,6 +87,7 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -92,8 +95,9 @@ go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5 h1:dntmOdLpSpHlVqbW5Eay97DelsZHe+55D+xC6i0dDS0= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -146,15 +150,16 @@ golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99 h1:5vD4XjIc0X5+kHZjx4UecYdjA6mJo+XXNoaW0EjU5Os= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93 h1:alLDrZkL34Y2bnGHfvC1CYBRBXCXgx8AC2vY4MRtYX4= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -202,7 +207,6 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -274,8 +278,8 @@ google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1m google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210224155714-063164c882e6 h1:bXUwz2WkXXrXgiLxww3vWmoSHLOGv4ipdPdTvKymcKw= -google.golang.org/genproto v0.0.0-20210224155714-063164c882e6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210302174412-5ede27ff9881 h1:SYuy3hIRsBIROE0aZwsJZOEJNC/n9/p0FmLEU9C31AE= +google.golang.org/genproto v0.0.0-20210302174412-5ede27ff9881/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -286,9 +290,10 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0 h1:TwIQcH3es+MojMVojxxfQ3l3OF2KzlRxML2xZq0kRo8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/protobuf 
v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -304,6 +309,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go index 5e38c7704cf8..c75af3c4f254 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/main.go +++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go @@ -104,7 +104,7 @@ func main() { } } -// [Optional] envctl go trigger simplelog logname=foo,logtext=bar +// [Optional] envctl go trigger simplelog log_name=foo,log_text=bar func simplelog(args map[string]string) { ctx := context.Background() projectID, err := metadata.ProjectID() @@ -118,12 +118,12 @@ func simplelog(args map[string]string) { defer client.Close() logname := "my-log" - if val, ok := args["logname"]; ok { + if val, ok := args["log_name"]; ok { logname = val } logtext := "hello world" - if val, ok := args["logtext"]; ok { + if val, ok := args["log_text"]; ok { logtext = val } diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh index 6125d5ab4830..3d33ce32c76c 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh @@ -54,7 +54,7 @@ deploy() { # Copy over local copy of library to use as dependency _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE pushd $SUPERREPO_ROOT - tar -cvf $_deployable_dir/lib.tar --exclude internal/logging --exclude .nox --exclude docs --exclude __pycache__ . + tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ . 
popd mkdir -p $_deployable_dir/google-cloud-go tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/google-cloud-go diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 9aca74865f7f..18b6d1024fa6 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -69,7 +69,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: # We also need to specify the rules which are ignored by default: # ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_PYTHON_VERSION = "3.7" BLACK_PATHS = ["./deployable/python"] BLACK_VERSION = "black==19.10b0" @@ -116,7 +116,7 @@ def blacken(session: nox.sessions.Session) -> None: # Environment Tests -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) @nox.parametrize( "platform", [ diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 828c778ef171..844828557135 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -31,6 +31,7 @@ import inspect from test_utils.retry import RetryErrors +from grpc import RpcError from .script_utils import ScriptRunner from .script_utils import Command @@ -69,7 +70,7 @@ def _trigger(self, function, **kwargs): args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()]) self._script.run_command(Command.Trigger, [function, args_str]) - @RetryErrors(exception=LogsNotFound, delay=2) + @RetryErrors(exception=(LogsNotFound, RpcError), delay=2) def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, max_tries=6): if append_uuid: log_text = f"{log_text} - {uuid.uuid1()}" @@ -82,7 +83,7 @@ def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, try: log_list = self._get_logs(filter_str) return log_list - except LogsNotFound: + except (LogsNotFound, RpcError) as e: sleep(10) tries += 1 # log not found @@ -133,6 +134,12 @@ def test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") + def test_no_duplicates(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text) + + self.assertEqual(len(log_list), 1) + def test_monitored_resource(self): if self.language != "python": # to do: add monitored resource info to go diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 963eb0a18c8c..2ee48a2ae499 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -37,6 +37,12 @@ def pylogging_test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") + def test_no_duplicates_pylogging(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + + self.assertEqual(len(log_list), 1) + def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, function="pylogging") diff --git 
a/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py index 81ee601ec34f..b137532b1965 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py @@ -24,3 +24,6 @@ class TestCloudFunctions(Common, unittest.TestCase): environment = "functions" language = "go" + + monitored_resource_name = "project" + monitored_resource_labels = ["project_id"] diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 0e7c63cc409b..51e2f0703d57 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -14,8 +14,14 @@ import logging import unittest +from unittest.mock import patch import mock +from google.cloud.logging_v2.handlers._monitored_resources import ( + _FUNCTION_ENV_VARS, + _GAE_ENV_VARS, +) + class TestCloudLoggingHandler(unittest.TestCase): @@ -165,6 +171,49 @@ def test_setup_logging_excludes(self): self.assertNotIn(handler, excluded_logger.handlers) self.assertFalse(excluded_logger.propagate) + @patch.dict("os.environ", {envar: "1" for envar in _FUNCTION_ENV_VARS}) + def test_remove_handlers_gcf(self): + logger = logging.getLogger() + # add fake handler + added_handler = logging.StreamHandler() + logger.addHandler(added_handler) + + handler = _Handler(logging.INFO) + self._call_fut(handler) + self.assertNotIn(added_handler, logger.handlers) + # handler should be removed from logger + self.assertEqual(len(logger.handlers), 1) + + @patch.dict("os.environ", {envar: "1" for envar in _GAE_ENV_VARS}) + def test_remove_handlers_gae(self): + logger = logging.getLogger() + # add fake handler + added_handler = logging.StreamHandler() + logger.addHandler(added_handler) + + handler = _Handler(logging.INFO) + self._call_fut(handler) + self.assertNotIn(added_handler, logger.handlers) + # handler should be removed from logger + self.assertEqual(len(logger.handlers), 1) + + def test_keep_handlers_others(self): + # mock non-cloud environment + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + return_value=None, + ) + with patch: + # add fake handler + added_handler = logging.StreamHandler() + logger = logging.getLogger() + logger.addHandler(added_handler) + + handler = _Handler(logging.INFO) + self._call_fut(handler) + # added handler should remain in logger + self.assertIn(added_handler, logger.handlers) + def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] diff --git a/tests/environment b/tests/environment index a7dd027166b8..265a954c13ab 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit a7dd027166b8df1980881a94ba8473065497701d +Subproject commit 265a954c13ab30188adbf42a9e63d2bb7b9969ab From 824c74f031d67cd79858443a480bbefed3ccfbd9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 15 Mar 2021 09:53:37 -0700 Subject: [PATCH 407/855] chore: renovate bot pulls in submodule changes (#218) --- packages/google-cloud-logging/renovate.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index 4fa949311b20..2b581d17036d 100644 --- a/packages/google-cloud-logging/renovate.json +++ 
b/packages/google-cloud-logging/renovate.json @@ -1,5 +1,8 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "git-submodules": { + "enabled": true + } } From 7ccc1e8af3de6c328a7b8af80bf011ce00ac7f22 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 15 Mar 2021 17:54:00 +0100 Subject: [PATCH 408/855] chore(deps): update precommit hook pycqa/flake8 to v3.9.0 (#219) --- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index a9024b15d725..32302e4883a1 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 From 9416c73567241d93e0eb5c5815e0fabc74a92a43 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 15 Mar 2021 10:00:11 -0700 Subject: [PATCH 409/855] chore: Re-generated to pick up changes from self (#217) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 +-- 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 611106822fb3..5656e5bc6529 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "0f90a79d165314d261413cc369408e15f711129f" + "sha": "42bda27dd3eee2123cbe5b21dd75a587af7a910b" } }, { @@ -132,7 +132,6 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From adbb20aaf40ee6fb96e70b27275d95aca5d57a8d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 15 Mar 2021 10:21:40 -0700 Subject: [PATCH 410/855] test: check severity in all environments (#216) --- .../environment/deployable/python/router.py | 11 ++++---- .../environment/deployable/python/snippets.py | 17 +++++++------ .../tests/environment/tests/common/common.py | 25 +++++++++++++------ .../tests/environment/tests/common/python.py | 15 +++++++---- tests/environment | 2 +- 5 files changed, 44 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/router.py b/packages/google-cloud-logging/tests/environment/deployable/python/router.py index 705c0daf8939..cc76a3b15509 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/router.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/router.py @@ -43,8 +43,7 @@ # used in Cloud Functions def pubsub_gcf(event, context): - client = google.cloud.logging.Client() - client.setup_logging() + initialize_client() if "data" not in event: logging.error("invalid pubsub message") @@ -88,11 +87,13 @@ def pubsub_callback(message): else: logging.error(f"function {msg_str} not found") - -if __name__ == "__main__": +def initialize_client(): # set up logging 
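    # setup_logging() below attaches a Cloud Logging handler to the root logger;
    # log_level sets the minimum Python level that is forwarded, so DEBUG records
    # now reach Cloud Logging too. A sketch of the same call that also mutes a
    # noisy logger (excluded_loggers is assumed available on this client version):
    #   client.setup_logging(log_level=logging.DEBUG, excluded_loggers=("werkzeug",))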
client = google.cloud.logging.Client() - client.setup_logging() + client.setup_logging(log_level=logging.DEBUG) + +if __name__ == "__main__": + initialize_client() if os.getenv("ENABLE_SUBSCRIBER", None): # set up pubsub listener diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 959131c5654f..2407b86add84 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -31,21 +31,24 @@ from google.cloud.logging_v2._helpers import retrieve_metadata_server -def simplelog(log_name=None, log_text="simple_log", **kwargs): +def simplelog(log_name=None, log_text="simple_log", severity="DEFAULT", **kwargs): + # allowed severity: default, debug, info, notice, warning, error, critical, alert, emergency + severity = severity.upper() client = google.cloud.logging.Client() logger = client.logger(log_name) - logger.log_text(log_text) + logger.log_text(log_text, severity=severity) -def pylogging(log_text="pylogging", severity="warning", **kwargs): +def pylogging(log_text="pylogging", severity="WARNING", **kwargs): # allowed severity: debug, info, warning, error, critical - if severity == "debug": + severity = severity.upper() + if severity == "DEBUG": logging.debug(log_text) - elif severity == "info": + elif severity == "INFO": logging.info(log_text) - elif severity == "warning": + elif severity == "WARNING": logging.warning(log_text) - elif severity == "error": + elif severity == "ERROR": logging.error(log_text) else: logging.critical(log_text) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 844828557135..374470cb4a94 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -71,10 +71,10 @@ def _trigger(self, function, **kwargs): self._script.run_command(Command.Trigger, [function, args_str]) @RetryErrors(exception=(LogsNotFound, RpcError), delay=2) - def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, max_tries=6): + def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, max_tries=6, **kwargs): if append_uuid: - log_text = f"{log_text} - {uuid.uuid1()}" - self._trigger(function, log_text=log_text) + log_text = f"{log_text} {uuid.uuid1()}" + self._trigger(function, log_text=log_text, **kwargs) filter_str = self._add_time_condition_to_filter(log_text) # give the command time to be received tries = 0 @@ -134,11 +134,6 @@ def test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") - def test_no_duplicates(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - - self.assertEqual(len(log_list), 1) def test_monitored_resource(self): if self.language != "python": @@ -155,3 +150,17 @@ def test_monitored_resource(self): for label in self.monitored_resource_labels: self.assertTrue(found_resource.labels[label], f'resource.labels[{label}] is not set') + + def test_severity(self): + if self.language != "python": + # to do: enable test for other languages + return True + log_text = f"{inspect.currentframe().f_code.co_name}" + severities = ['EMERGENCY', 'ALERT', 'CRITICAL', 'ERROR', 'WARNING', 'NOTICE', 
'INFO', 'DEBUG'] + for severity in severities: + log_list = self.trigger_and_retrieve(log_text, severity=severity) + found_severity = log_list[-1].severity + self.assertEqual(found_severity.lower(), severity.lower()) + # DEFAULT severity should result in empty field + log_list = self.trigger_and_retrieve(log_text, severity="DEFAULT") + self.assertIsNone(log_list[-1].severity) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 2ee48a2ae499..a71b0102ea83 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -37,11 +37,6 @@ def pylogging_test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") - def test_no_duplicates_pylogging(self): - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") - - self.assertEqual(len(log_list), 1) def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" @@ -55,3 +50,13 @@ def test_monitored_resource_pylogging(self): for label in self.monitored_resource_labels: self.assertTrue(found_resource.labels[label], f'resource.labels[{label}] is not set') + + def test_severity_pylogging(self): + severities = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'] + for severity in severities: + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, function="pylogging", severity=severity) + found_severity = log_list[-1].severity + + self.assertEqual(found_severity.lower(), severity.lower()) + diff --git a/tests/environment b/tests/environment index 265a954c13ab..eb60e823924d 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 265a954c13ab30188adbf42a9e63d2bb7b9969ab +Subproject commit eb60e823924dabaaea62e2ec0b8243eb868c1826 From 92440fd5d208c1f36ccac70b2cdec91a333a21e9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 15 Mar 2021 10:46:55 -0700 Subject: [PATCH 411/855] chore: release 2.3.0 (#205) --- packages/google-cloud-logging/CHANGELOG.md | 19 +++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index b89976db1bcc..02a416d51fd5 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.3.0](https://www.github.com/googleapis/python-logging/compare/v2.2.0...v2.3.0) (2021-03-15) + + +### Features + +* Add json setting to allow unicodes to show in log instead of ascii ch… ([#193](https://www.github.com/googleapis/python-logging/issues/193)) ([e8c8e30](https://www.github.com/googleapis/python-logging/commit/e8c8e30fc4f618273dec1415c752eed203c75b67)) +* detect monitored resources on all GCP environments ([#200](https://www.github.com/googleapis/python-logging/issues/200)) ([4eda681](https://www.github.com/googleapis/python-logging/commit/4eda6813d19df8a119f1dcd47ff79389310d4a6f)) + + +### Bug Fixes + +* logger uses default resource ([#207](https://www.github.com/googleapis/python-logging/issues/207)) 
([0f90a79](https://www.github.com/googleapis/python-logging/commit/0f90a79d165314d261413cc369408e15f711129f)) +* no duplicate logs on GCF or GAE ([#209](https://www.github.com/googleapis/python-logging/issues/209)) ([37e6c8e](https://www.github.com/googleapis/python-logging/commit/37e6c8e90775ddc2fc454f5cb13cab04231c2222)) + + +### Documentation + +* add python std_logging to sample browser ([#173](https://www.github.com/googleapis/python-logging/issues/173)) ([7cc7275](https://www.github.com/googleapis/python-logging/commit/7cc727598c33e7e264ddbeef0a2604a3c215b260)) + ## [2.2.0](https://www.github.com/googleapis/python-logging/compare/v2.1.1...v2.2.0) (2021-01-27) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8ede9877ec2e..8885090fe78a 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.2.0" +version = "2.3.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 05724ccd319c8015c84bc2d4f9739ea5d674c9ee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Mar 2021 19:58:43 +0100 Subject: [PATCH 412/855] chore(deps): update dependency google-cloud-logging to v2.3.0 (#221) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index cc757aabcac4..10880dd8aadb 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.2.0 +google-cloud-logging==2.3.0 google-cloud-bigquery==2.11.0 google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From 028a22ee7b1faf7540e20a2e9538e313a6ca0f86 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 18 Mar 2021 12:59:10 -0600 Subject: [PATCH 413/855] chore: delete sync-repo-settings.yml (#225) --- .../.github/sync-repo-settings.yml | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 packages/google-cloud-logging/.github/sync-repo-settings.yml diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yml b/packages/google-cloud-logging/.github/sync-repo-settings.yml deleted file mode 100644 index 29fffc283012..000000000000 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yml +++ /dev/null @@ -1,13 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings -# Rules for master branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `master` -- pattern: master - requiredStatusCheckContexts: - - 'Kokoro' - - 'cla/google' - - 'Samples - Lint' - - 'Samples - Python 3.6' - - 'Samples - Python 3.7' - - 'Samples - Python 3.8' \ No newline at end of file From e3bd93fbe68c713c1c7eab9d9c83efe965bc4c25 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 18 Mar 2021 12:00:27 -0700 Subject: [PATCH 414/855] chore: Re-generated to pick up changes from synthtool. (#223) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: add pre-commit-config to renovate ignore paths Disable renovate PRs on the .pre-commit-config.yaml which is templated from synthtool. https://docs.renovatebot.com/configuration-options/#ignorepaths Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Mon Mar 15 09:05:39 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 2c54c473779ea731128cea61a3a6c975a08a5378 Source-Link: https://github.com/googleapis/synthtool/commit/2c54c473779ea731128cea61a3a6c975a08a5378 --- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/renovate.json | 4 +--- packages/google-cloud-logging/synth.metadata | 6 +++--- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 32302e4883a1..a9024b15d725 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.8.4 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index 2b581d17036d..f08bc22c9a55 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -2,7 +2,5 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "git-submodules": { - "enabled": true - } + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 5656e5bc6529..8508a9432d2f 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "42bda27dd3eee2123cbe5b21dd75a587af7a910b" + "sha": "7a4b0543f9bc9f17ef7de071fb4ec6f2da642e45" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" + "sha": "2c54c473779ea731128cea61a3a6c975a08a5378" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" + "sha": "2c54c473779ea731128cea61a3a6c975a08a5378" } } ], From afba40861275399d92bd3c886f71843408c64100 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Mar 2021 22:52:39 +0100 Subject: [PATCH 415/855] chore(deps): update dependency google-cloud-bigquery to v2.12.0 (#224) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 10880dd8aadb..0fb2ce1fa344 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.0 -google-cloud-bigquery==2.11.0 +google-cloud-bigquery==2.12.0 google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From 2eb54425ed211815bc5b1e0f682c60545462f7dc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 23 Mar 2021 12:27:38 -0700 Subject: [PATCH 416/855] chore: upgrade gapic-generator-python to 0.43.1 (#234) PiperOrigin-RevId: 364411656 Source-Author: Google APIs Source-Date: Mon 
Mar 22 14:40:22 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 149a3a84c29c9b8189576c7442ccb6dcf6a8f95b Source-Link: https://github.com/googleapis/googleapis/commit/149a3a84c29c9b8189576c7442ccb6dcf6a8f95b --- .../config_service_v2/async_client.py | 7 ++ .../config_service_v2/transports/base.py | 25 ++-- .../config_service_v2/transports/grpc.py | 101 ++++++---------- .../transports/grpc_asyncio.py | 109 +++++++----------- .../logging_service_v2/async_client.py | 6 + .../logging_service_v2/transports/base.py | 24 ++-- .../logging_service_v2/transports/grpc.py | 101 ++++++---------- .../transports/grpc_asyncio.py | 109 +++++++----------- .../metrics_service_v2/async_client.py | 4 + .../metrics_service_v2/transports/base.py | 22 ++-- .../metrics_service_v2/transports/grpc.py | 101 ++++++---------- .../transports/grpc_asyncio.py | 109 +++++++----------- packages/google-cloud-logging/synth.metadata | 6 +- 13 files changed, 298 insertions(+), 426 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index a82d99b6e8e8..ef184d61c94b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -871,6 +871,7 @@ async def list_sinks( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -974,6 +975,7 @@ async def get_sink( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1214,6 +1216,7 @@ async def update_sink( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1302,6 +1305,7 @@ async def delete_sink( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1394,6 +1398,7 @@ async def list_exclusions( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1500,6 +1505,7 @@ async def get_exclusion( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1810,6 +1816,7 @@ async def delete_exclusion( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 3e17598fe680..3981d8e9f219 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -73,10 +73,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -84,6 +84,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -93,20 +96,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -154,6 +154,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -169,6 +170,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -187,6 +189,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -202,6 +205,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -217,6 +221,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -232,6 +237,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -253,6 +259,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index b749eb5d3f33..67f2ea7056da 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -107,7 +107,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -115,70 +117,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -186,17 +168,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -210,7 +183,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index f0f1ca07063c..3eabb2bcd3e9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -62,7 +62,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -140,10 +140,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -152,7 +152,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -160,70 +162,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -231,17 +213,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 5afd77be56fd..f61556922983 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -255,6 +255,7 @@ async def delete_log( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -432,6 +433,7 @@ async def write_log_entries( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -563,6 +565,7 @@ async def list_log_entries( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -628,6 +631,7 @@ async def list_monitored_resource_descriptors( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -720,6 +724,7 @@ async def list_logs( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -782,6 +787,7 @@ def tail_log_entries( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index be9dcdbfee87..66003ef95cd0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -74,10 +74,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -85,6 +85,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: @@ -94,20 +97,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -122,6 +122,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -137,6 +138,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -152,6 +154,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -167,6 +170,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -182,6 +186,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -197,6 +202,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=3600.0, ), default_timeout=3600.0, client_info=client_info, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index e90b2a5fec13..b52d306f3daf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -107,7 +107,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -115,70 +117,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -186,17 +168,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -210,7 +183,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 40037da25ed7..0ba87029cfe5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -62,7 +62,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -140,10 +140,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -152,7 +152,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -160,70 +162,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -231,17 +213,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 09b2c317326c..93a652b79637 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -244,6 +244,7 @@ async def list_log_metrics( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -344,6 +345,7 @@ async def get_log_metric( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -551,6 +553,7 @@ async def update_log_metric( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -632,6 +635,7 @@ async def delete_log_metric( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 78d226dfa6ec..c6ae3da41e5e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -74,10 +74,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -85,6 +85,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -94,20 +97,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -122,6 +122,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -137,6 +138,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -155,6 +157,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -170,6 +173,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index e55bf32e5cb7..a9447ac26a9a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -107,7 +107,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -115,70 +117,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -186,17 +168,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -210,7 +183,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index ec93d3850280..94017be9d889 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -62,7 +62,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -140,10 +140,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -152,7 +152,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -160,70 +162,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -231,17 +213,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 8508a9432d2f..b32612bb023a 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7a4b0543f9bc9f17ef7de071fb4ec6f2da642e45" + "sha": "7246e7b18d75fe252928d93576fcbb4f3d4be1f2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "28a591963253d52ce3a25a918cafbdd9928de8cf", - "internalRef": "361662015" + "sha": "149a3a84c29c9b8189576c7442ccb6dcf6a8f95b", + "internalRef": "364411656" } }, { From 2d0821f7cdfac6b536c9432e4e2d967d69123a51 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 24 Mar 2021 11:13:20 -0700 Subject: [PATCH 417/855] fix: revert default resource behavior to avoid breaking changes (#237) --- .../cloud/logging_v2/handlers/handlers.py | 9 +- .../google/cloud/logging_v2/logger.py | 7 +- .../environment/deployable/nodejs/app.js | 45 ++++++++++ .../deployable/nodejs/package.json | 20 +++++ .../environment/deployable/python/router.py | 2 + .../envctl/env_scripts/nodejs/functions.sh | 83 +++++++++++++++++++ .../tests/environment/noxfile.py | 2 +- .../tests/environment/tests/common/common.py | 43 ++++++---- .../tests/environment/tests/common/python.py | 13 +-- .../environment/tests/nodejs/__init__.py | 13 +++ .../tests/nodejs/test_functions.py | 31 +++++++ .../environment/tests/python/test_cloudrun.py | 2 +- .../tests/unit/handlers/test_handlers.py | 7 +- .../tests/unit/test_logger.py | 40 ++------- tests/environment | 2 +- 15 files changed, 248 insertions(+), 71 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index feeac9171f1a..ffcc03ae2079 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -16,6 +16,8 @@ import logging + +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers._monitored_resources import detect_resource @@ -61,7 +63,7 @@ def __init__( *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=None, + resource=_GLOBAL_RESOURCE, labels=None, stream=None, ): @@ -80,15 +82,12 @@ def __init__( :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): - Resource for this Handler. If not given, will be inferred from the environment. + Resource for this Handler. Defaults to ``global``. 
labels (Optional[dict]): Monitored resource of the entry, defaults to the global resource type. stream (Optional[IO]): Stream to be used by the handler. """ super(CloudLoggingHandler, self).__init__(stream) - if not resource: - # infer the correct monitored resource from the local environment - resource = detect_resource(client.project) self.name = name self.client = client self.transport = transport(client, name) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 124c33934908..fafb70629380 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -20,7 +20,6 @@ from google.cloud.logging_v2.entries import StructEntry from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource -from google.cloud.logging_v2.handlers._monitored_resources import detect_resource _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -49,13 +48,15 @@ class Logger(object): See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ - def __init__(self, name, client, *, labels=None): + def __init__(self, name, client, *, labels=None, resource=_GLOBAL_RESOURCE): """ Args: name (str): The name of the logger. client (~logging_v2.client.Client): A client which holds credentials and project configuration for the logger (which requires a project). + resource (~logging_v2.Resource): a monitored resource object + representing the resource the code was run on. labels (Optional[dict]): Mapping of default labels for entries written via this logger. @@ -63,7 +64,7 @@ def __init__(self, name, client, *, labels=None): self.name = name self._client = client self.labels = labels - self.default_resource = detect_resource(client.project) + self.default_resource = resource @property def client(self): diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js new file mode 100644 index 000000000000..c6c1fc85fed0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js @@ -0,0 +1,45 @@ +const {Logging} = require('@google-cloud/logging'); +const logging = new Logging(); + +/** + * Background Cloud Function to be triggered by Pub/Sub. + * This function is exported by index.js, and executed when + * the trigger topic receives a message. + * + * @param {object} message The Pub/Sub message. + * @param {object} context The event metadata. + */ +exports.pubsubFunction = (message, context) => { + const msg = message.data + ? Buffer.from(message.data, 'base64').toString() + : console.log("no log function was invoked"); + + console.log('attributes if any: '); + console.log(message.attributes); + + // TODO later (nicolezhu): + // write fns in separate file and do var funcFo0 = function(){}... modules.exports={ func: funcFoo} + // var methods = require()... 
methods['funcString']() + switch (msg) { + case 'simplelog': + if (message.attributes) { + simplelog(message.attributes['log_name'], message.attributes['log_text']); + } else { + simplelog(); + } + break; + default: + console.log(`Invalid log function was invoked.`); + } +}; + +/** + * envctl nodejs trigger simplelog log_name=foo,log_text=bar + */ +function simplelog(logname = "my-log", logtext = "hello world" ) { + const log = logging.log(logname); + + const text_entry = log.entry(logtext); + + log.write(text_entry).then(r => console.log(r)); +} diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json new file mode 100644 index 000000000000..58203b6742ef --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json @@ -0,0 +1,20 @@ +{ + "name": "node", + "version": "1.0.0", + "license": "Apache-2.0", + "description": "", + "main": "app.js", + "dependencies": { + "express": "^4.17.1", + "@google-cloud/logging": "file:nodejs-logging" + }, + "devDependencies": {}, + "engines": { + "node": ">=12.0.0" + }, + "scripts": { + "start": "node app.js" + }, + "author": "", + "license": "ISC" +} diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/router.py b/packages/google-cloud-logging/tests/environment/deployable/python/router.py index cc76a3b15509..a0ab19c8964a 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/router.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/router.py @@ -87,11 +87,13 @@ def pubsub_callback(message): else: logging.error(f"function {msg_str} not found") + def initialize_client(): # set up logging client = google.cloud.logging.Client() client.setup_logging(log_level=logging.DEBUG) + if __name__ == "__main__": initialize_client() diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh new file mode 100644 index 000000000000..4905e3570edb --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh @@ -0,0 +1,83 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="log-node-func-$(echo $ENVCTL_ID | head -c 8)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service + gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud functions describe $SERVICE_NAME --region us-west2 + if [[ $? 
== 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -e + + # TODO remove print + set -x + # set up deployment directory + # copy over local copy of library + pushd $SUPERREPO_ROOT + echo "in SUPERREPO_ROOT" + ls + tar -cvf $TMP_DIR/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . + popd + + mkdir $TMP_DIR/nodejs-logging + tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/nodejs-logging + + # copy test code into temporary test file + cp $REPO_ROOT/deployable/nodejs/app.js $TMP_DIR/app.js + cp $REPO_ROOT/deployable/nodejs/package.json $TMP_DIR/ + + # deploy function + local RUNTIME="nodejs12" + pushd $TMP_DIR + echo "in TMP_DIR" + ls + gcloud functions deploy $SERVICE_NAME \ + --entry-point pubsubFunction \ + --trigger-topic $SERVICE_NAME \ + --runtime $RUNTIME \ + --region us-west2 + popd +} + +filter-string() { + echo "resource.type=\"cloud_function\" AND resource.labels.function_name=\"$SERVICE_NAME\"" +} diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 18b6d1024fa6..1dc3db103408 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -129,7 +129,7 @@ def blacken(session: nox.sessions.Session) -> None: "functions", ], ) -@nox.parametrize("language", ["python", "go"]) +@nox.parametrize("language", ["python", "go", "nodejs"]) def tests(session, language, platform): """Run the e2e environment test suite.""" if os.environ.get("RUN_ENV_TESTS", "true") == "false": diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 374470cb4a94..dc1fbcdbc8f4 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -71,7 +71,9 @@ def _trigger(self, function, **kwargs): self._script.run_command(Command.Trigger, [function, args_str]) @RetryErrors(exception=(LogsNotFound, RpcError), delay=2) - def trigger_and_retrieve(self, log_text, function="simplelog", append_uuid=True, max_tries=6, **kwargs): + def trigger_and_retrieve( + self, log_text, function="simplelog", append_uuid=True, max_tries=6, **kwargs + ): if append_uuid: log_text = f"{log_text} {uuid.uuid1()}" self._trigger(function, log_text=log_text, **kwargs) @@ -134,29 +136,38 @@ def test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") + # add back after v3.0.0 + # def test_monitored_resource(self): + # if self.language != "python": + # # to do: add monitored resource info to go + # return True + # log_text = f"{inspect.currentframe().f_code.co_name}" + # log_list = self.trigger_and_retrieve(log_text) + # found_resource = log_list[-1].resource - def test_monitored_resource(self): - if self.language != "python": - # to do: add monitored resource info to go - return True - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) - found_resource = log_list[-1].resource - - self.assertIsNotNone(self.monitored_resource_name) - self.assertIsNotNone(self.monitored_resource_labels) + # self.assertIsNotNone(self.monitored_resource_name) + # 
self.assertIsNotNone(self.monitored_resource_labels) - self.assertEqual(found_resource.type, self.monitored_resource_name) - for label in self.monitored_resource_labels: - self.assertTrue(found_resource.labels[label], - f'resource.labels[{label}] is not set') + # self.assertEqual(found_resource.type, self.monitored_resource_name) + # for label in self.monitored_resource_labels: + # self.assertTrue(found_resource.labels[label], + # f'resource.labels[{label}] is not set') def test_severity(self): if self.language != "python": # to do: enable test for other languages return True log_text = f"{inspect.currentframe().f_code.co_name}" - severities = ['EMERGENCY', 'ALERT', 'CRITICAL', 'ERROR', 'WARNING', 'NOTICE', 'INFO', 'DEBUG'] + severities = [ + "EMERGENCY", + "ALERT", + "CRITICAL", + "ERROR", + "WARNING", + "NOTICE", + "INFO", + "DEBUG", + ] for severity in severities: log_list = self.trigger_and_retrieve(log_text, severity=severity) found_severity = log_list[-1].severity diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index a71b0102ea83..76ca235de0d0 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -37,7 +37,6 @@ def pylogging_test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") - def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, function="pylogging") @@ -48,15 +47,17 @@ def test_monitored_resource_pylogging(self): self.assertEqual(found_resource.type, self.monitored_resource_name) for label in self.monitored_resource_labels: - self.assertTrue(found_resource.labels[label], - f'resource.labels[{label}] is not set') + self.assertTrue( + found_resource.labels[label], f"resource.labels[{label}] is not set" + ) def test_severity_pylogging(self): - severities = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'] + severities = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] for severity in severities: log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging", severity=severity) + log_list = self.trigger_and_retrieve( + log_text, function="pylogging", severity=severity + ) found_severity = log_list[-1].severity self.assertEqual(found_severity.lower(), severity.lower()) - diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/__init__.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
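An aside on the `trigger_and_retrieve` helper reformatted in `common.py` above: it leans on a retry decorator to absorb log-ingestion latency before asserting. The sketch below shows the pattern only; it is an assumption about the decorator's shape, not the actual `RetryErrors` implementation these tests import, though the `delay=2` default is taken from the diff.

```
# Illustrative sketch of a RetryErrors-style decorator (assumed shape, not the
# implementation imported by these tests): re-run the wrapped call when one of
# the given exceptions is raised, sleeping `delay` seconds between attempts.
import functools
import time


def retry_errors(exception, delay=2, max_tries=6):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            for attempt in range(1, max_tries + 1):
                try:
                    return fn(*args, **kwargs)
                except exception:
                    if attempt == max_tries:
                        raise  # retries exhausted; surface the real failure
                    time.sleep(delay)
        return wrapper
    return decorator
```

Wrapped this way, `trigger_and_retrieve` gets several chances to see the freshly written entries before the test is declared a failure.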
diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py new file mode 100644 index 000000000000..cdfdf8a35dbc --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py @@ -0,0 +1,31 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common +from ..common.python import CommonPython + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions" + language = "nodejs" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = ["region", "function_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index e68578bf05b2..668232e8dda1 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
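The nodejs functions test above shows the suite's pattern: each environment/language pairing is a declarative subclass of `Common`, which supplies all of the trigger-and-verify machinery. A hypothetical extension to another nodejs environment would look like the class below; the `cloudrun` values are assumptions for illustration, not part of this change.

```
# Hypothetical illustration of the declarative test pattern; the environment
# name and resource labels below are assumed, not introduced by this patch.
import unittest

from ..common.common import Common


class TestCloudRunNodejs(Common, unittest.TestCase):

    environment = "cloudrun"  # assumed to map to an envctl/env_scripts/nodejs script
    language = "nodejs"

    monitored_resource_name = "cloud_run_revision"
    monitored_resource_labels = ["location", "service_name"]
```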
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 51e2f0703d57..80a1368b859e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -38,9 +38,7 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging_v2.handlers._monitored_resources import ( - _create_global_resource, - ) + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME patch = mock.patch( @@ -55,8 +53,7 @@ def test_ctor_defaults(self): self.assertIsInstance(handler.transport, _Transport) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - global_resource = _create_global_resource(self.PROJECT) - self.assertEqual(handler.resource, global_resource) + self.assertEqual(handler.resource, _GLOBAL_RESOURCE) self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 53ecac8f58f5..5ad4861784fb 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -99,15 +99,11 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.client, client2) def test_log_empty_defaults_w_default_labels(self): - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) - DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": detect_resource(self.PROJECT)._to_dict(), + "resource": {"type": "global", "labels": {}}, "labels": DEFAULT_LABELS, } ] @@ -174,11 +170,7 @@ def test_log_empty_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_defaults(self): - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) - - RESOURCE = detect_resource(self.PROJECT)._to_dict() + RESOURCE = {"type": "global", "labels": {}} TEXT = "TEXT" ENTRIES = [ { @@ -196,12 +188,8 @@ def test_log_text_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) - TEXT = "TEXT" - RESOURCE = detect_resource(self.PROJECT)._to_dict() + RESOURCE = {"type": "global", "labels": {}} DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -277,12 +265,8 @@ def test_log_text_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_defaults(self): - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) - STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = detect_resource(self.PROJECT)._to_dict() + RESOURCE = {"type": "global", "labels": {}} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), @@ -299,12 +283,8 @@ def test_log_struct_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) - STRUCT = 
{"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = detect_resource(self.PROJECT)._to_dict() + RESOURCE = {"type": "global", "labels": {}} DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -383,16 +363,13 @@ def test_log_proto_defaults(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) message = Struct(fields={"foo": Value(bool_value=True)}) ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": detect_resource(self.PROJECT)._to_dict(), + "resource": {"type": "global", "labels": {}}, } ] client = _Client(self.PROJECT) @@ -407,9 +384,6 @@ def test_log_proto_w_default_labels(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value - from google.cloud.logging_v2.handlers._monitored_resources import ( - detect_resource, - ) message = Struct(fields={"foo": Value(bool_value=True)}) DEFAULT_LABELS = {"foo": "spam"} @@ -417,7 +391,7 @@ def test_log_proto_w_default_labels(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": detect_resource(self.PROJECT)._to_dict(), + "resource": {"type": "global", "labels": {}}, "labels": DEFAULT_LABELS, } ] diff --git a/tests/environment b/tests/environment index eb60e823924d..1962721db8aa 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit eb60e823924dabaaea62e2ec0b8243eb868c1826 +Subproject commit 1962721db8aa382bb1f658921979a1c183bf2d1a From 3a059a8b6635ab57a01a45207509c372807309da Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 24 Mar 2021 13:06:00 -0700 Subject: [PATCH 418/855] fix: detect project from environment instead of from logger (#238) --- .../handlers/_monitored_resources.py | 46 ++++++++++--------- .../cloud/logging_v2/handlers/app_engine.py | 2 +- .../handlers/test__monitored_resources.py | 14 +++--- .../tests/unit/handlers/test_app_engine.py | 11 +++-- 4 files changed, 42 insertions(+), 31 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index bd05c252239f..ad1de4d2bbec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -51,14 +51,16 @@ _GKE_CLUSTER_NAME = "instance/attributes/cluster-name" """Attribute in metadata server when in GKE environment.""" +_PROJECT_NAME = "project/project-id" +"""Attribute in metadata server when in GKE environment.""" + -def _create_functions_resource(project): +def _create_functions_resource(): """Create a standardized Cloud Functions resource. - Args: - project (str): The project ID to pass on to the resource Returns: google.cloud.logging.Resource """ + project = retrieve_metadata_server(_PROJECT_NAME) region = retrieve_metadata_server(_REGION_ID) if _FUNCTION_NAME in os.environ: function_name = os.environ.get(_FUNCTION_NAME) @@ -77,15 +79,14 @@ def _create_functions_resource(project): return resource -def _create_kubernetes_resource(project): +def _create_kubernetes_resource(): """Create a standardized Kubernetes resource. 
- Args: - project (str): The project ID to pass on to the resource Returns: google.cloud.logging.Resource """ zone = retrieve_metadata_server(_ZONE_ID) cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) + project = retrieve_metadata_server(_PROJECT_NAME) resource = Resource( type="k8s_container", @@ -98,15 +99,14 @@ def _create_kubernetes_resource(project): return resource -def _create_compute_resource(project): +def _create_compute_resource(): """Create a standardized Compute Engine resource. - Args: - project (str): The project ID to pass on to the resource Returns: google.cloud.logging.Resource """ instance = retrieve_metadata_server(_GCE_INSTANCE_ID) zone = retrieve_metadata_server(_ZONE_ID) + project = retrieve_metadata_server(_PROJECT_NAME) resource = Resource( type="gce_instance", labels={ @@ -118,14 +118,13 @@ def _create_compute_resource(project): return resource -def _create_cloud_run_resource(project): +def _create_cloud_run_resource(): """Create a standardized Cloud Run resource. - Args: - project (str): The project ID to pass on to the resource Returns: google.cloud.logging.Resource """ region = retrieve_metadata_server(_REGION_ID) + project = retrieve_metadata_server(_PROJECT_NAME) resource = Resource( type="cloud_run_revision", labels={ @@ -139,14 +138,13 @@ def _create_cloud_run_resource(project): return resource -def _create_app_engine_resource(project): +def _create_app_engine_resource(): """Create a standardized App Engine resource. - Args: - project (str): The project ID to pass on to the resource Returns: google.cloud.logging.Resource """ zone = retrieve_metadata_server(_ZONE_ID) + project = retrieve_metadata_server(_PROJECT_NAME) resource = Resource( type="gae_app", labels={ @@ -160,13 +158,19 @@ def _create_app_engine_resource(project): def _create_global_resource(project): + """Create a global resource. + Args: + project (str): The project ID to pass on to the resource + Returns: + google.cloud.logging.Resource + """ return Resource(type="global", labels={"project_id": project}) def detect_resource(project=""): """Return the default monitored resource based on the local environment. 
Args: - project (str): The project ID to pass on to the resource + project (str): The project ID to pass on to the resource (if needed) Returns: google.cloud.logging.Resource: The default resource based on the environment """ @@ -175,21 +179,21 @@ def detect_resource(project=""): if all([env in os.environ for env in _GAE_ENV_VARS]): # App Engine Flex or Standard - return _create_app_engine_resource(project) + return _create_app_engine_resource() elif gke_cluster_name is not None: # Kubernetes Engine - return _create_kubernetes_resource(project) + return _create_kubernetes_resource() elif all([env in os.environ for env in _LEGACY_FUNCTION_ENV_VARS]) or all( [env in os.environ for env in _FUNCTION_ENV_VARS] ): # Cloud Functions - return _create_functions_resource(project) + return _create_functions_resource() elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]): # Cloud Run - return _create_cloud_run_resource(project) + return _create_cloud_run_resource() elif gce_instance_name is not None: # Compute Engine - return _create_compute_resource(project) + return _create_compute_resource() else: # use generic global resource return _create_global_resource(project) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index 7d16ab07a6f4..bc7daa9d0cea 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -77,7 +77,7 @@ def get_gae_resource(self): Returns: google.cloud.logging_v2.resource.Resource: Monitored resource for GAE. """ - return _create_app_engine_resource(self.project_id) + return _create_app_engine_resource() def get_gae_labels(self): """Return the labels for GAE app. 
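The net effect of this change is that every monitored-resource helper now reads the project id from the metadata server, and the `project` argument to `detect_resource` only feeds the `global` fallback. A small sketch of the resulting behavior; `my-project` and the printed values are placeholders, not output from this patch:

```
# Sketch of detect_resource() after this change; the values are illustrative.
# On GAE/GKE/Functions/Cloud Run/GCE the project id comes from the metadata
# server, so the argument below only matters for the "global" fallback.
from google.cloud.logging_v2.handlers._monitored_resources import detect_resource

resource = detect_resource(project="my-project")  # "my-project" is a placeholder
print(resource.type)    # e.g. "gae_app", "k8s_container", or "global"
print(resource.labels)  # {"project_id": "my-project"} for the global fallback
```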
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index 00fade39cc25..5acced157e44 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -61,6 +61,8 @@ def _mock_metadata(self, endpoint): or endpoint == _monitored_resources._GCE_INSTANCE_ID ): return self.NAME + elif endpoint == _monitored_resources._PROJECT_NAME: + return self.PROJECT else: return None @@ -75,7 +77,7 @@ def test_create_legacy_functions_resource(self): os.environ[_monitored_resources._CLOUD_RUN_SERVICE_ID] = self.NAME with patch: - legacy_func_resource = _create_functions_resource(self.PROJECT) + legacy_func_resource = _create_functions_resource() self.assertIsInstance(legacy_func_resource, Resource) self.assertEqual(legacy_func_resource.type, "cloud_function") @@ -90,7 +92,7 @@ def test_create_modern_functions_resource(self): ) os.environ[_monitored_resources._FUNCTION_NAME] = self.NAME with patch: - func_resource = _create_functions_resource(self.PROJECT) + func_resource = _create_functions_resource() self.assertIsInstance(func_resource, Resource) self.assertEqual(func_resource.type, "cloud_function") @@ -105,7 +107,7 @@ def test_create_kubernetes_resource(self): wraps=self._mock_metadata, ) with patch: - resource = _create_kubernetes_resource(self.PROJECT) + resource = _create_kubernetes_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "k8s_container") @@ -120,7 +122,7 @@ def test_compute_resource(self): ) with patch: - resource = _create_compute_resource(self.PROJECT) + resource = _create_compute_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "gce_instance") self.assertEqual(resource.labels["project_id"], self.PROJECT) @@ -136,7 +138,7 @@ def test_cloud_run_resource(self): os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG with patch: - resource = _create_cloud_run_resource(self.PROJECT) + resource = _create_cloud_run_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_run_revision") self.assertEqual(resource.labels["project_id"], self.PROJECT) @@ -153,7 +155,7 @@ def test_app_engine_resource(self): os.environ[_monitored_resources._GAE_SERVICE_ENV] = self.NAME os.environ[_monitored_resources._GAE_VERSION_ENV] = self.VERSION with patch: - resource = _create_app_engine_resource(self.PROJECT) + resource = _create_app_engine_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "gae_app") self.assertEqual(resource.labels["project_id"], self.PROJECT) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 1ac9c5dd574f..65e804573855 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -40,17 +40,19 @@ def test_constructor_w_gae_standard_env(self): with mock.patch( "os.environ", new={ - app_engine._GAE_PROJECT_ENV_STANDARD: "test_project", app_engine._GAE_SERVICE_ENV: "test_service", app_engine._GAE_VERSION_ENV: "test_version", }, + ), mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + 
return_value=self.PROJECT, ): handler = self._make_one(client, transport=_Transport) self.assertIs(handler.client, client) self.assertEqual(handler.name, app_engine._DEFAULT_GAE_LOGGER_NAME) self.assertEqual(handler.resource.type, "gae_app") - self.assertEqual(handler.resource.labels["project_id"], "test_project") + self.assertEqual(handler.resource.labels["project_id"], self.PROJECT) self.assertEqual(handler.resource.labels["module_id"], "test_service") self.assertEqual(handler.resource.labels["version_id"], "test_version") self.assertIs(handler.stream, sys.stderr) @@ -73,6 +75,9 @@ def test_constructor_w_gae_flex_env(self): app_engine._GAE_SERVICE_ENV: "test_service_2", app_engine._GAE_VERSION_ENV: "test_version_2", }, + ), mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + return_value=self.PROJECT, ): handler = self._make_one( client, name=name, transport=_Transport, stream=stream @@ -81,7 +86,7 @@ def test_constructor_w_gae_flex_env(self): self.assertIs(handler.client, client) self.assertEqual(handler.name, name) self.assertEqual(handler.resource.type, "gae_app") - self.assertEqual(handler.resource.labels["project_id"], "test_project_2") + self.assertEqual(handler.resource.labels["project_id"], self.PROJECT) self.assertEqual(handler.resource.labels["module_id"], "test_service_2") self.assertEqual(handler.resource.labels["version_id"], "test_version_2") self.assertIs(handler.stream, stream) From 23cef543aee245a3c09ddebf15bcc4799e2962d2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 24 Mar 2021 13:41:59 -0700 Subject: [PATCH 419/855] chore: release 2.3.1 (#240) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 8 ++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 02a416d51fd5..e7822c4d5e0c 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [2.3.1](https://www.github.com/googleapis/python-logging/compare/v2.3.0...v2.3.1) (2021-03-24) + + +### Bug Fixes + +* detect project from environment instead of from logger ([#238](https://www.github.com/googleapis/python-logging/issues/238)) ([813b97c](https://www.github.com/googleapis/python-logging/commit/813b97cb936fa5acc2a4de567e2c84d746527e98)) +* revert default resource behavior to avoid breaking changes ([#237](https://www.github.com/googleapis/python-logging/issues/237)) ([24a0a5e](https://www.github.com/googleapis/python-logging/commit/24a0a5e674430e97a3a2e3b54477d8f95fa08ec6)) + ## [2.3.0](https://www.github.com/googleapis/python-logging/compare/v2.2.0...v2.3.0) (2021-03-15) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8885090fe78a..326162660bee 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.3.0" +version = "2.3.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b023ad8f30783b6ac1b78fc4377c0d881677b29c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 25 Mar 2021 
18:31:24 +0100 Subject: [PATCH 420/855] chore(deps): update dependency google-cloud-logging to v2.3.1 (#242) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0fb2ce1fa344..dbf757dbe022 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.3.0 +google-cloud-logging==2.3.1 google-cloud-bigquery==2.12.0 google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From 75432051c6de6de95ceadba2c012b3dc249fe045 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 25 Mar 2021 11:42:59 -0700 Subject: [PATCH 421/855] build(python): fail nox sessions if a python version is missing (#230) --- packages/google-cloud-logging/noxfile.py | 3 +++ packages/google-cloud-logging/synth.metadata | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 1183ca5fbd0b..0fea45535325 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -41,6 +41,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index b32612bb023a..fbb31e23257b 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7246e7b18d75fe252928d93576fcbb4f3d4be1f2" + "sha": "c7bb2499e265a096a53c1644d48acb1c1429a9d5" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "2c54c473779ea731128cea61a3a6c975a08a5378" + "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "2c54c473779ea731128cea61a3a6c975a08a5378" + "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" } } ], From 28cc0f97314e3c926031aaa8fd4a40294a8635e6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 25 Mar 2021 19:43:16 +0100 Subject: [PATCH 422/855] chore(deps): update dependency google-cloud-bigquery to v2.13.1 (#232) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index dbf757dbe022..44e63a5342d6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.12.0 +google-cloud-bigquery==2.13.1 google-cloud-storage==1.36.2 google-cloud-pubsub==2.4.0 From f428e9a00668684ee48f8396bf6d29d90c1387e4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 1 Apr 2021 15:57:47 -0700 Subject: [PATCH 423/855] chore: Re-generated to pick up changes from self. 
(#222) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index fbb31e23257b..36ff3816601d 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "c7bb2499e265a096a53c1644d48acb1c1429a9d5" + "sha": "7246e7b18d75fe252928d93576fcbb4f3d4be1f2" } }, { @@ -132,6 +132,7 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", + "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From fee56f91590228c5440a12a656fc503fe29f58c7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 2 Apr 2021 00:58:08 +0200 Subject: [PATCH 424/855] chore(deps): update dependency google-cloud-pubsub to v2.4.1 (#246) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 44e63a5342d6..f7f5bbe0eff4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.13.1 google-cloud-storage==1.36.2 -google-cloud-pubsub==2.4.0 +google-cloud-pubsub==2.4.1 From 6b3a572fae41b963091453732e37db482af8d341 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 2 Apr 2021 16:01:12 -0700 Subject: [PATCH 425/855] fix(deps): fix minimum required version of google-api-core (#244) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): add kokoro configs for periodic builds against head This change should be non-destructive. Note for library repo maintainers: After applying this change, you can easily add (or change) periodic builds against head by adding config files in google3. See python-pubsub repo for example. Source-Author: Takashi Matsuo Source-Date: Fri Mar 19 11:17:59 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 79c8dd7ee768292f933012d3a69a5b4676404cda Source-Link: https://github.com/googleapis/synthtool/commit/79c8dd7ee768292f933012d3a69a5b4676404cda * chore(deps): update precommit hook pycqa/flake8 to v3.9.0 This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pycqa/flake8](https://gitlab.com/pycqa/flake8) | repository | minor | `3.8.4` -> `3.9.0` | --- ### Release Notes
pycqa/flake8: [`v3.9.0`](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0) ([Compare Source](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0))
--- Source-Author: WhiteSource Renovate Source-Date: Tue Mar 23 17:38:03 2021 +0100 Source-Repo: googleapis/synthtool Source-Sha: f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 Source-Link: https://github.com/googleapis/synthtool/commit/f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 * test(python): use constraints files to check dependency lower bounds Use a constraints file when installing dependencies for system and unit tests nox sessions. https://pip.pypa.io/en/stable/user_guide/#constraints-files > Constraints files are requirements files that **only control which version of a requirement is installed, not whether it is installed or not**. Their syntax and contents is nearly identical to Requirements Files. There is one key difference: Including a package in a constraints file does not trigger installation of the package. ``` testing ├── constraints-3.10.txt ├── constraints-3.11.txt ├── constraints-3.6.txt ├── constraints-3.7.txt ├── constraints-3.8.txt └── constraints-3.9.txt ``` Going forward, one constraints file (currently 3.6) will be populated with every library requirement and extra listed in the `setup.py`. The constraints file will pin each requirement to the lower bound. This ensures that library maintainers will see test failures if they forget to update a lower bound on a dependency.
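For illustration, a minimal nox session wired up this way might look like the sketch below. The `-c` constraints pattern and the `testing/constraints-{version}.txt` path mirror the `noxfile.py` changes in this commit; the dependency list and test command are simplified assumptions, not the full session:

```python
# Sketch: a noxfile.py session that checks dependency lower bounds by
# installing everything under a per-Python-version constraints file.
import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


@nox.session(python="3.6")
def unit(session):
    # pip treats the "-c" file as constraints: it pins the version of any
    # package that gets installed, but never installs anything by itself.
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
    session.install("-e", ".", "-c", constraints_path)
    session.run("py.test", "tests/unit")
```

With `testing/constraints-3.6.txt` pinning, for example, `google-api-core==1.22.2`, the Python 3.6 unit tests always run against the declared lower bound, so forgetting to raise a lower bound in `setup.py` surfaces as a test failure rather than passing silently.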
See https://github.com/googleapis/python-bigquery/pull/263 for an example Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Mar 23 10:52:02 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 86ed43d4f56e6404d068e62e497029018879c771 Source-Link: https://github.com/googleapis/synthtool/commit/86ed43d4f56e6404d068e62e497029018879c771 * fix: update minimum version of google-api-core Co-authored-by: Bu Sun Kim Co-authored-by: Daniel Sanche --- .../samples/python3.6/periodic-head.cfg | 11 ++ .../samples/python3.7/periodic-head.cfg | 11 ++ .../samples/python3.8/periodic-head.cfg | 11 ++ .../.kokoro/test-samples-against-head.sh | 28 +++++ .../.kokoro/test-samples-impl.sh | 102 ++++++++++++++++++ .../.kokoro/test-samples.sh | 96 +++-------------- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/noxfile.py | 29 ++++- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/synth.metadata | 11 +- .../testing/constraints-3.6.txt | 4 +- 11 files changed, 216 insertions(+), 91 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg create mode 100755 packages/google-cloud-logging/.kokoro/test-samples-against-head.sh create mode 100755 packages/google-cloud-logging/.kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh new file mode 100755 index 000000000000..635a5ace0c20 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# 
Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-logging + +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh new file mode 100755 index 000000000000..cf5de74c17a5 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. \`./samples\` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For the cloud-run session, we activate the service account for the gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails.
+RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index e75891832fd4..0f5f8d4008f6 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewind the repo to the latest release and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-logging # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index a9024b15d725..32302e4883a1 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 0fea45535325..7eb35fecc60f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -84,11 +87,24 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. 
- session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install( + "mock", + "pytest", + "pytest-cov", + "flask", + "webob", + "django", + "-c", + constraints_path, + ) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -115,6 +131,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -147,8 +166,10 @@ def system(session): "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "-c", + constraints_path, ) - session.install("-e", ".") + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 326162660bee..8b794791c41f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "proto-plus >= 1.11.0", ] diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 36ff3816601d..2af114c7d5e7 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7246e7b18d75fe252928d93576fcbb4f3d4be1f2" + "sha": "bc75a0be5403ab4533b0241cead8a0f4e841d751" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } } ], @@ -74,16 +74,21 @@ ".kokoro/samples/lint/presubmit.cfg", ".kokoro/samples/python3.6/common.cfg", ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic-head.cfg", ".kokoro/samples/python3.6/periodic.cfg", ".kokoro/samples/python3.6/presubmit.cfg", ".kokoro/samples/python3.7/common.cfg", ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic-head.cfg", ".kokoro/samples/python3.7/periodic.cfg", ".kokoro/samples/python3.7/presubmit.cfg", ".kokoro/samples/python3.8/common.cfg", ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic-head.cfg", ".kokoro/samples/python3.8/periodic.cfg", ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples-against-head.sh", + ".kokoro/test-samples-impl.sh", ".kokoro/test-samples.sh", ".kokoro/trampoline.sh", ".kokoro/trampoline_v2.sh", diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt 
b/packages/google-cloud-logging/testing/constraints-3.6.txt index 0e0bdeb0b73e..ae89ab4a1cca 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -5,6 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 +google-api-core==1.22.2 google-cloud-core==1.4.1 -proto-plus==1.11.0 \ No newline at end of file +proto-plus==1.11.0 From 4022a1e892ebfb5560bab35b4dc0481c1f830133 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 2 Apr 2021 16:18:58 -0700 Subject: [PATCH 426/855] chore: remove logging-v2-py.tar.gz from file tracking (#248) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 +-- 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 2af114c7d5e7..703aaf0e5ac3 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "bc75a0be5403ab4533b0241cead8a0f4e841d751" + "sha": "79b37c3566e71880c1b63a3c3b7e04e9df910c2c" } }, { @@ -137,7 +137,6 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From ae66b58f25379549e0a787a16d3b96ce4a4a331f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 5 Apr 2021 07:42:27 -0700 Subject: [PATCH 427/855] build(python): update docfx job to use new plugin (#250) Source-Author: Dan Lee <71398022+dandhlee@users.noreply.github.com> Source-Date: Tue Mar 30 19:36:37 2021 -0400 Source-Repo: googleapis/synthtool Source-Sha: 4501974ad08b5d693311457e2ea4ce845676e329 Source-Link: https://github.com/googleapis/synthtool/commit/4501974ad08b5d693311457e2ea4ce845676e329 --- packages/google-cloud-logging/noxfile.py | 4 +--- packages/google-cloud-logging/synth.metadata | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 7eb35fecc60f..eec35ebb4b7f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -230,9 +230,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 703aaf0e5ac3..840550083af7 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "79b37c3566e71880c1b63a3c3b7e04e9df910c2c" + "sha": "ecefea40c367aa2a50ee6591241e18c3ac1331d1" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } } ], From 7ec80d4dbb545a223c37856b5f700e6a49b630f5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 08:30:06 -0700 Subject: [PATCH 428/855] chore: Add license headers for python config files (#253) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/920075d4-7b9c-4b14-9957-7b33425ad95b/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc --- .../google-cloud-logging/.pre-commit-config.yaml | 14 ++++++++++++++ packages/google-cloud-logging/docs/conf.py | 13 +++++++++++++ packages/google-cloud-logging/synth.metadata | 6 +++--- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 32302e4883a1..8912e9b5d7d7 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 6da1e2e7988d..8e1d46bc779a 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-logging documentation build configuration file # diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 840550083af7..c881df5796a8 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "ecefea40c367aa2a50ee6591241e18c3ac1331d1" + "sha": "7eeb6f9b62a764c687b129713b4fba6ce006fc45" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } } ], From ecfeaa9e1cdaf48b2d85ba3a485cab4d4e741d06 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Apr 2021 22:10:52 +0200 Subject: [PATCH 429/855] chore(deps): update dependency google-cloud-storage to v1.37.0 (#243) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f7f5bbe0eff4..83ffc5b5af8c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.13.1 -google-cloud-storage==1.36.2 +google-cloud-storage==1.37.0 google-cloud-pubsub==2.4.1 From 6a6b0523a02d89f49d10fab90704b33b5b6a6078 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 9 Apr 2021 13:41:58 -0700 Subject: [PATCH 430/855] feat: use standard output logs on serverless environments (#228) --- .../google/cloud/logging/handlers/__init__.py | 4 + .../google/cloud/logging_v2/client.py | 31 ++-- .../cloud/logging_v2/handlers/__init__.py | 4 + .../cloud/logging_v2/handlers/handlers.py | 34 +++- .../logging_v2/handlers/structured_log.py | 55 ++++++ .../environment/deployable/go/.dockerignore | 1 + .../environment/deployable/go/Dockerfile | 4 +- .../deployable/nodejs/.dockerignore | 4 + .../environment/deployable/nodejs/.gitignore | 4 + .../environment/deployable/nodejs/Dockerfile | 35 ++++ .../environment/deployable/nodejs/app.js | 97 +++++++---- .../deployable/nodejs/package.json | 4 +- .../environment/deployable/nodejs/tests.js | 31 ++++ .../envctl/env_scripts/nodejs/cloudrun.sh | 105 +++++++++++ .../envctl/env_scripts/nodejs/functions.sh | 1 + .../environment/tests/nodejs/test_cloudrun.py | 38 ++++ .../tests/unit/handlers/test_handlers.py | 164 ++++++++++++++++++ .../unit/handlers/test_structured_log.py | 151 ++++++++++++++++ tests/environment | 2 +- 19 files changed, 722 insertions(+), 47 deletions(-) create mode 100644 
packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/.dockerignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile create mode 100644 packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py create mode 100644 packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py diff --git a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py index 29ed8f0d165c..e27f8e673334 100644 --- a/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging/handlers/__init__.py @@ -16,12 +16,16 @@ from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler from google.cloud.logging_v2.handlers.container_engine import ContainerEngineHandler +from google.cloud.logging_v2.handlers.structured_log import StructuredLogHandler +from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.handlers import setup_logging __all__ = [ "AppEngineHandler", + "CloudLoggingFilter", "CloudLoggingHandler", "ContainerEngineHandler", + "StructuredLogHandler", "setup_logging", ] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 17d8534010dc..0b5fd1dd3101 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -16,6 +16,7 @@ import logging import os +import sys try: from google.cloud.logging_v2 import _gapic @@ -36,6 +37,7 @@ from google.cloud.logging_v2.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers import AppEngineHandler from google.cloud.logging_v2.handlers import ContainerEngineHandler +from google.cloud.logging_v2.handlers import StructuredLogHandler from google.cloud.logging_v2.handlers import setup_logging from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS from google.cloud.logging_v2.resource import Resource @@ -53,6 +55,7 @@ _GAE_RESOURCE_TYPE = "gae_app" _GKE_RESOURCE_TYPE = "k8s_container" _GCF_RESOURCE_TYPE = "cloud_function" +_RUN_RESOURCE_TYPE = "cloud_run_revision" class Client(ClientWithProject): @@ -347,18 +350,22 @@ def get_default_handler(self, **kw): """ monitored_resource = kw.pop("resource", detect_resource(self.project)) - if ( - isinstance(monitored_resource, Resource) - and monitored_resource.type == _GAE_RESOURCE_TYPE - ): - return AppEngineHandler(self, **kw) - elif ( - isinstance(monitored_resource, Resource) - and monitored_resource.type == _GKE_RESOURCE_TYPE - ): - return ContainerEngineHandler(**kw) - else: - return CloudLoggingHandler(self, resource=monitored_resource, **kw) + if isinstance(monitored_resource, Resource): + if monitored_resource.type == _GAE_RESOURCE_TYPE: + return AppEngineHandler(self, **kw) + elif monitored_resource.type == _GKE_RESOURCE_TYPE: + return 
ContainerEngineHandler(**kw) + elif ( + monitored_resource.type == _GCF_RESOURCE_TYPE + and sys.version_info[0] == 3 + and sys.version_info[1] >= 8 + ): + # Cloud Functions with runtimes >= 3.8 support structured logs on standard out. + # 3.7 should use the standard CloudLoggingHandler, which sends logs over the network. + return StructuredLogHandler(**kw, project=self.project) + elif monitored_resource.type == _RUN_RESOURCE_TYPE: + return StructuredLogHandler(**kw, project=self.project) + return CloudLoggingHandler(self, resource=monitored_resource, **kw) def setup_logging( self, *, log_level=logging.INFO, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, **kw diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py index 29ed8f0d165c..a1ed08b5e3dd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/__init__.py @@ -16,12 +16,16 @@ from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler from google.cloud.logging_v2.handlers.container_engine import ContainerEngineHandler +from google.cloud.logging_v2.handlers.structured_log import StructuredLogHandler from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter from google.cloud.logging_v2.handlers.handlers import setup_logging __all__ = [ "AppEngineHandler", + "CloudLoggingFilter", "CloudLoggingHandler", "ContainerEngineHandler", + "StructuredLogHandler", "setup_logging", ] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index ffcc03ae2079..175cd010a7ce 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -16,10 +16,10 @@ import logging - from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +from google.cloud.logging_v2.handlers._helpers import get_request_data DEFAULT_LOGGER_NAME = "python" @@ -28,6 +28,38 @@ _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") +class CloudLoggingFilter(logging.Filter): + """Python standard ``logging`` Filter class to add Cloud Logging + information to each LogRecord. + + When attached to a LogHandler, each incoming log will receive trace and + http_request fields related to the request. This data can be overwritten using + the `extra` argument when writing logs.
+ """ + + def __init__(self, project=None): + self.project = project + + def filter(self, record): + # ensure record has all required fields set + record.lineno = 0 if record.lineno is None else record.lineno + record.msg = "" if record.msg is None else record.msg + record.funcName = "" if record.funcName is None else record.funcName + record.pathname = "" if record.pathname is None else record.pathname + # find http request data + inferred_http, inferred_trace = get_request_data() + if inferred_trace is not None and self.project is not None: + inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" + + record.trace = getattr(record, "trace", inferred_trace) or "" + record.http_request = getattr(record, "http_request", inferred_http) or {} + record.request_method = record.http_request.get("requestMethod", "") + record.request_url = record.http_request.get("requestUrl", "") + record.user_agent = record.http_request.get("userAgent", "") + record.protocol = record.http_request.get("protocol", "") + return True + + class CloudLoggingHandler(logging.StreamHandler): """Handler that directly makes Cloud Logging API calls. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py new file mode 100644 index 000000000000..56d032d0ad36 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -0,0 +1,55 @@ +# Copyright 2021 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Logging handler for printing formatted structured logs to standard output. +""" + +import logging.handlers + +from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter + +GCP_FORMAT = '{"message": "%(message)s", "severity": "%(levelname)s", "logging.googleapis.com/trace": "%(trace)s", "logging.googleapis.com/sourceLocation": { "file": "%(pathname)s", "line": "%(lineno)d", "function": "%(funcName)s"}, "httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }' + + +class StructuredLogHandler(logging.StreamHandler): + """Handler to format logs into the Cloud Logging structured log format, + and write them to standard output. + """ + + def __init__(self, *, name=None, stream=None, project=None): + """ + Args: + name (Optional[str]): The name of the custom log in Cloud Logging. + stream (Optional[IO]): Stream to be used by the handler. + project (Optional[str]): Project ID, used to expand trace IDs into full trace resource names. + """ + super(StructuredLogHandler, self).__init__(stream=stream) + self.name = name + self.project_id = project + + # add extra keys to log record + self.addFilter(CloudLoggingFilter(project)) + + # make logs appear in GCP structured logging format + self.formatter = logging.Formatter(GCP_FORMAT) + + def format(self, record): + """Format the message into structured log JSON. + Args: + record (logging.LogRecord): The log record. + Returns: + str: A JSON string in the Cloud Logging structured log format.
+ """ + + payload = self.formatter.format(record) + return payload diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore b/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore index 8d6fb10f7d61..1331be3a3fcd 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore +++ b/packages/google-cloud-logging/tests/environment/deployable/go/.dockerignore @@ -1,2 +1,3 @@ Dockerfile */.nox +.dockerignore diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile index 6efa3a24f003..56eea7df8390 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google Inc. All Rights Reserved. +# Copyright 2021 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -39,4 +39,4 @@ RUN set -x && apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install - COPY --from=builder /app/server /app/server # Run the web service on container startup. -CMD ["/app/server"] \ No newline at end of file +CMD ["/app/server"] diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/.dockerignore b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.dockerignore new file mode 100644 index 000000000000..5747c4c87d22 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.dockerignore @@ -0,0 +1,4 @@ +Dockerfile +.dockerignore +node_modules +npm-debug.log diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore new file mode 100644 index 000000000000..2d56765aa1d6 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore @@ -0,0 +1,4 @@ +node_modules +nodejs-logging +*.tar +package-lock.json diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile new file mode 100644 index 000000000000..f19e36137bf4 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Use the official lightweight Node.js 12 image. +# https://hub.docker.com/_/node +FROM node:12-slim + +# Create and change to the app directory. +WORKDIR /usr/src/app + +# Copy test script and dependencies to the container image. +COPY package*.json ./ +COPY app.js ./ +COPY tests.js ./ +COPY nodejs-logging ./nodejs-logging + +# Install dependencies. 
+RUN npm install --production + +# Environment variable denoting whether to run an app server +ENV RUNSERVER=1 + +# Run the web service on container startup. +CMD [ "npm", "start" ] diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js index c6c1fc85fed0..fdaebf8fde4b 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js @@ -1,5 +1,60 @@ -const {Logging} = require('@google-cloud/logging'); -const logging = new Logging(); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +var tests = require('./tests.js'); + +/** + * Only triggers for GCP services that require a running app server. + * For instance, Cloud Functions does not execute this block. + * RUNSERVER env var is set in the Dockerfile. + */ +if (process.env.RUNSERVER) { + const express = require('express'); + const bodyParser = require('body-parser'); + const app = express(); + + app.use(bodyParser.json()); + + /** + * Cloud Run to be triggered by Pub/Sub. + */ + app.post('/', (req, res) => { + if (!req.body) { + const msg = 'no Pub/Sub message received'; + console.error(`error: ${msg}`); + res.status(400).send(`Bad Request: ${msg}`); + return; + } + if (!req.body.message) { + const msg = 'invalid Pub/Sub message format'; + console.error(`error: ${msg}`); + res.status(400).send(`Bad Request: ${msg}`); + return; + } + + const message = req.body.message; + triggerTest(message); + + res.status(204).send(); + }); + + // Start app server + const PORT = process.env.PORT || 8080; + app.listen(PORT, () => + console.log(`nodejs-pubsub-tutorial listening on port ${PORT}`) + ); +} /** * Background Cloud Function to be triggered by Pub/Sub. @@ -10,36 +65,20 @@ const logging = new Logging(); * @param {object} context The event metadata. */ exports.pubsubFunction = (message, context) => { - const msg = message.data + triggerTest(message); +}; + +function triggerTest(message) { + const testName = message.data ? Buffer.from(message.data, 'base64').toString() - : console.log("no log function was invoked"); + : console.error("WARNING: no log function was invoked"); - console.log('attributes if any: '); + console.log('Fn invoked with attributes, if any: '); console.log(message.attributes); - // TODO later (nicolezhu): - // write fns in separate file and do var funcFo0 = function(){}... modules.exports={ func: funcFoo} - // var methods = require()... 
methods['funcString']() - switch (msg) { - case 'simplelog': - if (message.attributes) { - simplelog(message.attributes['log_name'], message.attributes['log_text']); - } else { - simplelog(); - } - break; - default: - console.log(`Invalid log function was invoked.`); + if (message.attributes) { + tests[testName](message.attributes['log_name'], message.attributes['log_text']); + } else { + tests[testName](); } -}; - -/** - * envctl nodejs trigger simplelog log_name=foo,log_text=bar - */ -function simplelog(logname = "my-log", logtext = "hello world" ) { - const log = logging.log(logname); - - const text_entry = log.entry(logtext); - - log.write(text_entry).then(r => console.log(r)); } diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json index 58203b6742ef..817b0f2b5964 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json @@ -5,6 +5,7 @@ "description": "", "main": "app.js", "dependencies": { + "body-parser": "^1.19.0", "express": "^4.17.1", "@google-cloud/logging": "file:nodejs-logging" }, @@ -15,6 +16,5 @@ "scripts": { "start": "node app.js" }, - "author": "", - "license": "ISC" + "author": "" } diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js new file mode 100644 index 000000000000..71bdb17877d9 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js @@ -0,0 +1,31 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const {Logging} = require('@google-cloud/logging'); +const logging = new Logging(); + +/** + * The following are test functions that can be triggered in each service. + * envctl nodejs trigger simplelog log_name=foo,log_text=bar + */ +var simplelog = function(logname = "my-log", logtext = "hello world" ) { + const log = logging.log(logname); + + const text_entry = log.entry(logtext); + + log.write(text_entry).then(r => console.log(r)); +} + +module.exports={ 'simplelog': simplelog } + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh new file mode 100644 index 000000000000..97bf97b95337 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh @@ -0,0 +1,105 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="log-node-run-$(echo $ENVCTL_ID | head -c 8)" +SA_NAME=$SERVICE_NAME-invoker +LIBRARY_NAME="nodejs-logging" + +add_service_accounts() { + set +e + local PROJECT_ID=$(gcloud config list --format 'value(core.project)') + local PROJECT_NUMBER=$(gcloud projects list --filter=$PROJECT_ID --format="value(PROJECT_NUMBER)") + gcloud projects add-iam-policy-binding $PROJECT_ID \ + --member=serviceAccount:service-$PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com \ + --role=roles/iam.serviceAccountTokenCreator + gcloud iam service-accounts create $SA_NAME \ + --display-name "Pub/Sub Invoker" + gcloud run services add-iam-policy-binding $SERVICE_NAME \ + --member=serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com \ + --role=roles/run.invoker + RUN_URL=$(gcloud run services list --filter=$SERVICE_NAME --format="value(URL)") + gcloud pubsub subscriptions create $SERVICE_NAME-subscriber --topic $SERVICE_NAME \ + --push-endpoint=$RUN_URL \ + --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com + set -e +} + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service account + gcloud iam service-accounts delete $SA_NAME@$PROJECT_ID.iam.gserviceaccount.com -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud run services delete $SERVICE_NAME -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud run services describe $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +build_node_container() { + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + # copy super-repo into deployable dir + _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} + _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + + # copy over local copy of library + pushd $SUPERREPO_ROOT + tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . 
+ popd + mkdir -p $_deployable_dir/$LIBRARY_NAME + tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME + + # build container + docker build -t $GCR_PATH $_deployable_dir + docker push $GCR_PATH +} + +deploy() { + set -x + build_node_container + gcloud config set run/platform managed + gcloud config set run/region us-west1 + gcloud run deploy \ + --image $GCR_PATH \ + --update-env-vars ENABLE_FLASK=true \ + --no-allow-unauthenticated \ + $SERVICE_NAME + # create pubsub subscription + add_service_accounts +} + +filter-string() { + echo "resource.type=\"global\"" +} diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh index 4905e3570edb..b1bfe95e4849 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh @@ -63,6 +63,7 @@ deploy() { # copy test code into temporary test file cp $REPO_ROOT/deployable/nodejs/app.js $TMP_DIR/app.js + cp $REPO_ROOT/deployable/nodejs/tests.js $TMP_DIR/tests.js cp $REPO_ROOT/deployable/nodejs/package.json $TMP_DIR/ # deploy function diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py new file mode 100644 index 000000000000..d1fb33e69a1f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py @@ -0,0 +1,38 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest +import inspect +import uuid + +import google.cloud.logging +from google.cloud.logging_v2.resource import Resource + +from ..common.common import Common + +class TestCloudRun(Common, unittest.TestCase): + + environment = "cloudrun" + language = "nodejs" + + # What it should be + monitored_resource_name = "cloud_run_revision" + monitored_resource_labels = [ + "project_id", + "service_name", + "revision_name", + "location", + "configuration_name", + ] diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 80a1368b859e..9772d7b0a1d0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -23,6 +23,170 @@ ) +class TestCloudLoggingFilter(unittest.TestCase): + + PROJECT = "PROJECT" + + @staticmethod + def _get_target_class(): + from google.cloud.logging.handlers import CloudLoggingFilter + + return CloudLoggingFilter + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + @staticmethod + def create_app(): + import flask + + app = flask.Flask(__name__) + + @app.route("/") + def index(): + return "test flask trace" # pragma: NO COVER + + return app + + def test_filter_record(self): + """ + test adding fields to a standard record + """ + import logging + + filter_obj = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + pathname = "testpath" + lineno = 1 + func = "test-function" + record = logging.LogRecord( + logname, logging.INFO, pathname, lineno, message, None, None, func=func + ) + + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record.lineno, lineno) + self.assertEqual(record.msg, message) + self.assertEqual(record.funcName, func) + self.assertEqual(record.pathname, pathname) + self.assertEqual(record.trace, "") + self.assertEqual(record.http_request, {}) + self.assertEqual(record.request_method, "") + self.assertEqual(record.request_url, "") + self.assertEqual(record.user_agent, "") + self.assertEqual(record.protocol, "") + + def test_minimal_record(self): + """ + test filter adds empty strings on missing attributes + """ + import logging + + filter_obj = self._make_one() + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record.created = None + + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record.lineno, 0) + self.assertEqual(record.msg, "") + self.assertEqual(record.funcName, "") + self.assertEqual(record.pathname, "") + self.assertEqual(record.trace, "") + self.assertEqual(record.http_request, {}) + self.assertEqual(record.request_method, "") + self.assertEqual(record.request_url, "") + self.assertEqual(record.user_agent, "") + self.assertEqual(record.protocol, "") + + def test_record_with_request(self): + """ + test filter adds http request data when available + """ + import logging + + filter_obj = self._make_one() + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record.created = None + + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "123" + expected_request = { + "requestMethod": "PUT", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + } + + app = self.create_app() + with app.test_client() as c: + c.put( + path=expected_path, + data="body", + headers={ + "User-Agent": 
expected_agent, + "X_CLOUD_TRACE_CONTEXT": expected_trace, + }, + ) + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record.trace, expected_trace) + for key, val in expected_request.items(): + self.assertEqual(record.http_request[key], val) + self.assertEqual(record.request_method, "PUT") + self.assertEqual(record.request_url, expected_path) + self.assertEqual(record.user_agent, expected_agent) + self.assertEqual(record.protocol, "HTTP/1.1") + + def test_user_overrides(self): + """ + ensure user can override fields + """ + import logging + + filter_obj = self._make_one() + record = logging.LogRecord( + "name", logging.INFO, "default", 99, "message", None, None, func="default" + ) + record.created = 5.03 + + app = self.create_app() + with app.test_client() as c: + c.put( + path="http://testserver/123", + data="body", + headers={"User-Agent": "default", "X_CLOUD_TRACE_CONTEXT": "default"}, + ) + # override values + overwritten_trace = "456" + record.trace = overwritten_trace + overwritten_method = "GET" + overwritten_url = "www.google.com" + overwritten_agent = "custom" + overwritten_protocol = "test" + overwritten_request_object = { + "requestMethod": overwritten_method, + "requestUrl": overwritten_url, + "userAgent": overwritten_agent, + "protocol": overwritten_protocol, + } + record.http_request = overwritten_request_object + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record.trace, overwritten_trace) + self.assertEqual(record.http_request, overwritten_request_object) + self.assertEqual(record.request_method, overwritten_method) + self.assertEqual(record.request_url, overwritten_url) + self.assertEqual(record.user_agent, overwritten_agent) + self.assertEqual(record.protocol, overwritten_protocol) + + class TestCloudLoggingHandler(unittest.TestCase): PROJECT = "PROJECT" diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py new file mode 100644 index 000000000000..64c63c0278ac --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -0,0 +1,151 @@ +# Copyright 2021 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
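+
+# StructuredLogHandler is expected to write each record as one JSON object,
+# using the logging.googleapis.com/* keys that the GCP structured-logging
+# agents understand; the tests below parse that output with json.loads. A
+# formatted record looks roughly like this sketch (field values mirror the
+# fixtures used in the tests; they are illustrative, not canonical output):
+#
+#   {"message": "hello world", "severity": "INFO",
+#    "logging.googleapis.com/trace": "",
+#    "logging.googleapis.com/sourceLocation":
+#        {"file": "testpath", "line": "1", "function": "test-function"},
+#    "httpRequest": {"requestMethod": "", "requestUrl": "",
+#                    "userAgent": "", "protocol": ""}}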
+ +import unittest + + +class TestStructuredLogHandler(unittest.TestCase): + PROJECT = "PROJECT" + + def _get_target_class(self): + from google.cloud.logging.handlers import StructuredLogHandler + + return StructuredLogHandler + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + @staticmethod + def create_app(): + import flask + + app = flask.Flask(__name__) + + @app.route("/") + def index(): + return "test flask trace" # pragma: NO COVER + + return app + + def test_ctor_defaults(self): + handler = self._make_one() + self.assertIsNone(handler.name) + + def test_ctor_w_name(self): + handler = self._make_one(name="foo") + self.assertEqual(handler.name, "foo") + + def test_format(self): + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + pathname = "testpath" + lineno = 1 + func = "test-function" + record = logging.LogRecord( + logname, logging.INFO, pathname, lineno, message, None, None, func=func + ) + expected_payload = { + "message": message, + "severity": record.levelname, + "logging.googleapis.com/trace": "", + "logging.googleapis.com/sourceLocation": { + "file": pathname, + "line": str(lineno), + "function": func, + }, + "httpRequest": { + "requestMethod": "", + "requestUrl": "", + "userAgent": "", + "protocol": "", + }, + } + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) + self.assertEqual( + len(expected_payload.keys()), + len(result.keys()), + f"result dictionary has unexpected keys: {result.keys()}", + ) + + def test_format_minimal(self): + import logging + import json + + handler = self._make_one() + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record.created = None + expected_payload = { + "message": "", + "logging.googleapis.com/trace": "", + "logging.googleapis.com/sourceLocation": { + "file": "", + "line": "0", + "function": "", + }, + "httpRequest": { + "requestMethod": "", + "requestUrl": "", + "userAgent": "", + "protocol": "", + }, + } + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual( + value, result[key], f"expected_payload[{key}] != result[{key}]" + ) + + def test_format_with_request(self): + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "123" + expected_payload = { + "logging.googleapis.com/trace": expected_trace, + "httpRequest": { + "requestMethod": "PUT", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_client() as c: + c.put( + path=expected_path, + data="body", + headers={ + "User-Agent": expected_agent, + "X_CLOUD_TRACE_CONTEXT": expected_trace, + }, + ) + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) diff --git a/tests/environment b/tests/environment index 1962721db8aa..f0e2726579ef 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 1962721db8aa382bb1f658921979a1c183bf2d1a +Subproject commit f0e2726579ef96f8e6b3ceaed8145d2bfbfa32bc From 
cc5159949a0fd0fafec97beb24ce16bd2f64d88d Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 14:41:47 -0400 Subject: [PATCH 431/855] chore: prevent normalization of semver versioning (#259) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- packages/google-cloud-logging/setup.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8b794791c41f..c88b4ebd8d10 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -17,6 +17,20 @@ import setuptools +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -58,7 +72,7 @@ setuptools.setup( name=name, - version=version, + version=sic(version), description=description, long_description=readme, author="Google LLC", From 04ee65013e85b512a937da7e4cc3e5a94ac25477 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 16 Apr 2021 11:44:31 -0700 Subject: [PATCH 432/855] feat: record source locations (#254) --- .../cloud/logging_v2/handlers/handlers.py | 24 ++++++---- .../environment/deployable/python/snippets.py | 6 +++ .../tests/environment/tests/common/common.py | 5 +- .../tests/environment/tests/common/python.py | 47 +++++++++++++++++++ .../environment/tests/python/test_cloudrun.py | 21 +++++++++ .../tests/python/test_functions.py | 22 ++++++++- .../tests/unit/handlers/test_handlers.py | 16 ++++++- tests/environment | 2 +- 8 files changed, 129 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 175cd010a7ce..99346532d43d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -126,6 +126,8 @@ def __init__( self.project_id = client.project self.resource = resource self.labels = labels + # add extra keys to log record + self.addFilter(CloudLoggingFilter(self.project_id)) def emit(self, record): """Actually log the specified logging record. @@ -138,25 +140,31 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
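+
+        Overrides attached to the record (for example via the ``extra``
+        argument of the stdlib logging calls) are picked up here; a rough
+        usage sketch, with illustrative values:
+
+            logging.error("event", extra={"trace": "123", "span_id": "456"})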
""" message = super(CloudLoggingHandler, self).format(record) - trace_id = getattr(record, "trace", None) - span_id = getattr(record, "span_id", None) - http_request = getattr(record, "http_request", None) - resource = getattr(record, "resource", self.resource) user_labels = getattr(record, "labels", {}) # merge labels total_labels = self.labels if self.labels is not None else {} total_labels.update(user_labels) if len(total_labels) == 0: total_labels = None + # create source location object + if record.lineno and record.funcName and record.pathname: + source_location = { + "file": record.pathname, + "line": str(record.lineno), + "function": record.funcName, + } + else: + source_location = None # send off request self.transport.send( record, message, - resource=resource, + resource=getattr(record, "resource", self.resource), labels=(total_labels if total_labels else None), - trace=trace_id, - span_id=span_id, - http_request=http_request, + trace=(record.trace if record.trace else None), + span_id=getattr(record, "span_id", None), + http_request=(record.http_request if record.http_request else None), + source_location=source_location, ) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 2407b86add84..f6e95b50995e 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -53,6 +53,12 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): else: logging.critical(log_text) +def pylogging_flask(log_text="pylogging_flask", path="/", base_url="http://google", agent="Chrome", trace="123", **kwargs): + import flask + app = flask.Flask(__name__) + with app.test_request_context( + path, base_url, headers={'User-Agent': agent, "X_CLOUD_TRACE_CONTEXT": trace}): + logging.info(log_text) def print_handlers(**kwargs): root_logger = logging.getLogger() diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index dc1fbcdbc8f4..788957b53f83 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -70,13 +70,14 @@ def _trigger(self, function, **kwargs): args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()]) self._script.run_command(Command.Trigger, [function, args_str]) - @RetryErrors(exception=(LogsNotFound, RpcError), delay=2) + @RetryErrors(exception=(LogsNotFound, RpcError), delay=2, max_tries=2) def trigger_and_retrieve( self, log_text, function="simplelog", append_uuid=True, max_tries=6, **kwargs ): if append_uuid: log_text = f"{log_text} {uuid.uuid1()}" self._trigger(function, log_text=log_text, **kwargs) + sleep(2) filter_str = self._add_time_condition_to_filter(log_text) # give the command time to be received tries = 0 @@ -86,7 +87,7 @@ def trigger_and_retrieve( log_list = self._get_logs(filter_str) return log_list except (LogsNotFound, RpcError) as e: - sleep(10) + sleep(5) tries += 1 # log not found raise LogsNotFound diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 76ca235de0d0..e65c4ae7980f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ 
b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -61,3 +61,50 @@ def test_severity_pylogging(self): found_severity = log_list[-1].severity self.assertEqual(found_severity.lower(), severity.lower()) + + def test_source_location_pylogging(self): + if self.environment == "kubernetes" or "appengine" in self.environment: + # disable these tests on environments with custom handlers + # todo: enable in v3.0.0 + return + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + found_source = log_list[-1].source_location + + self.assertIsNotNone(found_source) + self.assertIsNotNone(found_source['file']) + self.assertIsNotNone(found_source['function']) + self.assertIsNotNone(found_source['line']) + self.assertIn("snippets.py", found_source['file']) + self.assertEqual(found_source['function'], "pylogging") + self.assertTrue(int(found_source['line']) > 0) + + def test_flask_http_request_pylogging(self): + if self.environment == "kubernetes" or "appengine" in self.environment: + # disable these tests on environments with custom handlers + # todo: enable in v3.0.0 + return + log_text = f"{inspect.currentframe().f_code.co_name}" + + expected_agent = "test-agent" + expected_base_url = "http://test" + expected_path = "/pylogging" + expected_trace = "123" + + log_list = self.trigger_and_retrieve(log_text, function="pylogging_flask", + path=expected_path, trace=expected_trace, base_url=expected_base_url, agent=expected_agent) + found_request = log_list[-1].http_request + + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request['requestMethod']) + self.assertIsNotNone(found_request['requestUrl']) + self.assertIsNotNone(found_request['userAgent']) + self.assertIsNotNone(found_request['protocol']) + self.assertEqual(found_request['requestMethod'], 'GET') + self.assertEqual(found_request['requestUrl'], expected_base_url + expected_path) + self.assertEqual(found_request['userAgent'], expected_agent) + self.assertEqual(found_request['protocol'], 'HTTP/1.1') + + found_trace = log_list[-1].trace + self.assertIsNotNone(found_trace) + self.assertIn("projects/", found_trace) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index 668232e8dda1..aab145ee7730 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -37,3 +37,24 @@ class TestCloudRun(Common, CommonPython, unittest.TestCase): "location", "configuration_name", ] + + def test_default_http_request_pylogging(self): + """ + Cloud Run should automatically attach http request information + """ + log_text = f"{inspect.currentframe().f_code.co_name}" + + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + found_request = log_list[-1].http_request + found_trace = log_list[-1].trace + + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request['requestMethod']) + self.assertIsNotNone(found_request['requestUrl']) + self.assertIsNotNone(found_request['userAgent']) + self.assertIsNotNone(found_request['protocol']) + self.assertEqual(found_request['requestMethod'], 'POST') + self.assertEqual(found_request['protocol'], 'HTTP/1.1') + + self.assertIsNotNone(found_trace) + self.assertIn("projects/", found_trace) diff --git 
a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index 8402823ea6b8..63a57a49c868 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -21,7 +21,6 @@ from ..common.common import Common from ..common.python import CommonPython - class TestCloudFunctions(Common, CommonPython, unittest.TestCase): environment = "functions" @@ -29,3 +28,24 @@ class TestCloudFunctions(Common, CommonPython, unittest.TestCase): monitored_resource_name = "cloud_function" monitored_resource_labels = ["region", "function_name"] + + def test_default_http_request_pylogging(self): + """ + Cloud Functions should automatically attach http request information + """ + log_text = f"{inspect.currentframe().f_code.co_name}" + + log_list = self.trigger_and_retrieve(log_text, function="pylogging") + found_request = log_list[-1].http_request + found_trace = log_list[-1].trace + + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request['requestMethod']) + self.assertIsNotNone(found_request['requestUrl']) + self.assertIsNotNone(found_request['userAgent']) + self.assertIsNotNone(found_request['protocol']) + self.assertEqual(found_request['requestMethod'], 'POST') + self.assertEqual(found_request['protocol'], 'HTTP/1.1') + + self.assertIsNotNone(found_trace) + self.assertIn("projects/", found_trace) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 9772d7b0a1d0..c182a5b7fe5b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -257,11 +257,11 @@ def test_emit(self): logname = "loggername" message = "hello world" record = logging.LogRecord(logname, logging, None, None, message, None, None) + handler.filter(record) handler.emit(record) - self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None, None, None, None), + (record, message, _GLOBAL_RESOURCE, None, None, None, None, None), ) def test_emit_manual_field_override(self): @@ -286,6 +286,15 @@ def test_emit_manual_field_override(self): setattr(record, "resource", expected_resource) expected_labels = {"test-label": "manual"} setattr(record, "labels", expected_labels) + expected_source = { + "file": "test-file", + "line": str(1), + "function": "test-func", + } + setattr(record, "lineno", int(expected_source["line"])) + setattr(record, "funcName", expected_source["function"]) + setattr(record, "pathname", expected_source["file"]) + handler.filter(record) handler.emit(record) self.assertEqual( @@ -298,6 +307,7 @@ def test_emit_manual_field_override(self): expected_trace, expected_span, expected_http, + expected_source, ), ) @@ -413,6 +423,7 @@ def send( trace=None, span_id=None, http_request=None, + source_location=None, ): self.send_called_with = ( record, @@ -422,4 +433,5 @@ def send( trace, span_id, http_request, + source_location, ) diff --git a/tests/environment b/tests/environment index f0e2726579ef..273db6c60b8f 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit f0e2726579ef96f8e6b3ceaed8145d2bfbfa32bc +Subproject commit 273db6c60b8f39fa5092b01730ff4d2dffcca17e From 8940d6ee9e1671f43c4ebad579df9c291938ef4f Mon Sep 17 00:00:00 2001 From: Daniel Sanche 
Date: Tue, 20 Apr 2021 11:59:43 -0700 Subject: [PATCH 433/855] feat: Improve source location overrides (#258) --- .../cloud/logging_v2/handlers/handlers.py | 34 +++++---- .../logging_v2/handlers/structured_log.py | 2 +- .../environment/deployable/python/snippets.py | 28 +++++-- .../tests/environment/tests/common/common.py | 16 ++-- .../tests/environment/tests/common/python.py | 74 +++++++++++++++++-- .../environment/tests/python/test_cloudrun.py | 2 +- .../tests/python/test_functions.py | 2 +- .../tests/system/test_system.py | 2 + .../tests/unit/handlers/test_handlers.py | 40 +++++----- .../unit/handlers/test_structured_log.py | 49 ++++++++++++ tests/environment | 2 +- 11 files changed, 196 insertions(+), 55 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 99346532d43d..8e99632f0363 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -42,10 +42,21 @@ def __init__(self, project=None): def filter(self, record): # ensure record has all required fields set - record.lineno = 0 if record.lineno is None else record.lineno + if hasattr(record, "source_location"): + record.line = int(record.source_location.get("line", 0)) + record.file = record.source_location.get("file", "") + record.function = record.source_location.get("function", "") + else: + record.line = record.lineno if record.lineno else 0 + record.file = record.pathname if record.pathname else "" + record.function = record.funcName if record.funcName else "" + if any([record.line, record.file, record.function]): + record.source_location = { + "line": record.line, + "file": record.file, + "function": record.function, + } record.msg = "" if record.msg is None else record.msg - record.funcName = "" if record.funcName is None else record.funcName - record.pathname = "" if record.pathname is None else record.pathname # find http request data inferred_http, inferred_trace = get_request_data() if inferred_trace is not None and self.project is not None: @@ -146,25 +157,16 @@ def emit(self, record): total_labels.update(user_labels) if len(total_labels) == 0: total_labels = None - # create source location object - if record.lineno and record.funcName and record.pathname: - source_location = { - "file": record.pathname, - "line": str(record.lineno), - "function": record.funcName, - } - else: - source_location = None # send off request self.transport.send( record, message, resource=getattr(record, "resource", self.resource), - labels=(total_labels if total_labels else None), - trace=(record.trace if record.trace else None), + labels=total_labels, + trace=getattr(record, "trace", None), span_id=getattr(record, "span_id", None), - http_request=(record.http_request if record.http_request else None), - source_location=source_location, + http_request=getattr(record, "http_request", None), + source_location=getattr(record, "source_location", None), ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 56d032d0ad36..4e7801706ff2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -19,7 +19,7 @@ from google.cloud.logging_v2.handlers.handlers import 
CloudLoggingFilter


-GCP_FORMAT = '{"message": "%(message)s", "severity": "%(levelname)s", "logging.googleapis.com/trace": "%(trace)s", "logging.googleapis.com/sourceLocation": { "file": "%(pathname)s", "line": "%(lineno)d", "function": "%(funcName)s"}, "httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }'
+GCP_FORMAT = '{"message": "%(message)s", "severity": "%(levelname)s", "logging.googleapis.com/trace": "%(trace)s", "logging.googleapis.com/sourceLocation": { "file": "%(file)s", "line": "%(line)d", "function": "%(function)s"}, "httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }'


 class StructuredLogHandler(logging.StreamHandler):
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
index f6e95b50995e..eee715f9a92c 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
+++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
@@ -41,17 +41,35 @@ def simplelog(log_name=None, log_text="simple_log", severity="DEFAULT", **kwargs

 def pylogging(log_text="pylogging", severity="WARNING", **kwargs):
     # allowed severity: debug, info, warning, error, critical
+
+    # build http request if fields given as argument
+    http_keys = ["protocol", "requestUrl", "userAgent", "requestMethod"] 
+    if any([k in kwargs for k in http_keys]):
+        http_request = {}
+        for key in http_keys:
+            if key in kwargs:
+                http_request[key] = kwargs[key]
+        kwargs['http_request'] = http_request
+    # build source location if given as argument
+    source_keys = ["line", "file", "function"] 
+    if any([k in kwargs for k in source_keys]):
+        source_location = {}
+        for key in source_keys:
+            if key in kwargs:
+                source_location[key] = kwargs[key]
+        kwargs['source_location'] = source_location
+
     severity = severity.upper()
     if severity == "DEBUG":
-        logging.debug(log_text)
+        logging.debug(log_text, extra=kwargs)
     elif severity == "INFO":
-        logging.info(log_text)
+        logging.info(log_text, extra=kwargs)
     elif severity == "WARNING":
-        logging.warning(log_text)
+        logging.warning(log_text, extra=kwargs)
     elif severity == "ERROR":
-        logging.error(log_text)
+        logging.error(log_text, extra=kwargs)
     else:
-        logging.critical(log_text)
+        logging.critical(log_text, extra=kwargs)

 def pylogging_flask(log_text="pylogging_flask", path="/", base_url="http://google", agent="Chrome", trace="123", **kwargs):
     import flask
diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py
index 788957b53f83..7c1d927301fd 100644
--- a/packages/google-cloud-logging/tests/environment/tests/common/common.py
+++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py
@@ -65,18 +65,18 @@ def _get_logs(self, filter_str=None):
             raise LogsNotFound
         return entries

-    def _trigger(self, function, **kwargs):
+    def _trigger(self, snippet, **kwargs):
         timestamp = datetime.now(timezone.utc)
         args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()])
-        self._script.run_command(Command.Trigger, [function, args_str])
+        self._script.run_command(Command.Trigger, [snippet, args_str])

     @RetryErrors(exception=(LogsNotFound, RpcError), delay=2, max_tries=2)
     def trigger_and_retrieve(
-        self, log_text, function="simplelog",
append_uuid=True, max_tries=6, **kwargs + self, log_text, snippet, append_uuid=True, max_tries=6, **kwargs ): if append_uuid: log_text = f"{log_text} {uuid.uuid1()}" - self._trigger(function, log_text=log_text, **kwargs) + self._trigger(snippet, log_text=log_text, **kwargs) sleep(2) filter_str = self._add_time_condition_to_filter(log_text) # give the command time to be received @@ -124,7 +124,7 @@ def tearDown_class(cls): def test_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) + log_list = self.trigger_and_retrieve(log_text, "simplelog") found_log = None for log in log_list: @@ -143,7 +143,7 @@ def test_receive_log(self): # # to do: add monitored resource info to go # return True # log_text = f"{inspect.currentframe().f_code.co_name}" - # log_list = self.trigger_and_retrieve(log_text) + # log_list = self.trigger_and_retrieve(log_text, "simplelog") # found_resource = log_list[-1].resource # self.assertIsNotNone(self.monitored_resource_name) @@ -170,9 +170,9 @@ def test_severity(self): "DEBUG", ] for severity in severities: - log_list = self.trigger_and_retrieve(log_text, severity=severity) + log_list = self.trigger_and_retrieve(log_text, "simplelog", severity=severity) found_severity = log_list[-1].severity self.assertEqual(found_severity.lower(), severity.lower()) # DEFAULT severity should result in empty field - log_list = self.trigger_and_retrieve(log_text, severity="DEFAULT") + log_list = self.trigger_and_retrieve(log_text, "simplelog", severity="DEFAULT") self.assertIsNone(log_list[-1].severity) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index e65c4ae7980f..1e98f084f0e8 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -24,7 +24,7 @@ class CommonPython: def pylogging_test_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") + log_list = self.trigger_and_retrieve(log_text, "pylogging") found_log = None for log in log_list: @@ -39,7 +39,7 @@ def pylogging_test_receive_log(self): def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") + log_list = self.trigger_and_retrieve(log_text, "pylogging") found_resource = log_list[-1].resource self.assertIsNotNone(self.monitored_resource_name) @@ -56,7 +56,7 @@ def test_severity_pylogging(self): for severity in severities: log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve( - log_text, function="pylogging", severity=severity + log_text, "pylogging", severity=severity ) found_severity = log_list[-1].severity @@ -68,7 +68,7 @@ def test_source_location_pylogging(self): # todo: enable in v3.0.0 return log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") + log_list = self.trigger_and_retrieve(log_text, "pylogging") found_source = log_list[-1].source_location self.assertIsNotNone(found_source) @@ -91,7 +91,7 @@ def test_flask_http_request_pylogging(self): expected_path = "/pylogging" expected_trace = "123" - log_list = self.trigger_and_retrieve(log_text, function="pylogging_flask", + log_list = self.trigger_and_retrieve(log_text, 
"pylogging_flask", path=expected_path, trace=expected_trace, base_url=expected_base_url, agent=expected_agent) found_request = log_list[-1].http_request @@ -108,3 +108,67 @@ def test_flask_http_request_pylogging(self): found_trace = log_list[-1].trace self.assertIsNotNone(found_trace) self.assertIn("projects/", found_trace) + + def test_pylogging_extras(self): + if self.environment == "kubernetes" or "appengine" in self.environment: + # disable these tests on environments with custom handlers + # todo: enable in v3.0.0 + return + log_text = f"{inspect.currentframe().f_code.co_name}" + kwargs = { + 'trace': '123', + 'requestMethod': 'POST', + 'requestUrl': 'http://test', + 'userAgent': 'agent', + 'protocol': 'test', + 'line': 25, + 'file': 'test-file', + 'function': 'test-function' + } + log_list = self.trigger_and_retrieve(log_text, "pylogging", **kwargs) + found_log = log_list[-1] + + if self.environment != "functions": + # functions seems to override the user's trace value + self.assertEqual(found_log.trace, kwargs['trace']) + + # check that custom http request fields were set + self.assertIsNotNone(found_log.http_request) + for field in ['requestMethod', 'requestUrl', 'userAgent', 'protocol']: + self.assertIsNotNone(found_log.http_request[field], + 'http_request[{field}] is unexpectedly None') + self.assertEqual(found_log.http_request[field], kwargs[field], + f'http_request[{field}] != {kwargs[field]}') + # check that custom source location fields were set + self.assertIsNotNone(found_log.source_location) + for field in ['line', 'file', 'function']: + self.assertIsNotNone(found_log.source_location[field], + f'source_location[{field}] is unexpectedly None') + self.assertEqual(found_log.source_location[field], kwargs[field], + f'source_location[{field}] != {kwargs[field]}') + + def test_pylogging_extras_sparse(self): + if self.environment == "kubernetes" or "appengine" in self.environment: + # disable these tests on environments with custom handlers + # todo: enable in v3.0.0 + return + log_text = f"{inspect.currentframe().f_code.co_name}" + kwargs = { + 'requestMethod': 'POST', + 'file': 'test-file', + } + log_list = self.trigger_and_retrieve(log_text, "pylogging", **kwargs) + found_log = log_list[-1] + + # check that custom http request fields were set + self.assertIsNotNone(found_log.http_request) + self.assertEqual(found_log.http_request["requestMethod"], kwargs["requestMethod"]) + for field in ['requestUrl', 'userAgent', 'protocol']: + self.assertIsNone(found_log.http_request.get(field, None), + f'http_request[{field}] is unexpectedly not None') + # check that custom source location fields were set + self.assertIsNotNone(found_log.source_location) + self.assertEqual(found_log.source_location['file'], kwargs['file']) + for field in ['line', 'function']: + self.assertIsNone(found_log.source_location.get(field, None), + f'source_location[{field}] is unexpectedly not None') diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index aab145ee7730..74ad48131c33 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -44,7 +44,7 @@ def test_default_http_request_pylogging(self): """ log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") + log_list = self.trigger_and_retrieve(log_text, 
"pylogging") found_request = log_list[-1].http_request found_trace = log_list[-1].trace diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index 63a57a49c868..f107d3b1e5f6 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -35,7 +35,7 @@ def test_default_http_request_pylogging(self): """ log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, function="pylogging") + log_list = self.trigger_and_retrieve(log_text, "pylogging") found_request = log_list[-1].http_request found_trace = log_list[-1].trace diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index e6f5aa7cf520..cc6d03804461 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -333,10 +333,12 @@ def test_handlers_w_extras(self): cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) expected_request = {"requestUrl": "localhost"} + expected_source = {"file": "test.py"} extra = { "trace": "123", "span_id": "456", "http_request": expected_request, + "source_location": expected_source, "resource": Resource(type="cloudiot_device", labels={}), "labels": {"test-label": "manual"}, } diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index c182a5b7fe5b..08b74cb44771 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -67,10 +67,10 @@ def test_filter_record(self): success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.lineno, lineno) + self.assertEqual(record.line, lineno) self.assertEqual(record.msg, message) - self.assertEqual(record.funcName, func) - self.assertEqual(record.pathname, pathname) + self.assertEqual(record.function, func) + self.assertEqual(record.file, pathname) self.assertEqual(record.trace, "") self.assertEqual(record.http_request, {}) self.assertEqual(record.request_method, "") @@ -91,10 +91,10 @@ def test_minimal_record(self): success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.lineno, 0) + self.assertEqual(record.line, 0) self.assertEqual(record.msg, "") - self.assertEqual(record.funcName, "") - self.assertEqual(record.pathname, "") + self.assertEqual(record.function, "") + self.assertEqual(record.file, "") self.assertEqual(record.trace, "") self.assertEqual(record.http_request, {}) self.assertEqual(record.request_method, "") @@ -175,7 +175,16 @@ def test_user_overrides(self): "userAgent": overwritten_agent, "protocol": overwritten_protocol, } + overwritten_line = 22 + overwritten_function = "test-func" + overwritten_file = "test-file" + overwritten_source_location = { + "file": overwritten_file, + "line": overwritten_line, + "function": overwritten_function, + } record.http_request = overwritten_request_object + record.source_location = overwritten_source_location success = filter_obj.filter(record) self.assertTrue(success) @@ -185,6 +194,9 @@ def test_user_overrides(self): self.assertEqual(record.request_url, overwritten_url) self.assertEqual(record.user_agent, 
overwritten_agent)
         self.assertEqual(record.protocol, overwritten_protocol)
+        self.assertEqual(record.line, overwritten_line)
+        self.assertEqual(record.function, overwritten_function)
+        self.assertEqual(record.file, overwritten_file)


 class TestCloudLoggingHandler(unittest.TestCase):
@@ -256,12 +268,13 @@ def test_emit(self):
         )
         logname = "loggername"
         message = "hello world"
+        labels = {"test-key": "test-value"}
         record = logging.LogRecord(logname, logging, None, None, message, None, None)
-        handler.filter(record)
+        record.labels = labels
         handler.emit(record)
         self.assertEqual(
             handler.transport.send_called_with,
-            (record, message, _GLOBAL_RESOURCE, None, None, None, None, None),
+            (record, message, _GLOBAL_RESOURCE, labels, None, None, None, None),
         )

     def test_emit_manual_field_override(self):
@@ -282,19 +295,12 @@ def test_emit_manual_field_override(self):
         setattr(record, "span_id", expected_span)
         expected_http = {"request_url": "manual"}
         setattr(record, "http_request", expected_http)
+        expected_source = {"file": "test-file"}
+        setattr(record, "source_location", expected_source)
         expected_resource = Resource(type="test", labels={})
         setattr(record, "resource", expected_resource)
         expected_labels = {"test-label": "manual"}
         setattr(record, "labels", expected_labels)
-        expected_source = {
-            "file": "test-file",
-            "line": str(1),
-            "function": "test-func",
-        }
-        setattr(record, "lineno", int(expected_source["line"]))
-        setattr(record, "funcName", expected_source["function"])
-        setattr(record, "pathname", expected_source["file"])
-        handler.filter(record)
         handler.emit(record)
         self.assertEqual(
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py
index 64c63c0278ac..09d6b14faa96 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py
+++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py
@@ -149,3 +149,52 @@ def test_format_with_request(self):
         result = json.loads(handler.format(record))
         for (key, value) in expected_payload.items():
             self.assertEqual(value, result[key])
+
+    def test_format_overrides(self):
+        """
+        Allow users to override log fields using `logging.info("", extra={})`
+
+        If supported fields were overridden by the user, those choices should
+ """ + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + overwrite_path = "http://overwrite" + inferred_path = "http://testserver/123" + overwrite_trace = "456" + inferred_trace = "123" + overwrite_file = "test-file" + record.http_request = {"requestUrl": overwrite_path} + record.source_location = {"file": overwrite_file} + record.trace = overwrite_trace + expected_payload = { + "logging.googleapis.com/trace": overwrite_trace, + "logging.googleapis.com/sourceLocation": { + "file": overwrite_file, + "function": "", + "line": "0", + }, + "httpRequest": { + "requestMethod": "", + "requestUrl": overwrite_path, + "userAgent": "", + "protocol": "", + }, + } + + app = self.create_app() + with app.test_client() as c: + c.put( + path=inferred_path, + data="body", + headers={"X_CLOUD_TRACE_CONTEXT": inferred_trace}, + ) + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) diff --git a/tests/environment b/tests/environment index 273db6c60b8f..cf9ccb495dd3 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 273db6c60b8f39fa5092b01730ff4d2dffcca17e +Subproject commit cf9ccb495dd39555748c704963df93054d246050 From 5e89f49bcd2232b176680d1c7d15156f13bd2faf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Apr 2021 22:12:16 +0200 Subject: [PATCH 434/855] chore(deps): update dependency google-cloud-storage to v1.37.1 (#255) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 83ffc5b5af8c..30afb45a2afe 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.13.1 -google-cloud-storage==1.37.0 +google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 69ee0644cbc468100abecc85b8c0010198cec61c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 20 Apr 2021 13:13:23 -0700 Subject: [PATCH 435/855] chore: Re-generated to pick up changes from self (#260) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index c881df5796a8..345edc6bc267 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7eeb6f9b62a764c687b129713b4fba6ce006fc45" + "sha": "6743654727f56ce65bdd6dc45075ef41ebb84bcf" } }, { @@ -137,6 +137,7 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", + "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From 
e65bdf6b1a253b1979c49694373714a7793456d4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 22 Apr 2021 13:36:16 -0700 Subject: [PATCH 436/855] feat: allow custom labels with standard library logging (#264) --- .../google/cloud/logging_v2/client.py | 4 +- .../cloud/logging_v2/handlers/handlers.py | 31 ++--- .../logging_v2/handlers/structured_log.py | 20 ++- .../environment/deployable/python/snippets.py | 32 ++++- .../tests/environment/tests/common/common.py | 4 +- .../tests/environment/tests/common/python.py | 116 +++++++++++------- .../environment/tests/nodejs/test_cloudrun.py | 23 ++-- .../environment/tests/python/test_cloudrun.py | 12 +- .../tests/python/test_functions.py | 13 +- .../tests/unit/handlers/test_handlers.py | 26 ++-- .../unit/handlers/test_structured_log.py | 26 +++- tests/environment | 2 +- 12 files changed, 196 insertions(+), 113 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 0b5fd1dd3101..51d93355ce9e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -362,9 +362,9 @@ def get_default_handler(self, **kw): ): # Cloud Functions with runtimes > 3.8 supports structured logs on standard out # 3.7 should use the standard CloudLoggingHandler, which sends logs over the network. - return StructuredLogHandler(**kw, project=self.project) + return StructuredLogHandler(**kw, project_id=self.project) elif monitored_resource.type == _RUN_RESOURCE_TYPE: - return StructuredLogHandler(**kw, project=self.project) + return StructuredLogHandler(**kw, project_id=self.project) return CloudLoggingHandler(self, resource=monitored_resource, **kw) def setup_logging( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 8e99632f0363..b9cc53a94fdd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -37,8 +37,9 @@ class CloudLoggingFilter(logging.Filter): the `extras` argument when writing logs. """ - def __init__(self, project=None): + def __init__(self, project=None, default_labels=None): self.project = project + self.default_labels = default_labels if default_labels else {} def filter(self, record): # ensure record has all required fields set @@ -61,6 +62,12 @@ def filter(self, record): inferred_http, inferred_trace = get_request_data() if inferred_trace is not None and self.project is not None: inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" + # set labels + user_labels = getattr(record, "labels", {}) + record.total_labels = {**self.default_labels, **user_labels} + record.total_labels_str = ", ".join( + [f'"{k}": "{v}"' for k, v in record.total_labels.items()] + ) record.trace = getattr(record, "trace", inferred_trace) or "" record.http_request = getattr(record, "http_request", inferred_http) or {} @@ -126,8 +133,7 @@ def __init__( option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): Resource for this Handler. Defaults to ``global``. - labels (Optional[dict]): Monitored resource of the entry, defaults - to the global resource type. + labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. 
""" super(CloudLoggingHandler, self).__init__(stream) @@ -138,7 +144,8 @@ def __init__( self.resource = resource self.labels = labels # add extra keys to log record - self.addFilter(CloudLoggingFilter(self.project_id)) + log_filter = CloudLoggingFilter(project=self.project_id, default_labels=labels) + self.addFilter(log_filter) def emit(self, record): """Actually log the specified logging record. @@ -151,22 +158,16 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. """ message = super(CloudLoggingHandler, self).format(record) - user_labels = getattr(record, "labels", {}) - # merge labels - total_labels = self.labels if self.labels is not None else {} - total_labels.update(user_labels) - if len(total_labels) == 0: - total_labels = None # send off request self.transport.send( record, message, resource=getattr(record, "resource", self.resource), - labels=total_labels, - trace=getattr(record, "trace", None), - span_id=getattr(record, "span_id", None), - http_request=getattr(record, "http_request", None), - source_location=getattr(record, "source_location", None), + labels=getattr(record, "total_labels", None) or None, + trace=getattr(record, "trace", None) or None, + span_id=getattr(record, "span_id", None) or None, + http_request=getattr(record, "http_request", None) or None, + source_location=getattr(record, "source_location", None) or None, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 4e7801706ff2..e9d036423e9f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -19,7 +19,14 @@ from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter -GCP_FORMAT = '{"message": "%(message)s", "severity": "%(levelname)s", "logging.googleapis.com/trace": "%(trace)s", "logging.googleapis.com/sourceLocation": { "file": "%(file)s", "line": "%(line)d", "function": "%(function)s"}, "httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }' +GCP_FORMAT = ( + '{"message": "%(message)s", ' + '"severity": "%(levelname)s", ' + '"logging.googleapis.com/labels": { %(total_labels_str)s }, ' + '"logging.googleapis.com/trace": "%(trace)s", ' + '"logging.googleapis.com/sourceLocation": { "file": "%(file)s", "line": "%(line)d", "function": "%(function)s"}, ' + '"httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }' +) class StructuredLogHandler(logging.StreamHandler): @@ -27,18 +34,19 @@ class StructuredLogHandler(logging.StreamHandler): and write them to standard output """ - def __init__(self, *, name=None, stream=None, project=None): + def __init__(self, *, labels=None, stream=None, project_id=None): """ Args: - name (Optional[str]): The name of the custom log in Cloud Logging. + labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. + project (Optional[str]): Project Id associated with the logs. 
""" super(StructuredLogHandler, self).__init__(stream=stream) - self.name = name - self.project_id = project + self.project_id = project_id # add extra keys to log record - self.addFilter(CloudLoggingFilter(project)) + log_filter = CloudLoggingFilter(project=project_id, default_labels=labels) + self.addFilter(log_filter) # make logs appear in GCP structured logging format self.formatter = logging.Formatter(GCP_FORMAT) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index eee715f9a92c..c4011dd473cf 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -43,21 +43,30 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): # allowed severity: debug, info, warning, error, critical # build http request if fields given as argument - http_keys = ["protocol", "requestUrl", "userAgent", "requestMethod"] + http_keys = ["protocol", "requestUrl", "userAgent", "requestMethod"] if any([k in kwargs for k in http_keys]): http_request = {} for key in http_keys: if key in kwargs: http_request[key] = kwargs[key] - kwargs['http_request'] = http_request + kwargs["http_request"] = http_request # build source location if given as argument - source_keys = ["line", "file", "function"] + source_keys = ["line", "file", "function"] if any([k in kwargs for k in http_keys]): source_location = {} for key in source_keys: if key in kwargs: source_location[key] = kwargs[key] - kwargs['source_location'] = source_location + kwargs["source_location"] = source_location + # build custom labels + label_prefix = "label_" + label_keys = [k for k in kwargs.keys() if k.startswith(label_prefix)] + if label_keys: + labels = {} + for k in label_keys: + adjusted_key = k[len(label_prefix) :] + labels[adjusted_key] = kwargs[k] + kwargs["labels"] = labels severity = severity.upper() if severity == "DEBUG": @@ -71,13 +80,24 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): else: logging.critical(log_text, extra=kwargs) -def pylogging_flask(log_text="pylogging_flask", path="/", base_url="http://google", agent="Chrome", trace="123", **kwargs): + +def pylogging_flask( + log_text="pylogging_flask", + path="/", + base_url="http://google", + agent="Chrome", + trace="123", + **kwargs, +): import flask + app = flask.Flask(__name__) with app.test_request_context( - path, base_url, headers={'User-Agent': agent, "X_CLOUD_TRACE_CONTEXT": trace}): + path, base_url, headers={"User-Agent": agent, "X_CLOUD_TRACE_CONTEXT": trace} + ): logging.info(log_text) + def print_handlers(**kwargs): root_logger = logging.getLogger() handlers_str = ", ".join([type(h).__name__ for h in root_logger.handlers]) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 7c1d927301fd..abd9ecd5ac24 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -170,7 +170,9 @@ def test_severity(self): "DEBUG", ] for severity in severities: - log_list = self.trigger_and_retrieve(log_text, "simplelog", severity=severity) + log_list = self.trigger_and_retrieve( + log_text, "simplelog", severity=severity + ) found_severity = log_list[-1].severity self.assertEqual(found_severity.lower(), severity.lower()) 
# DEFAULT severity should result in empty field diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 1e98f084f0e8..e765a6928718 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -72,12 +72,12 @@ def test_source_location_pylogging(self): found_source = log_list[-1].source_location self.assertIsNotNone(found_source) - self.assertIsNotNone(found_source['file']) - self.assertIsNotNone(found_source['function']) - self.assertIsNotNone(found_source['line']) - self.assertIn("snippets.py", found_source['file']) - self.assertEqual(found_source['function'], "pylogging") - self.assertTrue(int(found_source['line']) > 0) + self.assertIsNotNone(found_source["file"]) + self.assertIsNotNone(found_source["function"]) + self.assertIsNotNone(found_source["line"]) + self.assertIn("snippets.py", found_source["file"]) + self.assertEqual(found_source["function"], "pylogging") + self.assertTrue(int(found_source["line"]) > 0) def test_flask_http_request_pylogging(self): if self.environment == "kubernetes" or "appengine" in self.environment: @@ -91,19 +91,25 @@ def test_flask_http_request_pylogging(self): expected_path = "/pylogging" expected_trace = "123" - log_list = self.trigger_and_retrieve(log_text, "pylogging_flask", - path=expected_path, trace=expected_trace, base_url=expected_base_url, agent=expected_agent) + log_list = self.trigger_and_retrieve( + log_text, + "pylogging_flask", + path=expected_path, + trace=expected_trace, + base_url=expected_base_url, + agent=expected_agent, + ) found_request = log_list[-1].http_request self.assertIsNotNone(found_request) - self.assertIsNotNone(found_request['requestMethod']) - self.assertIsNotNone(found_request['requestUrl']) - self.assertIsNotNone(found_request['userAgent']) - self.assertIsNotNone(found_request['protocol']) - self.assertEqual(found_request['requestMethod'], 'GET') - self.assertEqual(found_request['requestUrl'], expected_base_url + expected_path) - self.assertEqual(found_request['userAgent'], expected_agent) - self.assertEqual(found_request['protocol'], 'HTTP/1.1') + self.assertIsNotNone(found_request["requestMethod"]) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertIsNotNone(found_request["userAgent"]) + self.assertIsNotNone(found_request["protocol"]) + self.assertEqual(found_request["requestMethod"], "GET") + self.assertEqual(found_request["requestUrl"], expected_base_url + expected_path) + self.assertEqual(found_request["userAgent"], expected_agent) + self.assertEqual(found_request["protocol"], "HTTP/1.1") found_trace = log_list[-1].trace self.assertIsNotNone(found_trace) @@ -116,36 +122,50 @@ def test_pylogging_extras(self): return log_text = f"{inspect.currentframe().f_code.co_name}" kwargs = { - 'trace': '123', - 'requestMethod': 'POST', - 'requestUrl': 'http://test', - 'userAgent': 'agent', - 'protocol': 'test', - 'line': 25, - 'file': 'test-file', - 'function': 'test-function' + "trace": "123", + "requestMethod": "POST", + "requestUrl": "http://test", + "userAgent": "agent", + "protocol": "test", + "line": 25, + "file": "test-file", + "function": "test-function", + "label_custom": "test-label", } log_list = self.trigger_and_retrieve(log_text, "pylogging", **kwargs) found_log = log_list[-1] if self.environment != "functions": # functions seems to override the user's trace value - 
self.assertEqual(found_log.trace, kwargs['trace']) + self.assertEqual(found_log.trace, kwargs["trace"]) # check that custom http request fields were set self.assertIsNotNone(found_log.http_request) - for field in ['requestMethod', 'requestUrl', 'userAgent', 'protocol']: - self.assertIsNotNone(found_log.http_request[field], - 'http_request[{field}] is unexpectedly None') - self.assertEqual(found_log.http_request[field], kwargs[field], - f'http_request[{field}] != {kwargs[field]}') + for field in ["requestMethod", "requestUrl", "userAgent", "protocol"]: + self.assertIsNotNone( + found_log.http_request[field], + "http_request[{field}] is unexpectedly None", + ) + self.assertEqual( + found_log.http_request[field], + kwargs[field], + f"http_request[{field}] != {kwargs[field]}", + ) # check that custom source location fields were set self.assertIsNotNone(found_log.source_location) - for field in ['line', 'file', 'function']: - self.assertIsNotNone(found_log.source_location[field], - f'source_location[{field}] is unexpectedly None') - self.assertEqual(found_log.source_location[field], kwargs[field], - f'source_location[{field}] != {kwargs[field]}') + for field in ["line", "file", "function"]: + self.assertIsNotNone( + found_log.source_location[field], + f"source_location[{field}] is unexpectedly None", + ) + self.assertEqual( + found_log.source_location[field], + kwargs[field], + f"source_location[{field}] != {kwargs[field]}", + ) + # check that custom label is set + self.assertIsNotNone(found_log.labels) + self.assertEqual(found_log.labels["custom"], kwargs["label_custom"]) def test_pylogging_extras_sparse(self): if self.environment == "kubernetes" or "appengine" in self.environment: @@ -154,21 +174,27 @@ def test_pylogging_extras_sparse(self): return log_text = f"{inspect.currentframe().f_code.co_name}" kwargs = { - 'requestMethod': 'POST', - 'file': 'test-file', + "requestMethod": "POST", + "file": "test-file", } log_list = self.trigger_and_retrieve(log_text, "pylogging", **kwargs) found_log = log_list[-1] # check that custom http request fields were set self.assertIsNotNone(found_log.http_request) - self.assertEqual(found_log.http_request["requestMethod"], kwargs["requestMethod"]) - for field in ['requestUrl', 'userAgent', 'protocol']: - self.assertIsNone(found_log.http_request.get(field, None), - f'http_request[{field}] is unexpectedly not None') + self.assertEqual( + found_log.http_request["requestMethod"], kwargs["requestMethod"] + ) + for field in ["requestUrl", "userAgent", "protocol"]: + self.assertIsNone( + found_log.http_request.get(field, None), + f"http_request[{field}] is unexpectedly not None", + ) # check that custom source location fields were set self.assertIsNotNone(found_log.source_location) - self.assertEqual(found_log.source_location['file'], kwargs['file']) - for field in ['line', 'function']: - self.assertIsNone(found_log.source_location.get(field, None), - f'source_location[{field}] is unexpectedly not None') + self.assertEqual(found_log.source_location["file"], kwargs["file"]) + for field in ["line", "function"]: + self.assertIsNone( + found_log.source_location.get(field, None), + f"source_location[{field}] is unexpectedly not None", + ) diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py index d1fb33e69a1f..98f2ce816820 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py +++ 
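One property of the sparse test above is worth restating: extras that are not passed stay absent from the entry rather than defaulting to empty values, so only the provided subfields appear. Sketch (illustrative call; expectations copied from test_pylogging_extras_sparse):

pylogging("sparse-demo", requestMethod="POST", file="test-file")
# Expected result on the retrieved entry:
#   http_request    == {"requestMethod": "POST"}   (no requestUrl/userAgent/protocol)
#   source_location == {"file": "test-file"}       (no line/function)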
b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py @@ -22,17 +22,18 @@ from ..common.common import Common + class TestCloudRun(Common, unittest.TestCase): - environment = "cloudrun" - language = "nodejs" + environment = "cloudrun" + language = "nodejs" - # What it should be - monitored_resource_name = "cloud_run_revision" - monitored_resource_labels = [ - "project_id", - "service_name", - "revision_name", - "location", - "configuration_name", - ] + # What it should be + monitored_resource_name = "cloud_run_revision" + monitored_resource_labels = [ + "project_id", + "service_name", + "revision_name", + "location", + "configuration_name", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index 74ad48131c33..eecab08ae0c2 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -49,12 +49,12 @@ def test_default_http_request_pylogging(self): found_trace = log_list[-1].trace self.assertIsNotNone(found_request) - self.assertIsNotNone(found_request['requestMethod']) - self.assertIsNotNone(found_request['requestUrl']) - self.assertIsNotNone(found_request['userAgent']) - self.assertIsNotNone(found_request['protocol']) - self.assertEqual(found_request['requestMethod'], 'POST') - self.assertEqual(found_request['protocol'], 'HTTP/1.1') + self.assertIsNotNone(found_request["requestMethod"]) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertIsNotNone(found_request["userAgent"]) + self.assertIsNotNone(found_request["protocol"]) + self.assertEqual(found_request["requestMethod"], "POST") + self.assertEqual(found_request["protocol"], "HTTP/1.1") self.assertIsNotNone(found_trace) self.assertIn("projects/", found_trace) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index f107d3b1e5f6..7eaa0c119df7 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -21,6 +21,7 @@ from ..common.common import Common from ..common.python import CommonPython + class TestCloudFunctions(Common, CommonPython, unittest.TestCase): environment = "functions" @@ -40,12 +41,12 @@ def test_default_http_request_pylogging(self): found_trace = log_list[-1].trace self.assertIsNotNone(found_request) - self.assertIsNotNone(found_request['requestMethod']) - self.assertIsNotNone(found_request['requestUrl']) - self.assertIsNotNone(found_request['userAgent']) - self.assertIsNotNone(found_request['protocol']) - self.assertEqual(found_request['requestMethod'], 'POST') - self.assertEqual(found_request['protocol'], 'HTTP/1.1') + self.assertIsNotNone(found_request["requestMethod"]) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertIsNotNone(found_request["userAgent"]) + self.assertIsNotNone(found_request["protocol"]) + self.assertEqual(found_request["requestMethod"], "POST") + self.assertEqual(found_request["protocol"], "HTTP/1.1") self.assertIsNotNone(found_trace) self.assertIn("projects/", found_trace) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 08b74cb44771..7fb7033b04a0 100644 --- 
a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -268,13 +268,11 @@ def test_emit(self): ) logname = "loggername" message = "hello world" - labels = {"test-key": "test-value"} record = logging.LogRecord(logname, logging, None, None, message, None, None) - record.labels = labels - handler.emit(record) + handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, labels, None, None, None, None), + (record, message, _GLOBAL_RESOURCE, None, None, None, None, None), ) def test_emit_manual_field_override(self): @@ -282,8 +280,15 @@ def test_emit_manual_field_override(self): from google.cloud.logging_v2.resource import Resource client = _Client(self.PROJECT) + default_labels = { + "default_key": "default-value", + "overwritten_key": "bad_value", + } handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + labels=default_labels, ) logname = "loggername" message = "hello world" @@ -299,9 +304,14 @@ def test_emit_manual_field_override(self): setattr(record, "source_location", expected_source) expected_resource = Resource(type="test", labels={}) setattr(record, "resource", expected_resource) - expected_labels = {"test-label": "manual"} - setattr(record, "labels", expected_labels) - handler.emit(record) + added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} + expected_labels = { + "default_key": "default-value", + "overwritten_key": "new_value", + "added_key": "added_value", + } + setattr(record, "labels", added_labels) + handler.handle(record) self.assertEqual( handler.transport.send_called_with, diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 09d6b14faa96..13719bf536f1 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -40,17 +40,18 @@ def index(): def test_ctor_defaults(self): handler = self._make_one() - self.assertIsNone(handler.name) + self.assertIsNone(handler.project_id) - def test_ctor_w_name(self): - handler = self._make_one(name="foo") - self.assertEqual(handler.name, "foo") + def test_ctor_w_project(self): + handler = self._make_one(project_id="foo") + self.assertEqual(handler.project_id, "foo") def test_format(self): import logging import json - handler = self._make_one() + labels = {"default_key": "default-value"} + handler = self._make_one(labels=labels) logname = "loggername" message = "hello world,嗨 世界" pathname = "testpath" @@ -74,6 +75,7 @@ def test_format(self): "userAgent": "", "protocol": "", }, + "logging.googleapis.com/labels": labels, } handler.filter(record) result = json.loads(handler.format(record)) @@ -106,6 +108,7 @@ def test_format_minimal(self): "userAgent": "", "protocol": "", }, + "logging.googleapis.com/labels": {}, } handler.filter(record) result = json.loads(handler.format(record)) @@ -160,7 +163,11 @@ def test_format_overrides(self): import logging import json - handler = self._make_one() + default_labels = { + "default_key": "default-value", + "overwritten_key": "bad_value", + } + handler = self._make_one(labels=default_labels) logname = "loggername" message = "hello world,嗨 世界" record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) @@ -172,6 
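The override tests encode one merge rule: handler-level default labels combine with per-record labels, and the per-record value wins on key collisions. Worked out with the tests' own values:

default_labels = {"default_key": "default-value", "overwritten_key": "bad_value"}
record_labels = {"added_key": "added_value", "overwritten_key": "new_value"}
merged = {**default_labels, **record_labels}
# merged == {"default_key": "default-value",
#            "overwritten_key": "new_value",
#            "added_key": "added_value"}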
+179,8 @@ def test_format_overrides(self): record.http_request = {"requestUrl": overwrite_path} record.source_location = {"file": overwrite_file} record.trace = overwrite_trace + added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} + record.labels = added_labels expected_payload = { "logging.googleapis.com/trace": overwrite_trace, "logging.googleapis.com/sourceLocation": { @@ -185,6 +194,11 @@ def test_format_overrides(self): "userAgent": "", "protocol": "", }, + "logging.googleapis.com/labels": { + "default_key": "default-value", + "overwritten_key": "new_value", + "added_key": "added_value", + }, } app = self.create_app() diff --git a/tests/environment b/tests/environment index cf9ccb495dd3..94ff68580551 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit cf9ccb495dd39555748c704963df93054d246050 +Subproject commit 94ff685805510ad8d78c170603798cbe44050bce From b8bf13466f045df27da86f7f1007a28b002d8ee2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 26 Apr 2021 17:45:00 -0400 Subject: [PATCH 437/855] chore: migrate to owl bot (#270) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: copy files from googleapis-gen 130ce904e5d546c312943d10f48799590f9c0f66 * chore: run the post processor * 🦉 Updates from OwlBot Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 + .../google-cloud-logging/.github/.OwlBot.yaml | 26 +++ .../.github/header-checker-lint.yml | 2 +- .../google-cloud-logging/.kokoro/release.sh | 4 +- .../.kokoro/release/common.cfg | 14 +- .../docs/_static/custom.css | 13 +- .../{synth.py => owlbot.py} | 51 +++--- packages/google-cloud-logging/renovate.json | 5 +- .../samples/snippets/noxfile.py | 10 +- packages/google-cloud-logging/synth.metadata | 161 ------------------ 10 files changed, 82 insertions(+), 208 deletions(-) create mode 100644 packages/google-cloud-logging/.github/.OwlBot.lock.yaml create mode 100644 packages/google-cloud-logging/.github/.OwlBot.yaml rename packages/google-cloud-logging/{synth.py => owlbot.py} (69%) delete mode 100644 packages/google-cloud-logging/synth.metadata diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..29084e8a33af --- /dev/null +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/packages/google-cloud-logging/.github/.OwlBot.yaml b/packages/google-cloud-logging/.github/.OwlBot.yaml new file mode 100644 index 000000000000..63a2aab5460e --- /dev/null +++ b/packages/google-cloud-logging/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/logging/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 + diff --git a/packages/google-cloud-logging/.github/header-checker-lint.yml b/packages/google-cloud-logging/.github/header-checker-lint.yml index fc281c05bd55..6fe78aa7987a 100644 --- a/packages/google-cloud-logging/.github/header-checker-lint.yml +++ b/packages/google-cloud-logging/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 2ef944a00e3f..f8994b0341dc 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-logging python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 9fedb82bbf67..e0012bf9c19e 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-logging/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/packages/google-cloud-logging/docs/_static/custom.css b/packages/google-cloud-logging/docs/_static/custom.css index bcd37bbd3c4a..b0a295464b23 100644 --- a/packages/google-cloud-logging/docs/_static/custom.css +++ b/packages/google-cloud-logging/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/owlbot.py similarity index 69% rename from packages/google-cloud-logging/synth.py rename to 
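The deep-copy-regex above is the core of the Owl Bot flow: generated files are copied from googleapis-gen into an owl-bot-staging area keyed by API version. A rough Python rendering of that path mapping (Owl Bot itself resolves the $1/$2 backreferences; the re.sub translation and the sample path are assumptions for illustration):

import re

src = "/google/logging/v2/logging-v2-py/google/cloud/logging_v2/types/log_entry.py"
dest = re.sub(r"/google/logging/(v.*)/.*-py/(.*)", r"/owl-bot-staging/\1/\2", src)
# dest == "/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py"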
packages/google-cloud-logging/owlbot.py index 7f7008a39cbd..f012b1191d46 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/owlbot.py @@ -17,38 +17,33 @@ from synthtool import gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -# ---------------------------------------------------------------------------- -# Generate logging GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="logging", - version="v2", - bazel_target="//google/logging/v2:logging-v2-py", - include_protos=True, -) +default_version = "v2" -s.move( - library, - excludes=[ - "setup.py", - "README.rst", - "google/cloud/logging/__init__.py", # generated types are hidden from users - "google/cloud/logging_v2/__init__.py", - "docs/index.rst", - "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead - "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer - ], -) +for library in s.get_staging_dirs(default_version): + if library.name == "v2": + # Fix generated unit tests + s.replace( + library / "tests/unit/gapic/logging_v2/test_logging_service_v2.py", + "MonitoredResource\(\s*type_", + "MonitoredResource(type" + ) -# Fix generated unit tests -s.replace( - "tests/unit/gapic/logging_v2/test_logging_service_v2.py", - "MonitoredResource\(\s*type_", - "MonitoredResource(type" -) + s.move( + library, + excludes=[ + "setup.py", + "README.rst", + "google/cloud/logging/__init__.py", # generated types are hidden from users + "google/cloud/logging_v2/__init__.py", + "docs/index.rst", + "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead + "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer + ], + ) + +s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index f08bc22c9a55..c04895563e69 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 97bf7da80e39..956cdf4f9250 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -172,10 +172,16 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", 
"-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata deleted file mode 100644 index 345edc6bc267..000000000000 --- a/packages/google-cloud-logging/synth.metadata +++ /dev/null @@ -1,161 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-logging.git", - "sha": "6743654727f56ce65bdd6dc45075ef41ebb84bcf" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "149a3a84c29c9b8189576c7442ccb6dcf6a8f95b", - "internalRef": "364411656" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "logging", - "apiVersion": "v2", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "google/cloud/logging/py.typed", - "google/cloud/logging_v2/proto/log_entry.proto", - "google/cloud/logging_v2/proto/logging.proto", - "google/cloud/logging_v2/proto/logging_config.proto", - 
"google/cloud/logging_v2/proto/logging_metrics.proto", - "google/cloud/logging_v2/py.typed", - "google/cloud/logging_v2/services/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/async_client.py", - "google/cloud/logging_v2/services/config_service_v2/client.py", - "google/cloud/logging_v2/services/config_service_v2/pagers.py", - "google/cloud/logging_v2/services/config_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/transports/base.py", - "google/cloud/logging_v2/services/config_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/services/logging_service_v2/__init__.py", - "google/cloud/logging_v2/services/logging_service_v2/async_client.py", - "google/cloud/logging_v2/services/logging_service_v2/client.py", - "google/cloud/logging_v2/services/logging_service_v2/pagers.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/base.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/services/metrics_service_v2/__init__.py", - "google/cloud/logging_v2/services/metrics_service_v2/async_client.py", - "google/cloud/logging_v2/services/metrics_service_v2/client.py", - "google/cloud/logging_v2/services/metrics_service_v2/pagers.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/base.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/types/__init__.py", - "google/cloud/logging_v2/types/log_entry.py", - "google/cloud/logging_v2/types/logging.py", - "google/cloud/logging_v2/types/logging_config.py", - "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/logging_v2/__init__.py", - "tests/unit/gapic/logging_v2/test_config_service_v2.py", - "tests/unit/gapic/logging_v2/test_logging_service_v2.py", - "tests/unit/gapic/logging_v2/test_metrics_service_v2.py" - ] -} \ No newline at end of file From d20678753cca3e19dc0d486de1b2c53a6d248582 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 26 Apr 2021 23:45:20 +0200 Subject: [PATCH 438/855] chore(deps): update dependency google-cloud-bigquery to v2.14.0 (#271) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 30afb45a2afe..3540b111a713 100644 --- 
a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.13.1 +google-cloud-bigquery==2.14.0 google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 0bd37e2b2704f44b8d541a13a7fcb41aac4463ae Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 20:21:37 -0400 Subject: [PATCH 439/855] chore(revert): revert preventing normalization (#269) --- packages/google-cloud-logging/setup.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c88b4ebd8d10..8b794791c41f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -17,20 +17,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -72,7 +58,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", From e620788963edb8f188aa8889f5b76e0473bdd77b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:20:55 +0200 Subject: [PATCH 440/855] chore(deps): update dependency google-cloud-bigquery to v2.15.0 (#277) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 3540b111a713..bfff198eabbc 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.14.0 +google-cloud-bigquery==2.15.0 google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 62606e91fd167c63666b2c3ae5a4570fa693cd90 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 3 May 2021 15:25:45 -0700 Subject: [PATCH 441/855] feat: support span inference (#267) --- .../cloud/logging_v2/handlers/_helpers.py | 72 ++++++---- .../cloud/logging_v2/handlers/app_engine.py | 4 +- .../cloud/logging_v2/handlers/handlers.py | 3 +- .../logging_v2/handlers/structured_log.py | 1 + .../tests/environment/tests/common/common.py | 15 +++ .../tests/environment/tests/common/python.py | 28 +++- .../tests/unit/handlers/test__helpers.py | 124 ++++++++++++++---- .../tests/unit/handlers/test_app_engine.py | 6 +- .../unit/handlers/test_structured_log.py | 15 ++- tests/environment | 2 +- 10 files changed, 207 insertions(+), 63 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 88eba07a67a2..ba853edff15b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ 
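For context on the setup.py revert above: the removed shim existed only to defeat setuptools' PEP 440 version normalization. What normalization does, in one line (using the packaging library that setuptools vendors, as the removed code itself showed):

from packaging.version import Version

str(Version("2.4.0-beta1"))  # -> "2.4.0b1", the normalized form setuptools would publish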
b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -16,6 +16,7 @@ import math import json +import re try: import flask @@ -55,12 +56,13 @@ def get_request_data_from_flask(): """Get http_request and trace data from flask request headers. Returns: - Tuple[Optional[dict], Optional[str]]: - Data related to the current http request and the trace_id for the - request. Both fields will be None if a flask request isn't found. + Tuple[Optional[dict], Optional[str], Optional[str]]: + Data related to the current http request, trace_id, and span_id for + the request. All fields will be None if a django request isn't + found. """ if flask is None or not flask.request: - return None, None + return None, None, None # build http_request http_request = { @@ -73,27 +75,26 @@ def get_request_data_from_flask(): "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } - # find trace id - trace_id = None + # find trace id and span id header = flask.request.headers.get(_FLASK_TRACE_HEADER) - if header: - trace_id = header.split("/", 1)[0] + trace_id, span_id = _parse_trace_span(header) - return http_request, trace_id + return http_request, trace_id, span_id def get_request_data_from_django(): """Get http_request and trace data from django request headers. Returns: - Tuple[Optional[dict], Optional[str]]: - Data related to the current http request and the trace_id for the - request. Both fields will be None if a django request isn't found. + Tuple[Optional[dict], Optional[str], Optional[str]]: + Data related to the current http request, trace_id, and span_id for + the request. All fields will be None if a django request isn't + found. """ request = _get_django_request() if request is None: - return None, None + return None, None, None # convert content_length to int if it exists content_length = None @@ -112,13 +113,35 @@ def get_request_data_from_django(): "protocol": request.META.get(_PROTOCOL_HEADER), } - # find trace id - trace_id = None + # find trace id and span id header = request.META.get(_DJANGO_TRACE_HEADER) - if header: - trace_id = header.split("/", 1)[0] + trace_id, span_id = _parse_trace_span(header) - return http_request, trace_id + return http_request, trace_id, span_id + + +def _parse_trace_span(header): + """Given an X_CLOUD_TRACE header, extract the trace and span ids. + + Args: + header (str): the string extracted from the X_CLOUD_TRACE header + Returns: + Tuple[Optional[dict], Optional[str]]: + The trace_id and span_id extracted from the header + Each field will be None if not found. + """ + trace_id = None + span_id = None + if header: + try: + split_header = header.split("/", 1) + trace_id = split_header[0] + header_suffix = split_header[1] + # the span is the set of alphanumeric characters after the / + span_id = re.findall(r"^\w+", header_suffix)[0] + except IndexError: + pass + return trace_id, span_id def get_request_data(): @@ -126,9 +149,10 @@ def get_request_data(): frameworks (currently supported: Flask and Django). Returns: - Tuple[Optional[dict], Optional[str]]: - Data related to the current http request and the trace_id for the - request. Both fields will be None if a supported web request isn't found. + Tuple[Optional[dict], Optional[str], Optional[str]]: + Data related to the current http request, trace_id, and span_id for + the request. All fields will be None if a django request isn't + found. 
""" checkers = ( get_request_data_from_django, @@ -136,8 +160,8 @@ def get_request_data(): ) for checker in checkers: - http_request, trace_id = checker() + http_request, trace_id, span_id = checker() if http_request is not None: - return http_request, trace_id + return http_request, trace_id, span_id - return None, None + return None, None, None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index bc7daa9d0cea..874a9d6085d4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -90,7 +90,7 @@ def get_gae_labels(self): """ gae_labels = {} - _, trace_id = get_request_data() + _, trace_id, _ = get_request_data() if trace_id is not None: gae_labels[_TRACE_ID_LABEL] = trace_id @@ -107,7 +107,7 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. """ message = super(AppEngineHandler, self).format(record) - inferred_http, inferred_trace = get_request_data() + inferred_http, inferred_trace, _ = get_request_data() if inferred_trace is not None: inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" # allow user overrides diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index b9cc53a94fdd..3a5599aaebf9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -59,7 +59,7 @@ def filter(self, record): } record.msg = "" if record.msg is None else record.msg # find http request data - inferred_http, inferred_trace = get_request_data() + inferred_http, inferred_trace, inferred_span = get_request_data() if inferred_trace is not None and self.project is not None: inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" # set labels @@ -70,6 +70,7 @@ def filter(self, record): ) record.trace = getattr(record, "trace", inferred_trace) or "" + record.span_id = getattr(record, "span_id", inferred_span) or "" record.http_request = getattr(record, "http_request", inferred_http) or {} record.request_method = record.http_request.get("requestMethod", "") record.request_url = record.http_request.get("requestUrl", "") diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index e9d036423e9f..76a560538818 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -24,6 +24,7 @@ '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": { %(total_labels_str)s }, ' '"logging.googleapis.com/trace": "%(trace)s", ' + '"logging.googleapis.com/spanId": "%(span_id)s", ' '"logging.googleapis.com/sourceLocation": { "file": "%(file)s", "line": "%(line)d", "function": "%(function)s"}, ' '"httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }' ) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index abd9ecd5ac24..8e25fc3e82be 100644 --- 
a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -137,6 +137,21 @@ def test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") + def test_receive_unicode_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" + log_list = self.trigger_and_retrieve(log_text, "simplelog") + + found_log = None + for log in log_list: + message = ( + log.payload.get("message", None) + if isinstance(log.payload, dict) + else str(log.payload) + ) + if message and log_text in message: + found_log = log + self.assertIsNotNone(found_log, "expected unicode log not found") + # add back after v3.0.0 # def test_monitored_resource(self): # if self.language != "python": diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index e765a6928718..7455924d6329 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -22,7 +22,7 @@ class CommonPython: - def pylogging_test_receive_log(self): + def test_pylogging_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "pylogging") @@ -37,6 +37,21 @@ def pylogging_test_receive_log(self): found_log = log self.assertIsNotNone(found_log, "expected log text not found") + def test_pylogging_receive_unicode_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" + log_list = self.trigger_and_retrieve(log_text, "pylogging") + + found_log = None + for log in log_list: + message = ( + log.payload.get("message", None) + if isinstance(log.payload, dict) + else str(log.payload) + ) + if message and log_text in message: + found_log = log + self.assertIsNotNone(found_log, "expected unicode log not found") + def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "pylogging") @@ -90,12 +105,14 @@ def test_flask_http_request_pylogging(self): expected_base_url = "http://test" expected_path = "/pylogging" expected_trace = "123" + expected_span = "456" + trace_header = f"{expected_trace}/{expected_span};o=1" log_list = self.trigger_and_retrieve( log_text, "pylogging_flask", path=expected_path, - trace=expected_trace, + trace=trace_header, base_url=expected_base_url, agent=expected_agent, ) @@ -112,8 +129,13 @@ def test_flask_http_request_pylogging(self): self.assertEqual(found_request["protocol"], "HTTP/1.1") found_trace = log_list[-1].trace + found_span = log_list[-1].span_id self.assertIsNotNone(found_trace) self.assertIn("projects/", found_trace) + if self.environment != "functions": + # functions seems to override the user's trace value + self.assertIn(expected_trace, found_trace) + self.assertEqual(expected_span, found_span) def test_pylogging_extras(self): if self.environment == "kubernetes" or "appengine" in self.environment: @@ -123,6 +145,7 @@ def test_pylogging_extras(self): log_text = f"{inspect.currentframe().f_code.co_name}" kwargs = { "trace": "123", + "span_id": "456", "requestMethod": "POST", "requestUrl": "http://test", "userAgent": "agent", @@ -138,6 +161,7 @@ def test_pylogging_extras(self): if self.environment != "functions": # functions seems to override the user's trace value self.assertEqual(found_log.trace, kwargs["trace"]) + 
self.assertEqual(found_log.span_id, kwargs["span_id"]) # check that custom http request fields were set self.assertIsNotNone(found_log.http_request) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index fd17f6ffd33c..b2c822e7c88e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -17,8 +17,10 @@ import mock _FLASK_TRACE_ID = "flask-id" +_FLASK_SPAN_ID = "span0flask" _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} _DJANGO_TRACE_ID = "django-id" +_DJANGO_SPAN_ID = "span0django" _DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} @@ -44,15 +46,17 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - http_request, trace_id = self._call_fut() + http_request, trace_id, span_id = self._call_fut() self.assertIsNone(trace_id) + self.assertIsNone(span_id) self.assertEqual(http_request["requestMethod"], "GET") def test_valid_context_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID - flask_trace_id = expected_trace_id + "/testspanid" + expected_span_id = _FLASK_SPAN_ID + flask_trace_id = f"{expected_trace_id}/{expected_span_id}" app = self.create_app() context = app.test_request_context( @@ -60,9 +64,10 @@ def test_valid_context_header(self): ) with context: - http_request, trace_id = self._call_fut() + http_request, trace_id, span_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -84,7 +89,7 @@ def test_http_request_populated(self): environ_base={"REMOTE_ADDR": expected_ip}, headers=headers, ) - http_request, trace_id = self._call_fut() + http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) @@ -99,7 +104,7 @@ def test_http_request_sparse(self): app = self.create_app() with app.test_client() as c: c.put(path=expected_path) - http_request, trace_id = self._call_fut() + http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["protocol"], "HTTP/1.1") @@ -135,17 +140,20 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id = self._call_fut() + http_request, trace_id, span_id = self._call_fut() + self.assertEqual(http_request["requestMethod"], "GET") self.assertIsNone(trace_id) + self.assertIsNone(span_id) def test_valid_context_header(self): from django.test import RequestFactory from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" - expected_trace_id = "testtraceiddjango" - django_trace_id = expected_trace_id + "/testspanid" + expected_span_id = _DJANGO_SPAN_ID + expected_trace_id = _DJANGO_TRACE_ID + django_trace_id = f"{expected_trace_id}/{expected_span_id}" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -153,9 +161,10 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - 
http_request, trace_id = self._call_fut() + http_request, trace_id, span_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -178,7 +187,7 @@ def test_http_request_populated(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id = self._call_fut() + http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["userAgent"], expected_agent) @@ -195,7 +204,7 @@ def test_http_request_sparse(self): django_request = RequestFactory().put(expected_path) middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id = self._call_fut() + http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["remoteIp"], "127.0.0.1") @@ -226,8 +235,8 @@ def _helper(self, django_return, flask_return): return django_mock, flask_mock, result def test_from_django(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) - flask_expected = (None, None) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) + flask_expected = (None, None, None) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, django_expected) @@ -235,8 +244,8 @@ def test_from_django(self): flask_mock.assert_not_called() def test_from_flask(self): - django_expected = (None, None) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + django_expected = (None, None, None) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, flask_expected) @@ -245,8 +254,8 @@ def test_from_flask(self): flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) @@ -257,19 +266,19 @@ def test_from_django_and_flask(self): flask_mock.assert_not_called() def test_missing_http_request(self): - flask_expected = (None, _FLASK_TRACE_ID) - django_expected = (None, _DJANGO_TRACE_ID) + flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # function only returns trace if http_request data is present - self.assertEqual(output, (None, None)) + self.assertEqual(output, (None, None, None)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_missing_trace_id(self): - flask_expected = (_FLASK_HTTP_REQUEST, None) - django_expected = (None, _DJANGO_TRACE_ID) + flask_expected = (_FLASK_HTTP_REQUEST, None, None) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # trace_id is optional @@ -279,14 +288,77 @@ def test_missing_trace_id(self): 
flask_mock.assert_called_once_with() def test_missing_both(self): - flask_expected = (None, None) - django_expected = (None, None) + flask_expected = (None, None, None) + django_expected = (None, None, None) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) - self.assertEqual(output, (None, None)) + self.assertEqual(output, (None, None, None)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_wo_libraries(self): output = self._call_fut() - self.assertEqual(output, (None, None)) + self.assertEqual(output, (None, None, None)) + + +class Test__parse_trace_span(unittest.TestCase): + @staticmethod + def _call_fut(header): + from google.cloud.logging_v2.handlers import _helpers + + return _helpers._parse_trace_span(header) + + def test_empty_header(self): + header = "" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, None) + self.assertEqual(span_id, None) + + def test_no_span(self): + header = "12345" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, header) + self.assertEqual(span_id, None) + + def test_no_trace(self): + header = "/12345" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, "") + self.assertEqual(span_id, "12345") + + def test_with_span(self): + expected_trace = "12345" + expected_span = "67890" + header = f"{expected_trace}/{expected_span}" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertEqual(span_id, expected_span) + + def test_with_extra_characters(self): + expected_trace = "12345" + expected_span = "67890" + header = f"{expected_trace}/{expected_span};o=0" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertEqual(span_id, expected_span) + + def test_with_unicode_span(self): + """ + Spans are expected to be alphanumeric + """ + expected_trace = "12345" + header = f"{expected_trace}/😀123" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertEqual(span_id, None) + + def test_with_unicode_trace(self): + """ + Spans are expected to be alphanumeric + """ + expected_trace = "12😀345" + expected_span = "67890" + header = f"{expected_trace}/{expected_span}" + trace_id, span_id = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertEqual(span_id, expected_span) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 65e804573855..c726c8496df9 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -97,7 +97,7 @@ def test_emit(self): expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(expected_http_request, trace_id), + return_value=(expected_http_request, trace_id, None), ) with get_request_patch: # library integrations mocked to return test data @@ -135,7 +135,7 @@ def test_emit_manual_field_override(self): inferred_trace_id = "trace-test" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(inferred_http_request, inferred_trace_id), + return_value=(inferred_http_request, inferred_trace_id, None), ) with get_request_patch: # library integrations mocked to return test data @@ -180,7 
+180,7 @@ def test_emit_manual_field_override(self): def _get_gae_labels_helper(self, trace_id): get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(None, trace_id), + return_value=(None, trace_id, None), ) client = mock.Mock(project=self.PROJECT, spec=["project"]) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 13719bf536f1..66822d74c2c7 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -64,6 +64,7 @@ def test_format(self): "message": message, "severity": record.levelname, "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", "logging.googleapis.com/sourceLocation": { "file": pathname, "line": str(lineno), @@ -128,8 +129,11 @@ def test_format_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" + expected_span = "456" + trace_header = f"{expected_trace}/{expected_span};o=0" expected_payload = { "logging.googleapis.com/trace": expected_trace, + "logging.googleapis.com/spanId": expected_span, "httpRequest": { "requestMethod": "PUT", "requestUrl": expected_path, @@ -145,7 +149,7 @@ def test_format_with_request(self): data="body", headers={ "User-Agent": expected_agent, - "X_CLOUD_TRACE_CONTEXT": expected_trace, + "X_CLOUD_TRACE_CONTEXT": trace_header, }, ) handler.filter(record) @@ -173,16 +177,19 @@ def test_format_overrides(self): record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) overwrite_path = "http://overwrite" inferred_path = "http://testserver/123" - overwrite_trace = "456" - inferred_trace = "123" + overwrite_trace = "abc" + overwrite_span = "def" + inferred_trace_span = "123/456;" overwrite_file = "test-file" record.http_request = {"requestUrl": overwrite_path} record.source_location = {"file": overwrite_file} record.trace = overwrite_trace + record.span_id = overwrite_span added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} record.labels = added_labels expected_payload = { "logging.googleapis.com/trace": overwrite_trace, + "logging.googleapis.com/spanId": overwrite_span, "logging.googleapis.com/sourceLocation": { "file": overwrite_file, "function": "", @@ -206,7 +213,7 @@ def test_format_overrides(self): c.put( path=inferred_path, data="body", - headers={"X_CLOUD_TRACE_CONTEXT": inferred_trace}, + headers={"X_CLOUD_TRACE_CONTEXT": inferred_trace_span}, ) handler.filter(record) result = json.loads(handler.format(record)) diff --git a/tests/environment b/tests/environment index 94ff68580551..df1b7c131575 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 94ff685805510ad8d78c170603798cbe44050bce +Subproject commit df1b7c131575f8eb59120cef75709496602b7665 From 6cc850b5435466e2e92ca3da67c4ea3e83296a1e Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 15:31:21 -0700 Subject: [PATCH 442/855] chore: add SECURITY.md (#275) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- packages/google-cloud-logging/SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/google-cloud-logging/SECURITY.md diff --git a/packages/google-cloud-logging/SECURITY.md 
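Concretely, test_format_with_request above asserts a payload of this shape once spanId joins GCP_FORMAT (the userAgent and protocol values are assumed from the sibling tests; the rest are the test's own):

expected_payload = {
    "logging.googleapis.com/trace": "123",
    "logging.googleapis.com/spanId": "456",
    "httpRequest": {
        "requestMethod": "PUT",
        "requestUrl": "http://testserver/123",
        "userAgent": "Mozilla/5.0",
        "protocol": "HTTP/1.1",
    },
}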
b/packages/google-cloud-logging/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-logging/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. From 86d1a58767b7e91ec5c27cdfdf368fd22667c36e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:38:06 +0200 Subject: [PATCH 443/855] chore(deps): update dependency pytest to v6.2.3 (#273) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index d0029c6de49e..93f50ad13971 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==6.0.1 +pytest==6.2.3 From f1152ce8a005d52d0d0128969719d20faedd438c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:41:46 +0200 Subject: [PATCH 444/855] chore(deps): update dependency google-cloud-storage to v1.38.0 (#272) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index bfff198eabbc..f06182601629 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.15.0 -google-cloud-storage==1.37.1 +google-cloud-storage==1.38.0 google-cloud-pubsub==2.4.1 From 0ec5ddcc16dfba4572a91c53934ad3a32022202b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 20:40:44 +0200 Subject: [PATCH 445/855] chore(deps): update dependency pytest to v6.2.4 (#280) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 93f50ad13971..766a8035d690 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==6.2.3 +pytest==6.2.4 From 87859dc591878d1764511667496e92abfc235e84 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 5 May 2021 10:17:32 -0700 Subject: [PATCH 446/855] refactor: clean up CloudLoggingFilter (#281) --- .../cloud/logging_v2/handlers/_helpers.py | 12 -- .../cloud/logging_v2/handlers/handlers.py | 86 +++++++------- .../logging_v2/handlers/structured_log.py | 12 +- .../tests/unit/handlers/test__helpers.py | 7 -- .../tests/unit/handlers/test_handlers.py | 105 +++++++++++------- .../unit/handlers/test_structured_log.py | 35 +----- 6 files changed, 124 insertions(+), 133 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index ba853edff15b..6d9debfe26cc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -68,10 +68,7 @@ def get_request_data_from_flask(): http_request = { "requestMethod": flask.request.method, "requestUrl": flask.request.url, - "requestSize": flask.request.content_length, "userAgent": flask.request.user_agent.string, - "remoteIp": flask.request.remote_addr, - "referer": flask.request.referrer, "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } @@ -96,20 +93,11 @@ def get_request_data_from_django(): if request is None: return None, None, None - # convert content_length to int if it exists - content_length = None - try: - content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) - except (ValueError, TypeError): - content_length = None # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), - "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), - "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), - "referer": request.META.get(_DJANGO_REFERER_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 3a5599aaebf9..973321423a14 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -14,6 +14,7 @@ """Python :mod:`logging` handlers for Cloud Logging.""" +import json import logging from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE @@ -32,50 +33,57 @@ class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging information to each LogRecord. - When attached to a LogHandler, each incoming log will receive trace and - http_request related to the request. This data can be overwritten using - the `extras` argument when writing logs. + When attached to a LogHandler, each incoming log will be modified + to include new Cloud Logging relevant data. This data can be manually + overwritten using the `extras` argument when writing logs. """ def __init__(self, project=None, default_labels=None): self.project = project self.default_labels = default_labels if default_labels else {} - def filter(self, record): - # ensure record has all required fields set + @staticmethod + def _infer_source_location(record): + """Helper function to infer source location data from a LogRecord. 
+ Will default to record.source_location if already set + """ if hasattr(record, "source_location"): - record.line = int(record.source_location.get("line", 0)) - record.file = record.source_location.get("file", "") - record.function = record.source_location.get("function", "") + return record.source_location else: - record.line = record.lineno if record.lineno else 0 - record.file = record.pathname if record.pathname else "" - record.function = record.funcName if record.funcName else "" - if any([record.line, record.file, record.function]): - record.source_location = { - "line": record.line, - "file": record.file, - "function": record.function, - } - record.msg = "" if record.msg is None else record.msg - # find http request data + name_map = [ + ("line", "lineno"), + ("file", "pathname"), + ("function", "funcName"), + ] + output = {} + for (gcp_name, std_lib_name) in name_map: + value = getattr(record, std_lib_name, None) + if value is not None: + output[gcp_name] = value + return output if output else None + + def filter(self, record): + """ + Add new Cloud Logging data to each LogRecord as it comes in + """ + user_labels = getattr(record, "labels", {}) inferred_http, inferred_trace, inferred_span = get_request_data() if inferred_trace is not None and self.project is not None: inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" - # set labels - user_labels = getattr(record, "labels", {}) - record.total_labels = {**self.default_labels, **user_labels} - record.total_labels_str = ", ".join( - [f'"{k}": "{v}"' for k, v in record.total_labels.items()] - ) - - record.trace = getattr(record, "trace", inferred_trace) or "" - record.span_id = getattr(record, "span_id", inferred_span) or "" - record.http_request = getattr(record, "http_request", inferred_http) or {} - record.request_method = record.http_request.get("requestMethod", "") - record.request_url = record.http_request.get("requestUrl", "") - record.user_agent = record.http_request.get("userAgent", "") - record.protocol = record.http_request.get("protocol", "") + # set new record values + record._resource = getattr(record, "resource", None) + record._trace = getattr(record, "trace", inferred_trace) or None + record._span_id = getattr(record, "span_id", inferred_span) or None + record._http_request = getattr(record, "http_request", inferred_http) + record._source_location = CloudLoggingFilter._infer_source_location(record) + record._labels = {**self.default_labels, **user_labels} or None + # create guaranteed string representations for structured logging + record._msg_str = record.msg or "" + record._trace_str = record._trace or "" + record._span_id_str = record._span_id or "" + record._http_request_str = json.dumps(record._http_request or {}) + record._source_location_str = json.dumps(record._source_location or {}) + record._labels_str = json.dumps(record._labels or {}) return True @@ -163,12 +171,12 @@ def emit(self, record): self.transport.send( record, message, - resource=getattr(record, "resource", self.resource), - labels=getattr(record, "total_labels", None) or None, - trace=getattr(record, "trace", None) or None, - span_id=getattr(record, "span_id", None) or None, - http_request=getattr(record, "http_request", None) or None, - source_location=getattr(record, "source_location", None) or None, + resource=(record._resource or self.resource), + labels=record._labels, + trace=record._trace, + span_id=record._span_id, + http_request=record._http_request, + source_location=record._source_location, ) diff --git 
a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 76a560538818..0edb5c39eba5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -20,13 +20,13 @@ from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter GCP_FORMAT = ( - '{"message": "%(message)s", ' + '{"message": "%(_msg_str)s", ' '"severity": "%(levelname)s", ' - '"logging.googleapis.com/labels": { %(total_labels_str)s }, ' - '"logging.googleapis.com/trace": "%(trace)s", ' - '"logging.googleapis.com/spanId": "%(span_id)s", ' - '"logging.googleapis.com/sourceLocation": { "file": "%(file)s", "line": "%(line)d", "function": "%(function)s"}, ' - '"httpRequest": {"requestMethod": "%(request_method)s", "requestUrl": "%(request_url)s", "userAgent": "%(user_agent)s", "protocol": "%(protocol)s"} }' + '"logging.googleapis.com/labels": %(_labels_str)s, ' + '"logging.googleapis.com/trace": "%(_trace_str)s", ' + '"logging.googleapis.com/spanId": "%(_span_id_str)s", ' + '"logging.googleapis.com/sourceLocation": %(_source_location_str)s, ' + '"httpRequest": %(_http_request_str)s }' ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index b2c822e7c88e..e1230991fa11 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -94,9 +94,6 @@ def test_http_request_populated(self): self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["userAgent"], expected_agent) - self.assertEqual(http_request["referer"], expected_referrer) - self.assertEqual(http_request["remoteIp"], expected_ip) - self.assertEqual(http_request["requestSize"], len(body_content)) self.assertEqual(http_request["protocol"], "HTTP/1.1") def test_http_request_sparse(self): @@ -191,9 +188,6 @@ def test_http_request_populated(self): self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["userAgent"], expected_agent) - self.assertEqual(http_request["referer"], expected_referrer) - self.assertEqual(http_request["remoteIp"], "127.0.0.1") - self.assertEqual(http_request["requestSize"], len(body_content)) self.assertEqual(http_request["protocol"], "HTTP/1.1") def test_http_request_sparse(self): @@ -207,7 +201,6 @@ def test_http_request_sparse(self): http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) - self.assertEqual(http_request["remoteIp"], "127.0.0.1") self.assertEqual(http_request["protocol"], "HTTP/1.1") diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 7fb7033b04a0..f0a1f81def67 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -16,6 +16,7 @@ import unittest from unittest.mock import patch import mock +import json from google.cloud.logging_v2.handlers._monitored_resources import ( _FUNCTION_ENV_VARS, @@ -57,26 +58,38 @@ def test_filter_record(self): 
filter_obj = self._make_one() logname = "loggername" message = "hello world,嗨 世界" - pathname = "testpath" - lineno = 1 - func = "test-function" + expected_location = { + "line": 1, + "file": "testpath", + "function": "test-function", + } record = logging.LogRecord( - logname, logging.INFO, pathname, lineno, message, None, None, func=func + logname, + logging.INFO, + expected_location["file"], + expected_location["line"], + message, + None, + None, + func=expected_location["function"], ) success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.line, lineno) self.assertEqual(record.msg, message) - self.assertEqual(record.function, func) - self.assertEqual(record.file, pathname) - self.assertEqual(record.trace, "") - self.assertEqual(record.http_request, {}) - self.assertEqual(record.request_method, "") - self.assertEqual(record.request_url, "") - self.assertEqual(record.user_agent, "") - self.assertEqual(record.protocol, "") + self.assertEqual(record._msg_str, message) + self.assertEqual(record._source_location, expected_location) + self.assertEqual(record._source_location_str, json.dumps(expected_location)) + self.assertIsNone(record._resource) + self.assertIsNone(record._trace) + self.assertEqual(record._trace_str, "") + self.assertIsNone(record._span_id) + self.assertEqual(record._span_id_str, "") + self.assertIsNone(record._http_request) + self.assertEqual(record._http_request_str, "{}") + self.assertIsNone(record._labels) + self.assertEqual(record._labels_str, "{}") def test_minimal_record(self): """ @@ -91,16 +104,19 @@ def test_minimal_record(self): success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.line, 0) - self.assertEqual(record.msg, "") - self.assertEqual(record.function, "") - self.assertEqual(record.file, "") - self.assertEqual(record.trace, "") - self.assertEqual(record.http_request, {}) - self.assertEqual(record.request_method, "") - self.assertEqual(record.request_url, "") - self.assertEqual(record.user_agent, "") - self.assertEqual(record.protocol, "") + self.assertIsNone(record.msg) + self.assertEqual(record._msg_str, "") + self.assertIsNone(record._source_location) + self.assertEqual(record._source_location_str, "{}") + self.assertIsNone(record._resource) + self.assertIsNone(record._trace) + self.assertEqual(record._trace_str, "") + self.assertIsNone(record._span_id) + self.assertEqual(record._span_id_str, "") + self.assertIsNone(record._http_request) + self.assertEqual(record._http_request_str, "{}") + self.assertIsNone(record._labels) + self.assertEqual(record._labels_str, "{}") def test_record_with_request(self): """ @@ -115,6 +131,8 @@ def test_record_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" + expected_span = "456" + combined_trace = f"{expected_trace}/{expected_span}" expected_request = { "requestMethod": "PUT", "requestUrl": expected_path, @@ -129,19 +147,18 @@ def test_record_with_request(self): data="body", headers={ "User-Agent": expected_agent, - "X_CLOUD_TRACE_CONTEXT": expected_trace, + "X_CLOUD_TRACE_CONTEXT": combined_trace, }, ) success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.trace, expected_trace) - for key, val in expected_request.items(): - self.assertEqual(record.http_request[key], val) - self.assertEqual(record.request_method, "PUT") - self.assertEqual(record.request_url, expected_path) - self.assertEqual(record.user_agent, expected_agent) - self.assertEqual(record.protocol, 
"HTTP/1.1") + self.assertEqual(record._trace, expected_trace) + self.assertEqual(record._trace_str, expected_trace) + self.assertEqual(record._span_id, expected_span) + self.assertEqual(record._span_id_str, expected_span) + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) def test_user_overrides(self): """ @@ -163,8 +180,12 @@ def test_user_overrides(self): headers={"User-Agent": "default", "X_CLOUD_TRACE_CONTEXT": "default"}, ) # override values + overwritten_resource = "test" + record.resource = overwritten_resource overwritten_trace = "456" record.trace = overwritten_trace + overwritten_span = "789" + record.span_id = overwritten_span overwritten_method = "GET" overwritten_url = "www.google.com" overwritten_agent = "custom" @@ -188,15 +209,19 @@ def test_user_overrides(self): success = filter_obj.filter(record) self.assertTrue(success) - self.assertEqual(record.trace, overwritten_trace) - self.assertEqual(record.http_request, overwritten_request_object) - self.assertEqual(record.request_method, overwritten_method) - self.assertEqual(record.request_url, overwritten_url) - self.assertEqual(record.user_agent, overwritten_agent) - self.assertEqual(record.protocol, overwritten_protocol) - self.assertEqual(record.line, overwritten_line) - self.assertEqual(record.function, overwritten_function) - self.assertEqual(record.file, overwritten_file) + self.assertEqual(record._trace, overwritten_trace) + self.assertEqual(record._trace_str, overwritten_trace) + self.assertEqual(record._span_id, overwritten_span) + self.assertEqual(record._span_id_str, overwritten_span) + self.assertEqual(record._http_request, overwritten_request_object) + self.assertEqual( + record._http_request_str, json.dumps(overwritten_request_object) + ) + self.assertEqual(record._source_location, overwritten_source_location) + self.assertEqual( + record._source_location_str, json.dumps(overwritten_source_location) + ) + self.assertEqual(record._resource, overwritten_resource) class TestCloudLoggingHandler(unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 66822d74c2c7..4b83a4c2d24b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -67,15 +67,10 @@ def test_format(self): "logging.googleapis.com/spanId": "", "logging.googleapis.com/sourceLocation": { "file": pathname, - "line": str(lineno), + "line": lineno, "function": func, }, - "httpRequest": { - "requestMethod": "", - "requestUrl": "", - "userAgent": "", - "protocol": "", - }, + "httpRequest": {}, "logging.googleapis.com/labels": labels, } handler.filter(record) @@ -98,17 +93,8 @@ def test_format_minimal(self): expected_payload = { "message": "", "logging.googleapis.com/trace": "", - "logging.googleapis.com/sourceLocation": { - "file": "", - "line": "0", - "function": "", - }, - "httpRequest": { - "requestMethod": "", - "requestUrl": "", - "userAgent": "", - "protocol": "", - }, + "logging.googleapis.com/sourceLocation": {}, + "httpRequest": {}, "logging.googleapis.com/labels": {}, } handler.filter(record) @@ -190,17 +176,8 @@ def test_format_overrides(self): expected_payload = { "logging.googleapis.com/trace": overwrite_trace, "logging.googleapis.com/spanId": overwrite_span, - "logging.googleapis.com/sourceLocation": { - "file": overwrite_file, 
- "function": "", - "line": "0", - }, - "httpRequest": { - "requestMethod": "", - "requestUrl": overwrite_path, - "userAgent": "", - "protocol": "", - }, + "logging.googleapis.com/sourceLocation": {"file": overwrite_file}, + "httpRequest": {"requestUrl": overwrite_path}, "logging.googleapis.com/labels": { "default_key": "default-value", "overwritten_key": "new_value", From 7ac6623725e51188de4ee7a811c1a6838032a0d4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 May 2021 20:00:57 +0200 Subject: [PATCH 447/855] chore(deps): update dependency google-cloud-bigquery to v2.16.0 (#282) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f06182601629..de585f1bf2fc 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.15.0 +google-cloud-bigquery==2.16.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.4.1 From 344ef9f1f100d04a7bffdc8fdd713304badbb790 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 May 2021 12:01:02 -0700 Subject: [PATCH 448/855] chore: new owl bot post processor docker image (#287) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 5 ++--- .../google-cloud-logging/.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/CONTRIBUTING.rst | 16 +--------------- packages/google-cloud-logging/noxfile.py | 14 ++------------ 4 files changed, 6 insertions(+), 31 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 29084e8a33af..b5c26ed01808 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 8912e9b5d7d7..1bbd787833ec 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.1 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index f6ddd72684f0..4604493b61f3 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. 
-- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index eec35ebb4b7f..493d67e6f55a 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -140,9 +133,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. 
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") From fa73d6da49db1bd66e1b78b661b0915703cd1e63 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 May 2021 16:53:15 -0400 Subject: [PATCH 449/855] chore: add library type to .repo-metadata.json (#285) --- packages/google-cloud-logging/.repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 30541e78b44f..911d58dca90d 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", "release_level": "ga", "language": "python", + "library_type": "GAPIC_COMBO", "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", From 53925168d4e0c5c9cef75b8654f16fa03e716320 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 10 May 2021 14:23:01 -0700 Subject: [PATCH 450/855] fix: changed region format on serverless (#291) --- .../google/cloud/logging_v2/handlers/_monitored_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index ad1de4d2bbec..e257f08e49a4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -73,7 +73,7 @@ def _create_functions_resource(): labels={ "project_id": project, "function_name": function_name, - "region": region if region else "", + "region": region.split("/")[-1] if region else "", }, ) return resource @@ -131,7 +131,7 @@ def _create_cloud_run_resource(): "project_id": project, "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""), "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""), - "location": region if region else "", + "location": region.split("/")[-1] if region else "", "configuration_name": os.environ.get(_CLOUD_RUN_CONFIGURATION_ID, ""), }, ) From 73391a25f7a65bd56cf237af4e7191afa5325574 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 11 May 2021 00:00:38 +0200 Subject: [PATCH 451/855] chore(deps): update dependency google-cloud-pubsub to v2.4.2 (#288) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index de585f1bf2fc..489b0453d9df 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.16.0 google-cloud-storage==1.38.0 -google-cloud-pubsub==2.4.1 +google-cloud-pubsub==2.4.2 From c47a385a4cf42dbe9a29a45aabeb5806eaa6772f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 May 2021 12:21:01 -0700 Subject: [PATCH 452/855] chore: added owlbot to gitignore --- packages/google-cloud-logging/.gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore index b4243ced74e4..ba7b78cfdaf8 100644 --- 
a/packages/google-cloud-logging/.gitignore +++ b/packages/google-cloud-logging/.gitignore @@ -61,3 +61,6 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc pylintrc.test + +# ignore owlbot +owl-bot-staging From aae2aa3a9997fc99d69228df2b3fa547f305b54c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 2 Apr 2021 16:01:12 -0700 Subject: [PATCH 453/855] fix(deps): fix minimum required version of google-api-core (#244) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): add kokoro configs for periodic builds against head This change should be non-destructive. Note for library repo maintainers: After applying this change, you can easily add (or change) periodic builds against head by adding config files in google3. See python-pubsub repo for example. Source-Author: Takashi Matsuo Source-Date: Fri Mar 19 11:17:59 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 79c8dd7ee768292f933012d3a69a5b4676404cda Source-Link: https://github.com/googleapis/synthtool/commit/79c8dd7ee768292f933012d3a69a5b4676404cda * chore(deps): update precommit hook pycqa/flake8 to v3.9.0 [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pycqa/flake8](https://gitlab.com/pycqa/flake8) | repository | minor | `3.8.4` -> `3.9.0` | --- ### Release Notes
pycqa/flake8 ### [`v3.9.0`](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0) [Compare Source](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Author: WhiteSource Renovate Source-Date: Tue Mar 23 17:38:03 2021 +0100 Source-Repo: googleapis/synthtool Source-Sha: f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 Source-Link: https://github.com/googleapis/synthtool/commit/f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 * test(python): use constraints files to check dependency lower bounds Use a constraints file when installing dependencies for system and unit tests nox sessions. https://pip.pypa.io/en/stable/user_guide/#constraints-files > Constraints files are requirements files that **only control which version of a requirement is installed, not whether it is installed or not**. Their syntax and contents is nearly identical to Requirements Files. There is one key difference: Including a package in a constraints file does not trigger installation of the package. ``` testing ├── constraints-3.10.txt ├── constraints-3.11.txt ├── constraints-3.6.txt ├── constraints-3.7.txt ├── constraints-3.8.txt └── constraints-3.9.txt ``` Going forward, one constraints file (currently 3.6) will be populated with every library requirement and extra listed in the `setup.py`. The constraints file will pin each requirement to the lower bound. This ensures that library maintainers will see test failures if they forget to update a lower bound on a dependency. 
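To make the quoted mechanism concrete, here is a minimal sketch of what one of those constraints files contains (the package name `foo` and its version are hypothetical, echoing the example comment in this repo's `testing/constraints-3.6.txt` rather than the real dependency list):

```
# testing/constraints-3.6.txt (hypothetical contents)
# If setup.py declares "foo >= 1.14.0, < 2.0.0dev", pin the lower bound here:
foo==1.14.0
```

The test session then installs against those floors with a command along the lines of `python -m pip install -e . -c testing/constraints-3.6.txt`, so a forgotten lower-bound bump surfaces as a test failure instead of going unnoticed; and because a constraints file only controls versions, listing `foo` there does not install it unless something else requires it.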
See https://github.com/googleapis/python-bigquery/pull/263 for an example Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Mar 23 10:52:02 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 86ed43d4f56e6404d068e62e497029018879c771 Source-Link: https://github.com/googleapis/synthtool/commit/86ed43d4f56e6404d068e62e497029018879c771 * fix: update minimum version of google-api-core Co-authored-by: Bu Sun Kim Co-authored-by: Daniel Sanche --- .../samples/python3.6/periodic-head.cfg | 11 ++ .../samples/python3.7/periodic-head.cfg | 11 ++ .../samples/python3.8/periodic-head.cfg | 11 ++ .../.kokoro/test-samples-against-head.sh | 28 +++++ .../.kokoro/test-samples-impl.sh | 102 ++++++++++++++++++ .../.kokoro/test-samples.sh | 96 +++-------------- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/noxfile.py | 29 ++++- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/synth.metadata | 11 +- .../testing/constraints-3.6.txt | 4 +- 11 files changed, 216 insertions(+), 91 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg create mode 100755 packages/google-cloud-logging/.kokoro/test-samples-against-head.sh create mode 100755 packages/google-cloud-logging/.kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh new file mode 100755 index 000000000000..635a5ace0c20 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# 
Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-logging + +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh new file mode 100755 index 000000000000..cf5de74c17a5 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. './samples' not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails.
+RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index e75891832fd4..0f5f8d4008f6 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewind the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status @@ -24,87 +28,19 @@ cd github/python-logging # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index a9024b15d725..32302e4883a1 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 0fea45535325..7eb35fecc60f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -84,11 +87,24 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. 
- session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov", "flask", "webob", "django") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install( + "mock", + "pytest", + "pytest-cov", + "flask", + "webob", + "django", + "-c", + constraints_path, + ) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -115,6 +131,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -147,8 +166,10 @@ def system(session): "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "-c", + constraints_path, ) - session.install("-e", ".") + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 326162660bee..8b794791c41f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "proto-plus >= 1.11.0", ] diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 36ff3816601d..2af114c7d5e7 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7246e7b18d75fe252928d93576fcbb4f3d4be1f2" + "sha": "bc75a0be5403ab4533b0241cead8a0f4e841d751" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "eda422b90c3dde4a872a13e6b78a8f802c40d0db" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } } ], @@ -74,16 +74,21 @@ ".kokoro/samples/lint/presubmit.cfg", ".kokoro/samples/python3.6/common.cfg", ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic-head.cfg", ".kokoro/samples/python3.6/periodic.cfg", ".kokoro/samples/python3.6/presubmit.cfg", ".kokoro/samples/python3.7/common.cfg", ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic-head.cfg", ".kokoro/samples/python3.7/periodic.cfg", ".kokoro/samples/python3.7/presubmit.cfg", ".kokoro/samples/python3.8/common.cfg", ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic-head.cfg", ".kokoro/samples/python3.8/periodic.cfg", ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples-against-head.sh", + ".kokoro/test-samples-impl.sh", ".kokoro/test-samples.sh", ".kokoro/trampoline.sh", ".kokoro/trampoline_v2.sh", diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt 
b/packages/google-cloud-logging/testing/constraints-3.6.txt index 0e0bdeb0b73e..ae89ab4a1cca 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -5,6 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 +google-api-core==1.22.2 google-cloud-core==1.4.1 -proto-plus==1.11.0 \ No newline at end of file +proto-plus==1.11.0 From 38188fe4bcd9cd2f4898556a746b63b19c765fd2 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 2 Apr 2021 16:18:58 -0700 Subject: [PATCH 454/855] chore: remove logging-v2-py.tar.gz from file tracking (#248) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 +-- 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 2af114c7d5e7..703aaf0e5ac3 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "bc75a0be5403ab4533b0241cead8a0f4e841d751" + "sha": "79b37c3566e71880c1b63a3c3b7e04e9df910c2c" } }, { @@ -137,7 +137,6 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From 1eed05208853790a764c6ff8375289d588c0dee6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 5 Apr 2021 07:42:27 -0700 Subject: [PATCH 455/855] build(python): update docfx job to use new plugin (#250) Source-Author: Dan Lee <71398022+dandhlee@users.noreply.github.com> Source-Date: Tue Mar 30 19:36:37 2021 -0400 Source-Repo: googleapis/synthtool Source-Sha: 4501974ad08b5d693311457e2ea4ce845676e329 Source-Link: https://github.com/googleapis/synthtool/commit/4501974ad08b5d693311457e2ea4ce845676e329 --- packages/google-cloud-logging/noxfile.py | 4 +--- packages/google-cloud-logging/synth.metadata | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 7eb35fecc60f..eec35ebb4b7f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -230,9 +230,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 703aaf0e5ac3..840550083af7 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "79b37c3566e71880c1b63a3c3b7e04e9df910c2c" + "sha": "ecefea40c367aa2a50ee6591241e18c3ac1331d1" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } } ], From 40c6bfda6b60a182880257aa03e09f9e624359b3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 08:30:06 -0700 Subject: [PATCH 456/855] chore: Add license headers for python config files (#253) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/920075d4-7b9c-4b14-9957-7b33425ad95b/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc --- .../google-cloud-logging/.pre-commit-config.yaml | 14 ++++++++++++++ packages/google-cloud-logging/docs/conf.py | 13 +++++++++++++ packages/google-cloud-logging/synth.metadata | 6 +++--- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 32302e4883a1..8912e9b5d7d7 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 6da1e2e7988d..8e1d46bc779a 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-logging documentation build configuration file # diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index 840550083af7..c881df5796a8 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "ecefea40c367aa2a50ee6591241e18c3ac1331d1" + "sha": "7eeb6f9b62a764c687b129713b4fba6ce006fc45" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } } ], From a106b8849d6fdd0e85d96825392cf6978fed623c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Apr 2021 22:10:52 +0200 Subject: [PATCH 457/855] chore(deps): update dependency google-cloud-storage to v1.37.0 (#243) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f7f5bbe0eff4..83ffc5b5af8c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.13.1 -google-cloud-storage==1.36.2 +google-cloud-storage==1.37.0 google-cloud-pubsub==2.4.1 From 4d47b7e0159074467442ee55ef67aa5128908fbb Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 14:41:47 -0400 Subject: [PATCH 458/855] chore: prevent normalization of semver versioning (#259) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- packages/google-cloud-logging/setup.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8b794791c41f..c88b4ebd8d10 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -17,6 +17,20 @@ import setuptools +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion # Package metadata. 
@@ -58,7 +72,7 @@ setuptools.setup( name=name, - version=version, + version=sic(version), description=description, long_description=readme, author="Google LLC", From ba02109163b13173d5d037d57f6472730561ddaa Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Apr 2021 22:12:16 +0200 Subject: [PATCH 459/855] chore(deps): update dependency google-cloud-storage to v1.37.1 (#255) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 83ffc5b5af8c..30afb45a2afe 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.13.1 -google-cloud-storage==1.37.0 +google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 784b94ae356e3841292a804e4f7b328b058bd209 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 20 Apr 2021 13:13:23 -0700 Subject: [PATCH 460/855] chore: Re-generated to pick up changes from self (#260) --- packages/google-cloud-logging/logging-v2-py.tar.gz | 0 packages/google-cloud-logging/synth.metadata | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/logging-v2-py.tar.gz diff --git a/packages/google-cloud-logging/logging-v2-py.tar.gz b/packages/google-cloud-logging/logging-v2-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata index c881df5796a8..345edc6bc267 100644 --- a/packages/google-cloud-logging/synth.metadata +++ b/packages/google-cloud-logging/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7eeb6f9b62a764c687b129713b4fba6ce006fc45" + "sha": "6743654727f56ce65bdd6dc45075ef41ebb84bcf" } }, { @@ -137,6 +137,7 @@ "google/cloud/logging_v2/types/logging.py", "google/cloud/logging_v2/types/logging_config.py", "google/cloud/logging_v2/types/logging_metrics.py", + "logging-v2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From 1e5ed7a2a06edadbcd537057547e4937e535d08c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 26 Apr 2021 17:45:00 -0400 Subject: [PATCH 461/855] chore: migrate to owl bot (#270) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: copy files from googleapis-gen 130ce904e5d546c312943d10f48799590f9c0f66 * chore: run the post processor * 🦉 Updates from OwlBot Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 + .../google-cloud-logging/.github/.OwlBot.yaml | 26 +++ .../.github/header-checker-lint.yml | 2 +- .../google-cloud-logging/.kokoro/release.sh | 4 +- .../.kokoro/release/common.cfg | 14 +- .../docs/_static/custom.css | 13 +- .../{synth.py => owlbot.py} | 51 +++--- packages/google-cloud-logging/renovate.json | 5 +- .../samples/snippets/noxfile.py | 10 +- packages/google-cloud-logging/synth.metadata | 161 ------------------ 10 files changed, 82 insertions(+), 208 deletions(-) create mode 100644 packages/google-cloud-logging/.github/.OwlBot.lock.yaml create mode 100644 packages/google-cloud-logging/.github/.OwlBot.yaml rename packages/google-cloud-logging/{synth.py => owlbot.py} (69%) delete mode 100644 
packages/google-cloud-logging/synth.metadata diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..29084e8a33af --- /dev/null +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/packages/google-cloud-logging/.github/.OwlBot.yaml b/packages/google-cloud-logging/.github/.OwlBot.yaml new file mode 100644 index 000000000000..63a2aab5460e --- /dev/null +++ b/packages/google-cloud-logging/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/logging/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 + diff --git a/packages/google-cloud-logging/.github/header-checker-lint.yml b/packages/google-cloud-logging/.github/header-checker-lint.yml index fc281c05bd55..6fe78aa7987a 100644 --- a/packages/google-cloud-logging/.github/header-checker-lint.yml +++ b/packages/google-cloud-logging/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 2ef944a00e3f..f8994b0341dc 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
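# A hypothetical sketch, using plain Python re rather than Owl Bot itself, of
# how the deep-copy-regex in .OwlBot.yaml above stages one generated file; the
# source path is an assumed example, not taken from a real generator run:
import re

src = "/google/logging/v2/logging-v2-py/google/cloud/logging_v2/types/log_entry.py"
print(re.sub(r"/google/logging/(v.*)/.*-py/(.*)", r"/owl-bot-staging/\1/\2", src))
# -> /owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py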
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-logging python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 9fedb82bbf67..e0012bf9c19e 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-logging/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/packages/google-cloud-logging/docs/_static/custom.css b/packages/google-cloud-logging/docs/_static/custom.css index bcd37bbd3c4a..b0a295464b23 100644 --- a/packages/google-cloud-logging/docs/_static/custom.css +++ b/packages/google-cloud-logging/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-logging/synth.py b/packages/google-cloud-logging/owlbot.py similarity index 69% rename from packages/google-cloud-logging/synth.py rename to packages/google-cloud-logging/owlbot.py index 7f7008a39cbd..f012b1191d46 100644 --- a/packages/google-cloud-logging/synth.py +++ b/packages/google-cloud-logging/owlbot.py @@ -17,38 +17,33 @@ from synthtool import gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -# ---------------------------------------------------------------------------- -# Generate logging GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="logging", - version="v2", - bazel_target="//google/logging/v2:logging-v2-py", - include_protos=True, -) +default_version = "v2" -s.move( - library, - excludes=[ - "setup.py", - "README.rst", - "google/cloud/logging/__init__.py", # generated types are hidden from users - "google/cloud/logging_v2/__init__.py", - "docs/index.rst", - "docs/logging_v2", # Don't include gapic library docs. 
Users should use the hand-written layer instead - "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer - ], -) +for library in s.get_staging_dirs(default_version): + if library.name == "v2": + # Fix generated unit tests + s.replace( + library / "tests/unit/gapic/logging_v2/test_logging_service_v2.py", + "MonitoredResource\(\s*type_", + "MonitoredResource(type" + ) -# Fix generated unit tests -s.replace( - "tests/unit/gapic/logging_v2/test_logging_service_v2.py", - "MonitoredResource\(\s*type_", - "MonitoredResource(type" -) + s.move( + library, + excludes=[ + "setup.py", + "README.rst", + "google/cloud/logging/__init__.py", # generated types are hidden from users + "google/cloud/logging_v2/__init__.py", + "docs/index.rst", + "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead + "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer + ], + ) + +s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index f08bc22c9a55..c04895563e69 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 97bf7da80e39..956cdf4f9250 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -172,10 +172,16 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/packages/google-cloud-logging/synth.metadata b/packages/google-cloud-logging/synth.metadata deleted file mode 100644 index 345edc6bc267..000000000000 --- a/packages/google-cloud-logging/synth.metadata +++ /dev/null @@ -1,161 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-logging.git", - "sha": "6743654727f56ce65bdd6dc45075ef41ebb84bcf" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "149a3a84c29c9b8189576c7442ccb6dcf6a8f95b", - "internalRef": "364411656" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - }, - { - 
"git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "logging", - "apiVersion": "v2", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "google/cloud/logging/py.typed", - "google/cloud/logging_v2/proto/log_entry.proto", - "google/cloud/logging_v2/proto/logging.proto", - "google/cloud/logging_v2/proto/logging_config.proto", - "google/cloud/logging_v2/proto/logging_metrics.proto", - "google/cloud/logging_v2/py.typed", - "google/cloud/logging_v2/services/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/async_client.py", - "google/cloud/logging_v2/services/config_service_v2/client.py", - "google/cloud/logging_v2/services/config_service_v2/pagers.py", - "google/cloud/logging_v2/services/config_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/config_service_v2/transports/base.py", - "google/cloud/logging_v2/services/config_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/services/logging_service_v2/__init__.py", - "google/cloud/logging_v2/services/logging_service_v2/async_client.py", - "google/cloud/logging_v2/services/logging_service_v2/client.py", - 
"google/cloud/logging_v2/services/logging_service_v2/pagers.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/base.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/services/metrics_service_v2/__init__.py", - "google/cloud/logging_v2/services/metrics_service_v2/async_client.py", - "google/cloud/logging_v2/services/metrics_service_v2/client.py", - "google/cloud/logging_v2/services/metrics_service_v2/pagers.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/base.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py", - "google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py", - "google/cloud/logging_v2/types/__init__.py", - "google/cloud/logging_v2/types/log_entry.py", - "google/cloud/logging_v2/types/logging.py", - "google/cloud/logging_v2/types/logging_config.py", - "google/cloud/logging_v2/types/logging_metrics.py", - "logging-v2-py.tar.gz", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/logging_v2/__init__.py", - "tests/unit/gapic/logging_v2/test_config_service_v2.py", - "tests/unit/gapic/logging_v2/test_logging_service_v2.py", - "tests/unit/gapic/logging_v2/test_metrics_service_v2.py" - ] -} \ No newline at end of file From 89742b452993776fe03db4f3d980d404696168c0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 26 Apr 2021 23:45:20 +0200 Subject: [PATCH 462/855] chore(deps): update dependency google-cloud-bigquery to v2.14.0 (#271) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 30afb45a2afe..3540b111a713 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.13.1 +google-cloud-bigquery==2.14.0 google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 3e0cbf873e7e24c2c1def22899a6850236cfdc09 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 20:21:37 -0400 Subject: [PATCH 463/855] chore(revert): revert preventing normalization (#269) --- packages/google-cloud-logging/setup.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c88b4ebd8d10..8b794791c41f 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -17,20 +17,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() 
-try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -72,7 +58,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", From 3b0ecd2012096e410b6ac4eb7c5070340279afbf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:20:55 +0200 Subject: [PATCH 464/855] chore(deps): update dependency google-cloud-bigquery to v2.15.0 (#277) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 3540b111a713..bfff198eabbc 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.14.0 +google-cloud-bigquery==2.15.0 google-cloud-storage==1.37.1 google-cloud-pubsub==2.4.1 From 6ff31589628166644733992ebb45a623cedbf5cc Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 15:31:21 -0700 Subject: [PATCH 465/855] chore: add SECURITY.md (#275) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- packages/google-cloud-logging/SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/google-cloud-logging/SECURITY.md diff --git a/packages/google-cloud-logging/SECURITY.md b/packages/google-cloud-logging/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-logging/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
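Aside on the revert above: whatever motivated dropping the sic() workaround,
PEP 440 treats the raw and normalized spellings as the same version, so
existing pins keep resolving either way. A minimal sketch with the packaging
library:

    from packaging.version import Version

    # Both spellings parse to the same PEP 440 version, so comparisons,
    # pins, and resolvers behave identically for consumers.
    assert Version("2.4.0-RC1") == Version("2.4.0rc1")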
From 15ec6ecf8f3b414fdf053e35ce983241ad71f084 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:38:06 +0200 Subject: [PATCH 466/855] chore(deps): update dependency pytest to v6.2.3 (#273) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index d0029c6de49e..93f50ad13971 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==6.0.1 +pytest==6.2.3 From f3019b07751641297bd28525ee5f44ddd15c43cb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 00:41:46 +0200 Subject: [PATCH 467/855] chore(deps): update dependency google-cloud-storage to v1.38.0 (#272) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index bfff198eabbc..f06182601629 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 google-cloud-bigquery==2.15.0 -google-cloud-storage==1.37.1 +google-cloud-storage==1.38.0 google-cloud-pubsub==2.4.1 From 1f67a4391978c13cca26625fef2c2d361fe96739 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 May 2021 20:40:44 +0200 Subject: [PATCH 468/855] chore(deps): update dependency pytest to v6.2.4 (#280) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 93f50ad13971..766a8035d690 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.10.0 -pytest==6.2.3 +pytest==6.2.4 From 4544e8d7bd1c4630d9235e0a52064319d01111c2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 May 2021 20:00:57 +0200 Subject: [PATCH 469/855] chore(deps): update dependency google-cloud-bigquery to v2.16.0 (#282) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f06182601629..de585f1bf2fc 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.3.1 -google-cloud-bigquery==2.15.0 +google-cloud-bigquery==2.16.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.4.1 From 148bac31cc566c2b16dfb3644b19676ad479912c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 May 2021 12:01:02 -0700 Subject: [PATCH 470/855] chore: new owl bot post processor docker image (#287) 
gcr.io/repo-automation-bots/owlbot-python:latest@sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 5 ++--- .../google-cloud-logging/.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/CONTRIBUTING.rst | 16 +--------------- packages/google-cloud-logging/noxfile.py | 14 ++------------ 4 files changed, 6 insertions(+), 31 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 29084e8a33af..b5c26ed01808 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 8912e9b5d7d7..1bbd787833ec 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.1 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index f6ddd72684f0..4604493b61f3 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index eec35ebb4b7f..493d67e6f55a 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. 
Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -140,9 +133,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") From 5d9aa4bd781d5591ef6ed60a6ce25a962c8a11df Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 May 2021 16:53:15 -0400 Subject: [PATCH 471/855] chore: add library type to .repo-metadata.json (#285) --- packages/google-cloud-logging/.repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 30541e78b44f..911d58dca90d 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", "release_level": "ga", "language": "python", + "library_type": "GAPIC_COMBO", "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", From a36dab8a53875a6195f3b5c8f0602a95d86bc906 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 10 May 2021 14:23:01 -0700 Subject: [PATCH 472/855] fix: changed region format on serverless (#291) --- .../google/cloud/logging_v2/handlers/_monitored_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index ad1de4d2bbec..e257f08e49a4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -73,7 +73,7 @@ def _create_functions_resource(): labels={ "project_id": project, "function_name": function_name, - "region": region if region else "", + "region": region.split("/")[-1] if region else "", }, ) return resource @@ -131,7 +131,7 @@ def _create_cloud_run_resource(): "project_id": project, "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""), "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""), - "location": region if region else "", + "location": region.split("/")[-1] if region else "", "configuration_name": os.environ.get(_CLOUD_RUN_CONFIGURATION_ID, ""), }, ) From 7f394de9ed8fa4e4564e21f0d2aa196607b8daa4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 11 May 2021 00:00:38 +0200 Subject: [PATCH 473/855] chore(deps): update dependency google-cloud-pubsub to v2.4.2 (#288) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index de585f1bf2fc..489b0453d9df 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ 
google-cloud-logging==2.3.1 google-cloud-bigquery==2.16.0 google-cloud-storage==1.38.0 -google-cloud-pubsub==2.4.1 +google-cloud-pubsub==2.4.2 From ad83a57b82907826c5daa36c659e6f6ea13ad721 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 11 May 2021 14:11:49 -0700 Subject: [PATCH 474/855] fix: remove noisy logs (#290) --- .../cloud/logging_v2/handlers/handlers.py | 11 ++++++++-- .../environment/deployable/python/snippets.py | 20 +++++++++++++++++++ .../tests/unit/test_client.py | 16 +++++++++++++-- tests/environment | 2 +- 4 files changed, 44 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 973321423a14..3580a4e98c63 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -24,7 +24,14 @@ DEFAULT_LOGGER_NAME = "python" -EXCLUDED_LOGGER_DEFAULTS = ("google.cloud", "google.auth", "google_auth_httplib2") +"""Exclude internal logs from propagating through handlers""" +EXCLUDED_LOGGER_DEFAULTS = ( + "google.cloud", + "google.auth", + "google_auth_httplib2", + "google.api_core.bidi", + "werkzeug", +) _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") @@ -221,6 +228,6 @@ def setup_logging( logger.setLevel(log_level) logger.addHandler(handler) for logger_name in all_excluded_loggers: + # prevent excluded loggers from propagating logs to handler logger = logging.getLogger(logger_name) logger.propagate = False - logger.addHandler(logging.StreamHandler()) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index c4011dd473cf..99ec66c5a397 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -39,6 +39,26 @@ def simplelog(log_name=None, log_text="simple_log", severity="DEFAULT", **kwargs logger.log_text(log_text, severity=severity) +def pylogging_json(log_text=None, severity="WARNING", **kwargs): + # allowed severity: debug, info, warning, error, critical + + # build json message + message = {} + for k in kwargs.keys(): + message[k] = kwargs[k] + + severity = severity.upper() + if severity == "DEBUG": + logging.debug(message) + elif severity == "INFO": + logging.info(message) + elif severity == "WARNING": + logging.warning(message) + elif severity == "ERROR": + logging.error(message) + else: + logging.critical(message) + def pylogging(log_text="pylogging", severity="WARNING", **kwargs): # allowed severity: debug, info, warning, error, critical diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index f33f1cbdc861..9dbfa87fd0b9 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -799,7 +799,13 @@ def test_setup_logging(self): handler.transport.worker.stop() expected_kwargs = { - "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"), + "excluded_loggers": ( + "google.cloud", + "google.auth", + "google_auth_httplib2", + "google.api_core.bidi", + "werkzeug", + ), "log_level": 20, } self.assertEqual(kwargs, expected_kwargs) @@ -836,7 +842,13 @@ def test_setup_logging_w_extra_kwargs(self): 
handler.transport.worker.stop() expected_kwargs = { - "excluded_loggers": ("google.cloud", "google.auth", "google_auth_httplib2"), + "excluded_loggers": ( + "google.cloud", + "google.auth", + "google_auth_httplib2", + "google.api_core.bidi", + "werkzeug", + ), "log_level": 20, } self.assertEqual(kwargs, expected_kwargs) diff --git a/tests/environment b/tests/environment index df1b7c131575..f1937814bf78 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit df1b7c131575f8eb59120cef75709496602b7665 +Subproject commit f1937814bf78953a160fa33600f6af8cfdb82527 From 400daa4abcfa68dbbd7ee32153b8f538948c0612 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 12 May 2021 10:57:01 -0700 Subject: [PATCH 475/855] fix: improve API compatibility for next release (#292) --- packages/google-cloud-logging/.gitignore | 3 -- .../cloud/logging_v2/handlers/_helpers.py | 13 +++++++ .../cloud/logging_v2/handlers/handlers.py | 18 ++++++++-- .../deployable/python/requirements.txt | 5 +-- .../tests/unit/handlers/test__helpers.py | 18 ++++------ .../tests/unit/handlers/test_handlers.py | 18 +++++----- .../unit/handlers/test_structured_log.py | 35 +++++++++++++------ tests/environment | 2 +- 8 files changed, 72 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore index ba7b78cfdaf8..b4243ced74e4 100644 --- a/packages/google-cloud-logging/.gitignore +++ b/packages/google-cloud-logging/.gitignore @@ -61,6 +61,3 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc pylintrc.test - -# ignore owlbot -owl-bot-staging diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 6d9debfe26cc..931b7a2f5e3a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -68,7 +68,10 @@ def get_request_data_from_flask(): http_request = { "requestMethod": flask.request.method, "requestUrl": flask.request.url, + "requestSize": flask.request.content_length, "userAgent": flask.request.user_agent.string, + "remoteIp": flask.request.remote_addr, + "referer": flask.request.referrer, "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } @@ -93,11 +96,21 @@ def get_request_data_from_django(): if request is None: return None, None, None + # convert content_length to int if it exists + content_length = None + try: + content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) + except (ValueError, TypeError): + content_length = None + # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), + "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), + "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), + "referer": request.META.get(_DJANGO_REFERER_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 3580a4e98c63..b3b787fe22ff 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -45,6 +45,10 @@ class CloudLoggingFilter(logging.Filter): overwritten using the `extras` argument when 
writing logs. """ + # The subset of http_request fields have been tested to work consistently across GCP environments + # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#httprequest + _supported_http_fields = ("requestMethod", "requestUrl", "userAgent", "protocol") + def __init__(self, project=None, default_labels=None): self.project = project self.default_labels = default_labels if default_labels else {} @@ -74,8 +78,17 @@ def filter(self, record): Add new Cloud Logging data to each LogRecord as it comes in """ user_labels = getattr(record, "labels", {}) + # infer request data from the environment inferred_http, inferred_trace, inferred_span = get_request_data() + if inferred_http is not None: + # filter inferred_http to include only well-supported fields + inferred_http = { + k: v + for (k, v) in inferred_http.items() + if k in self._supported_http_fields and v is not None + } if inferred_trace is not None and self.project is not None: + # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" # set new record values record._resource = getattr(record, "resource", None) @@ -84,13 +97,14 @@ def filter(self, record): record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) record._labels = {**self.default_labels, **user_labels} or None - # create guaranteed string representations for structured logging - record._msg_str = record.msg or "" + # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" record._http_request_str = json.dumps(record._http_request or {}) record._source_location_str = json.dumps(record._source_location or {}) record._labels_str = json.dumps(record._labels or {}) + # break quotes for parsing through structured logging + record._msg_str = str(record.msg).replace('"', '\\"') if record.msg else "" return True diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index 4af7b519062c..37d80d55bba1 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -1,2 +1,3 @@ -flask -google-cloud-pubsub +flask==1.1.2 +google-cloud-pubsub==2.3.0 +click==7.1.2 diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index e1230991fa11..d26e700e8b50 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -75,23 +75,18 @@ def test_http_request_populated(self): expected_agent = "Mozilla/5.0" expected_referrer = "self" expected_ip = "10.1.2.3" - body_content = "test" headers = { "User-Agent": expected_agent, "Referer": expected_referrer, } app = self.create_app() - with app.test_client() as c: - c.put( - path=expected_path, - data=body_content, - environ_base={"REMOTE_ADDR": expected_ip}, - headers=headers, - ) + with app.test_request_context( + expected_path, headers=headers, environ_base={"REMOTE_ADDR": expected_ip} + ): http_request, *_ = self._call_fut() - self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestMethod"], "GET") self.assertEqual(http_request["requestUrl"], 
expected_path) self.assertEqual(http_request["userAgent"], expected_agent) self.assertEqual(http_request["protocol"], "HTTP/1.1") @@ -99,10 +94,9 @@ def test_http_request_populated(self): def test_http_request_sparse(self): expected_path = "http://testserver/123" app = self.create_app() - with app.test_client() as c: - c.put(path=expected_path) + with app.test_request_context(expected_path): http_request, *_ = self._call_fut() - self.assertEqual(http_request["requestMethod"], "PUT") + self.assertEqual(http_request["requestMethod"], "GET") self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["protocol"], "HTTP/1.1") diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index f0a1f81def67..4ba05212179b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -134,22 +134,20 @@ def test_record_with_request(self): expected_span = "456" combined_trace = f"{expected_trace}/{expected_span}" expected_request = { - "requestMethod": "PUT", + "requestMethod": "GET", "requestUrl": expected_path, "userAgent": expected_agent, "protocol": "HTTP/1.1", } app = self.create_app() - with app.test_client() as c: - c.put( - path=expected_path, - data="body", - headers={ - "User-Agent": expected_agent, - "X_CLOUD_TRACE_CONTEXT": combined_trace, - }, - ) + with app.test_request_context( + expected_path, + headers={ + "User-Agent": expected_agent, + "X_CLOUD_TRACE_CONTEXT": combined_trace, + }, + ): success = filter_obj.filter(record) self.assertTrue(success) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 4b83a4c2d24b..0536583a514c 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -104,6 +104,23 @@ def test_format_minimal(self): value, result[key], f"expected_payload[{key}] != result[{key}]" ) + def test_format_with_quotes(self): + """ + When logging a message containing quotes, escape chars should be added + """ + import logging + import json + + handler = self._make_one() + message = '"test"' + expected_result = '\\"test\\"' + record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record.created = None + handler.filter(record) + result = json.loads(handler.format(record)) + result["message"] = expected_result + self.assertEqual(result["message"], expected_result) + def test_format_with_request(self): import logging import json @@ -121,7 +138,7 @@ def test_format_with_request(self): "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, "httpRequest": { - "requestMethod": "PUT", + "requestMethod": "GET", "requestUrl": expected_path, "userAgent": expected_agent, "protocol": "HTTP/1.1", @@ -129,15 +146,13 @@ def test_format_with_request(self): } app = self.create_app() - with app.test_client() as c: - c.put( - path=expected_path, - data="body", - headers={ - "User-Agent": expected_agent, - "X_CLOUD_TRACE_CONTEXT": trace_header, - }, - ) + with app.test_request_context( + expected_path, + headers={ + "User-Agent": expected_agent, + "X_CLOUD_TRACE_CONTEXT": trace_header, + }, + ): handler.filter(record) result = json.loads(handler.format(record)) for (key, value) in 
expected_payload.items(): diff --git a/tests/environment b/tests/environment index f1937814bf78..912d193cfb90 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit f1937814bf78953a160fa33600f6af8cfdb82527 +Subproject commit 912d193cfb90b741fee035dff29fa0d5959a5186 From 16c5da04fd8a7e0a9747ad03a1dd63c645245169 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 12 May 2021 11:25:32 -0700 Subject: [PATCH 476/855] chore: merged environment tests to main --- .../tests/environment/.gitignore | 1 + .../tests/environment/SECURITY.md | 7 + .../tests/environment/deployable/go/go.mod | 10 +- .../tests/environment/deployable/go/go.sum | 184 ++++++++++++++++-- .../tests/environment/deployable/go/main.go | 140 ++++++++++--- .../env_scripts/go/appengine_standard.sh | 87 +++++++++ .../envctl/env_scripts/go/cloudrun.sh | 9 +- .../envctl/env_scripts/go/compute.sh | 88 +++++++++ .../envctl/env_scripts/go/functions.sh | 7 +- .../envctl/env_scripts/go/kubernetes.sh | 136 +++++++++++++ .../tests/environment/tests/common/common.py | 45 +++-- .../environment/tests/common/script_utils.py | 2 +- .../tests/go/test_appengine_flex_container.py | 26 --- .../tests/go/test_appengine_standard.py | 3 + .../environment/tests/go/test_cloudrun.py | 9 + .../environment/tests/go/test_compute.py | 3 + .../environment/tests/go/test_functions.py | 8 +- .../environment/tests/go/test_kubernetes.py | 3 + .../tests/nodejs/test_functions.py | 7 +- tests/environment | 2 +- 20 files changed, 675 insertions(+), 102 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/SECURITY.md create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/appengine_standard.sh create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh delete mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore index 1936b00310cd..60a42c15cd62 100644 --- a/packages/google-cloud-logging/tests/environment/.gitignore +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -139,5 +139,6 @@ cython_debug/ # Go files deployable/go/google-cloud-go +deployable/go/logging deployable/go/lib.tar deployable/go/vendor \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/SECURITY.md b/packages/google-cloud-logging/tests/environment/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
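Two of the fixes above lend themselves to quick sketches (the sample values
below are assumptions, not taken from a real environment). The serverless
region fix (#291) keeps only the short region name when the environment
reports a full resource path, and the API-compatibility fix (#292) escapes
double quotes in messages bound for the structured-log JSON payload, as
test_format_with_quotes exercises:

    # region fix: serverless metadata may report a full resource path
    region = "projects/123/regions/us-central1"
    print(region.split("/")[-1] if region else "")  # -> us-central1

    # quote escaping applied to record.msg before structured formatting
    msg = '"test"'
    print(str(msg).replace('"', '\\"') if msg else "")  # -> \"test\"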
diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod index 61bddf0997c7..716a271e893c 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod @@ -3,10 +3,10 @@ module github.com/googleapis/env-tests-logging/deployable/go/main go 1.15 require ( - cloud.google.com/go v0.78.0 - cloud.google.com/go/logging v1.3.0 + cloud.google.com/go v0.81.0 + cloud.google.com/go/logging v1.4.0 + cloud.google.com/go/pubsub v1.3.1 + google.golang.org/grpc v1.37.0 ) -replace cloud.google.com/go => ./google-cloud-go/. - -replace cloud.google.com/go/logging => ./google-cloud-go/logging +replace cloud.google.com/go/logging => ./logging diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum index 71e5458c47c9..d1437bdb5a8d 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum @@ -1,11 +1,36 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0 h1:at8Tk2zUz63cLPR0JPWm5vp77pEZmzxEQBEfRKn1VV8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod 
h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1 h1:ukjixP1wl0LpnZ6LWtZJ0mX5tBmjp1f8Sqer8Z2OMUU= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= @@ -30,6 +55,7 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -40,12 +66,19 @@ github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -53,28 +86,51 @@ 
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof 
v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= @@ -90,21 +146,27 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= 
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= @@ -113,13 +175,17 @@ golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMx golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= @@ -127,6 +193,7 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -140,26 +207,42 @@ golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod 
h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93 h1:alLDrZkL34Y2bnGHfvC1CYBRBXCXgx8AC2vY4MRtYX4= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78 h1:rPRtHfUb0UKZeZ6GH4K4Nt4YRbE9V1u+QZX5upZXqJQ= +golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -167,54 +250,83 @@ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073 h1:8qxJSnu+7dRq6upnbntrmriWByIakBuct5OM/MdQC1M= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210412220455-f1c623a9e750 h1:ZBu6861dZq7xBnG1bn5SRU0vA8nx42at4+kP07FMTog= +golang.org/x/sys v0.0.0-20210412220455-f1c623a9e750/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= 
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -222,12 +334,20 @@ golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= @@ -236,6 +356,9 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -247,29 +370,42 @@ google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/ google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.40.0 h1:uWrpz12dpVPn7cojP82mk02XDgTJLDPc2KbVTxrWb4A= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod 
h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.45.0 h1:pqMffJFLBVUDIoYsHcqtxgQVTsmxMDpYLOc5MT4Jrww= +google.golang.org/api v0.45.0/go.mod h1:ISLIJCedJolbZvDfAk+Ctuq5hf+aJ33WgtUsfyFoLXA= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto 
v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -277,9 +413,22 @@ google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210302174412-5ede27ff9881 h1:SYuy3hIRsBIROE0aZwsJZOEJNC/n9/p0FmLEU9C31AE= -google.golang.org/genproto v0.0.0-20210302174412-5ede27ff9881/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210413151531-c14fb6ef47c3/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210420162539-3c870d7478d2 h1:g2sJMUGCpeHZqTx8p3wsAWRS64nFq20i4dvJWcKGqvY= +google.golang.org/genproto v0.0.0-20210420162539-3c870d7478d2/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -290,10 +439,16 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod 
h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0 h1:uSZWeQJX5j11bIQ4AJoj+McDBo29cY1MCoC1wO3ts+c= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -303,15 +458,22 @@ google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git 
a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go
index c75af3c4f254..37d2bfafe009 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/go/main.go
+++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go
@@ -17,17 +17,23 @@ package main
 import (
 	"context"
 	"encoding/json"
+	"fmt"
 	"io/ioutil"
 	"log"
 	"net/http"
 	"os"
+	"time"
 
-	// This is replaced by the local version of cloud logging
 	"cloud.google.com/go/compute/metadata"
+	// go/logging is replaced by the local version of cloud logging
 	"cloud.google.com/go/logging"
+	"cloud.google.com/go/pubsub"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
 )
 
-// PubSubMessage is the message format received over HTTP
+// pubSubMessage is the message format received over HTTP
+// ****************** CloudRun ******************
 type pubSubMessage struct {
 	Message struct {
 		Data []byte `json:"data,omitempty"`
@@ -37,7 +43,8 @@ type pubSubMessage struct {
 	Subscription string `json:"subscription"`
 }
 
-// CloudRun: Processes a Pub/Sub message through HTTP.
+// pubsubHTTP processes a Pub/Sub message through HTTP.
+// ****************** CloudRun ******************
 func pubsubHTTP(w http.ResponseWriter, r *http.Request) {
 	var m pubSubMessage
 	body, err := ioutil.ReadAll(r.Body)
@@ -54,43 +61,115 @@ func pubsubHTTP(w http.ResponseWriter, r *http.Request) {
 	msg := string(m.Message.Data)
 	args := m.Message.Attributes
-
-	switch msg {
-	case "simplelog":
-		simplelog(args)
-	case "stdLog":
-		break
-	default:
-		break
-	}
+	testLog(msg, args)
 }
 
 // PubSubMessage is the message format received by CloudFunctions
+// ****************** Functions ******************
 type PubSubMessage struct {
-	Data []byte `json:"data"`
+	Data       []byte            `json:"data"`
 	Attributes map[string]string `json:"attributes"`
 }
 
 // PubsubFunction is a background Cloud Function triggered by Pub/Sub
+// ****************** Functions ******************
 func PubsubFunction(ctx context.Context, m PubSubMessage) error {
 	log.Printf("Data is: %v", string(m.Data))
-	switch string(m.Data) {
-	case "simplelog":
-		simplelog(m.Attributes)
-		break
-	case "stdlog":
-		break
-	default:
-		break
+	testLog(string(m.Data), m.Attributes)
+	return nil
+}
+
+// pullMsgsSync synchronously pulls pubsub messages for a maximum of 2400 seconds
+// ****************** App Engine ******************
+func pullMsgsSync(sub *pubsub.Subscription) error {
+	// Turn on synchronous mode. This makes the subscriber use the Pull RPC rather
+	// than the StreamingPull RPC, which is useful for guaranteeing MaxOutstandingMessages.
+	sub.ReceiveSettings.Synchronous = true
+	sub.ReceiveSettings.MaxOutstandingMessages = 10
+
+	// Receive messages for 2400 seconds.
+	ctx, cancel := context.WithTimeout(ctx, 2400*time.Second)
+	defer cancel()
+
+	// Create a channel to handle messages as they come in.
+	cm := make(chan *pubsub.Message)
+	defer close(cm)
+	// Handle individual messages in a goroutine.
+	go func() {
+		for msg := range cm {
+			log.Printf("Got message: %q\n", string(msg.Data))
+			testLog(string(msg.Data), msg.Attributes)
+			msg.Ack()
+		}
+	}()
+	// Receive blocks until the passed-in context is done.
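+	// Note: Receive may invoke this callback concurrently from multiple
+	// goroutines; forwarding each message into cm serializes handling
+	// (testLog + Ack) in the single consumer goroutine above.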
+	err := sub.Receive(ctx, func(ctx context.Context, msg *pubsub.Message) {
+		cm <- msg
+	})
+	if err != nil && status.Code(err) != codes.Canceled {
+		return fmt.Errorf("receive: %v", err)
 	}
 	return nil
 }
 
+// Initializations for all GCP services
+var ctx context.Context
+
+// init executes for all environments, regardless of whether the code runs as a program or is imported as a package
+func init() {
+	ctx = context.Background()
+}
+
+// main runs for all environments except GCF
 func main() {
-	if os.Getenv("ENABLE_SUBSCRIBER") == "" {
+	// ****************** GAE, GKE, GCE ******************
+	// Enable app subscriber for all environments except GCR
+	if os.Getenv("ENABLE_SUBSCRIBER") == "true" {
+		projectID, err := metadata.ProjectID()
+		if err != nil {
+			log.Fatalf("metadata.ProjectID: %v", err)
+		}
+		topicID := os.Getenv("PUBSUB_TOPIC")
+		if topicID == "" {
+			topicID = "logging-test"
+		}
+		client, err := pubsub.NewClient(ctx, projectID)
+		if err != nil {
+			log.Fatalf("pubsub.NewClient: %v", err)
+		}
+		defer client.Close()
+		subscriptionID := topicID + "-subscriber"
+		topic := client.Topic(topicID)
+
+		// Create a pull subscription to receive messages
+		sub, err := client.CreateSubscription(ctx,
+			subscriptionID,
+			pubsub.SubscriptionConfig{
+				Topic: topic,
+			})
+		if err != nil {
+			log.Fatalf("pubsub.CreateSubscription: %v", err)
+		}
 
-		// Set up PubSub for CloudRun
-		http.HandleFunc("/", pubsubHTTP)
+		// Blocking call; pulls messages from pubsub until the context is cancelled or the test ends
+		err = pullMsgsSync(sub)
+		if err != nil {
+			log.Fatalf("pullMsgsSync failed: %v", err)
+		}
+	}
+
+	// ****************** GCR, GKE, GCE ******************
+	// Listen and serve for all environments except GAE
+	_, gaeApp := os.LookupEnv("GAE_SERVICE")
+	_, gaeRuntime := os.LookupEnv("GAE_VERSION")
+	isAppEngine := gaeApp || gaeRuntime
+	_, isCloudRun := os.LookupEnv("K_CONFIGURATION")
+
+	if !isAppEngine {
+		// Cloud Run is triggered through an HTTP handler
+		if isCloudRun {
+			http.HandleFunc("/", pubsubHTTP)
+		}
 
 		port := os.Getenv("PORT")
 		if port == "" {
@@ -104,6 +183,7 @@ func main() {
 	}
 }
 
+// ****************** Test Cases ******************
 // [Optional] envctl go trigger simplelog log_name=foo,log_text=bar
 func simplelog(args map[string]string) {
 	ctx := context.Background()
@@ -130,3 +210,15 @@ func simplelog(args map[string]string) {
 	logger := client.Logger(logname).StandardLogger(logging.Info)
 	logger.Println(logtext)
 }
+
+// testLog is a helper that invokes the test function matching the given message
+func testLog(message string, attrs map[string]string) {
+	switch message {
+	case "simplelog":
+		simplelog(attrs)
+	case "stdlog":
+		break
+	default:
+		break
+	}
+}
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/appengine_standard.sh
new file mode 100755
index 000000000000..9fd19d919b82
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/appengine_standard.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+SERVICE_NAME="logging-go-standard-$(echo $ENVCTL_ID | head -c 8)"
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service
+  gcloud app services delete $SERVICE_NAME -q 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+deploy() {
+  # create pub/sub topic
+  set +e
+  gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+  set -ex
+  # set up deployment directory
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT/logging
+  tar -cvf $TMP_DIR/lib.tar --exclude .git/ --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ .
+  popd
+  mkdir $TMP_DIR/logging
+  tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/logging
+  # copy test code and Go dependencies
+  cp $REPO_ROOT/deployable/go/*.go $TMP_DIR
+  cp $REPO_ROOT/deployable/go/go.* $TMP_DIR
+
+  # manual_scaling allows 1 instance to continuously run regardless of the load level.
+  cat <<EOF > $TMP_DIR/app.yaml
+  runtime: go115
+  service: $SERVICE_NAME
+  manual_scaling:
+    instances: 1
+  env_variables:
+    ENABLE_SUBSCRIBER: "true"
+    PUBSUB_TOPIC: $SERVICE_NAME
+EOF
+  # deploy
+  pushd $TMP_DIR
+  gcloud app deploy -q
+  popd
+  # wait for the pub/sub subscriber to start
+  NUM_SUBSCRIBERS=0
+  TRIES=0
+  while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do
+    sleep 30
+    NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l)
+    TRIES=$((TRIES + 1))
+  done
+}
+
+filter-string() {
+  echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\""
+}
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh
index 561ae9b5f7fe..3155c856530b 100644
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh
@@ -20,7 +20,6 @@ set -u # undefined variables cause exit
 # Note: there is a max character count constraint
 SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 8)x"
 SA_NAME=$SERVICE_NAME-invoker
-LIBRARY_NAME="google-cloud-go"
 
 add_service_accounts() {
   set +e
@@ -48,11 +47,11 @@ build_go_container() {
   _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
 
   # copy over local copy of library
-  pushd $SUPERREPO_ROOT
-  tar -cvf $_deployable_dir/lib.tar --exclude internal/logging --exclude .nox --exclude docs --exclude __pycache__ .
+  pushd $SUPERREPO_ROOT/logging
+  tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ .
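+  # lib.tar now holds the local logging module sources (tests and tooling
+  # excluded); it is unpacked below so the Docker build copies them into the
+  # image in place of the released library.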
   popd
-  mkdir -p $_deployable_dir/$LIBRARY_NAME
-  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME
+  mkdir -p $_deployable_dir/logging
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging
   # build container
   docker build -t $GCR_PATH $_deployable_dir
   docker push $GCR_PATH
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh
new file mode 100644
index 000000000000..0552de289b00
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+SERVICE_NAME="logging-go-gce-$(echo $ENVCTL_ID | head -c 8)"
+ZONE="us-west2-a"
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete container images
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null
+  # delete service
+  gcloud compute instances delete $SERVICE_NAME -q
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud compute instances describe $SERVICE_NAME > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+build_go_container(){
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  # copy super-repo into deployable dir
+  _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"}
+  _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
+
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT/logging
+  tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ .
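+  # (assumption: the deployable's go.mod replaces cloud.google.com/go/logging
+  # with this local "logging" copy, so the container under test runs the
+  # patched client rather than the released module)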
+  popd
+  mkdir -p $_deployable_dir/logging
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging
+  # build container
+  docker build -t $GCR_PATH $_deployable_dir
+  docker push $GCR_PATH
+}
+
+deploy() {
+  build_go_container
+  gcloud config set compute/zone $ZONE
+  gcloud compute instances create-with-container \
+    $SERVICE_NAME \
+    --container-image $GCR_PATH \
+    --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true"
+  # wait for the pub/sub subscriber to start
+  NUM_SUBSCRIBERS=0
+  TRIES=0
+  while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do
+    sleep 30
+    NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l)
+    TRIES=$((TRIES + 1))
+  done
+}
+
+filter-string() {
+  #INSTANCE_ID=$(gcloud compute instances list --filter="name~^$SERVICE_NAME$" --format="value(ID)")
+  #echo "resource.type=\"gce_instance\" AND resource.labels.instance_id=\"$INSTANCE_ID\""
+  echo "resource.type=\"global\""
+}
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
index 3d33ce32c76c..d9d8bd4b69aa 100644
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
@@ -18,7 +18,6 @@ set -o pipefail # any step in pipe caused failure
 set -u # undefined variables cause exit
 
 SERVICE_NAME="log-go-func-$(echo $ENVCTL_ID | head -c 8)x"
-LIBRARY_NAME="google-cloud-go"
 
 destroy() {
   set +e
@@ -53,11 +52,11 @@ deploy() {
   # Copy over local copy of library to use as dependency
   _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
 
-  pushd $SUPERREPO_ROOT
+  pushd $SUPERREPO_ROOT/logging
   tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ .
   popd
-  mkdir -p $_deployable_dir/google-cloud-go
-  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/google-cloud-go
+  mkdir -p $_deployable_dir/logging
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging
 
   # Create vendor folder based on local dependency
   pushd $REPO_ROOT/deployable/go
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh
new file mode 100644
index 000000000000..91191e375212
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh
@@ -0,0 +1,136 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
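+
+# kubernetes.sh provisions the GKE test environment: it builds and pushes the
+# test container, attaches to (or creates) a cluster, and applies a Deployment
+# that subscribes to the test Pub/Sub topic.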
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+# Note: there is a max character count constraint
+SERVICE_NAME="log-go-gke-$(echo $ENVCTL_ID | head -c 8)x"
+ZONE=us-central1-a
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete container images
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null
+  # delete cluster
+  gcloud container clusters delete --zone $ZONE $SERVICE_NAME -q
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud pubsub subscriptions describe $SERVICE_NAME-subscriber 2> /dev/null
+  if [[ $? != 0 ]]; then
+    echo "FALSE"
+    exit 1
+  fi
+  gcloud container clusters describe --zone $ZONE $SERVICE_NAME > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+attach_or_create_gke_cluster(){
+  set +e
+  gcloud container clusters get-credentials $SERVICE_NAME
+  if [[ $? -ne 0 ]]; then
+    echo "cluster not found. creating..."
+    gcloud container clusters create $SERVICE_NAME \
+      --zone $ZONE \
+      --scopes=gke-default,pubsub
+  fi
+  set -e
+}
+
+build_go_container(){
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  # copy super-repo into deployable dir
+  _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"}
+  _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
+
+  # copy over local copy of library
+  pushd $SUPERREPO_ROOT/logging
+  tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ .
+  popd
+  mkdir -p $_deployable_dir/logging
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging
+  # build container
+  docker build -t $GCR_PATH $_deployable_dir
+  docker push $GCR_PATH
+}
+
+deploy() {
+# local SCRIPT="${1:-router.py}"
+# TODO: double check this doesn't print important tokens/passwords
+  set -x
+  attach_or_create_gke_cluster
+  build_go_container
+  cat <<EOF > $TMP_DIR/gke.yaml
+  apiVersion: apps/v1
+  kind: Deployment
+  metadata:
+    name: $SERVICE_NAME
+  spec:
+    selector:
+      matchLabels:
+        app: $SERVICE_NAME
+    template:
+      metadata:
+        labels:
+          app: $SERVICE_NAME
+      spec:
+        containers:
+        - name: $SERVICE_NAME
+          image: $GCR_PATH
+          env:
+          - name: PUBSUB_TOPIC
+            value: $SERVICE_NAME
+          - name: ENABLE_SUBSCRIBER
+            value: "true"
+EOF
+  # clean cluster
+  set +e
+  kubectl delete deployments --all 2>/dev/null
+  kubectl delete -f $TMP_DIR 2>/dev/null
+  set -e
+  # deploy test container
+  kubectl apply -f $TMP_DIR
+  sleep 60
+  # wait for pod to spin up
+  kubectl wait --for=condition=ready pod -l app=$SERVICE_NAME
+  # wait for the pub/sub subscriber to start
+  NUM_SUBSCRIBERS=0
+  TRIES=0
+  while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do
+    sleep 30
+    NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l)
+    TRIES=$((TRIES + 1))
+  done
+}
+
+filter-string() {
+  echo "resource.type=\"k8s_container\" AND resource.labels.cluster_name=\"$SERVICE_NAME\""
+}
+
diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py
index 8e25fc3e82be..4f37bcfe79be 100644
--- a/packages/google-cloud-logging/tests/environment/tests/common/common.py
+++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py
@@ -58,7 +58,7 @@ def _add_time_condition_to_filter(self, filter_str, timestamp=None):
     def _get_logs(self, filter_str=None):
         if not filter_str:
-            _, filter_str = self._script.run_command(Command.GetFilter)
+            _, filter_str, _ = self._script.run_command(Command.GetFilter)
         iterator = self._client.list_entries(filter_=filter_str)
         entries = list(iterator)
         if not entries:
@@ -100,19 +100,23 @@ def setUpClass(cls):
             raise NotImplementedError("language not set by subclass")
         cls._script = ScriptRunner(cls.environment, cls.language)
         # check if already setup
-        status, _ = cls._script.run_command(Command.Verify)
+        status, _, _ = cls._script.run_command(Command.Verify)
         if status == 0:
             if os.getenv("NO_CLEAN"):
                 # ready to go
                 return
             else:
                 # reset environment
-                status, _ = cls._script.run_command(Command.Destroy)
+                status, _, _ = cls._script.run_command(Command.Destroy)
                 assert status == 0
         # deploy test code to GCE
-        status, _ = cls._script.run_command(Command.Deploy)
+        status, _, err = cls._script.run_command(Command.Deploy)
+        if status != 0:
+            print(err)
         # verify code is running
-        status, _ = cls._script.run_command(Command.Verify)
+        status, _, err = cls._script.run_command(Command.Verify)
+        if status != 0:
+            print(err)
         assert status == 0
 
     @classmethod
@@ -152,22 +156,21 @@ def test_receive_unicode_log(self):
                 found_log = log
         self.assertIsNotNone(found_log, "expected unicode log not found")
 
-    # add back after v3.0.0
-    # def test_monitored_resource(self):
-    #     if self.language != "python":
-    #         # to do: add monitored resource info to go
-    #         return True
-    #     log_text = f"{inspect.currentframe().f_code.co_name}"
-    #     log_list = self.trigger_and_retrieve(log_text, "simplelog")
-    #     found_resource = log_list[-1].resource
-
-    #     self.assertIsNotNone(self.monitored_resource_name)
-    #     self.assertIsNotNone(self.monitored_resource_labels)
-
-    #     self.assertEqual(found_resource.type, self.monitored_resource_name)
-    #     for label in self.monitored_resource_labels:
-    #         self.assertTrue(found_resource.labels[label],
-    #                         f'resource.labels[{label}] is not set')
+    def test_monitored_resource(self):
+        if self.language not in ["nodejs", "go"]:
+            # TODO: other languages to also support this test
+            return True
+        log_text = f"{inspect.currentframe().f_code.co_name}"
+        log_list = self.trigger_and_retrieve(log_text)
+        found_resource = log_list[-1].resource
+
+        self.assertIsNotNone(self.monitored_resource_name)
+        self.assertIsNotNone(self.monitored_resource_labels)
+
+        self.assertEqual(found_resource.type, self.monitored_resource_name)
+        for label in self.monitored_resource_labels:
+            self.assertTrue(found_resource.labels[label],
+                            f'resource.labels[{label}] is not set')
 
     def test_severity(self):
         if self.language != "python":
diff --git a/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py
index 800198d2b141..df8786248730 100644
--- a/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py
+++ b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py
@@ -59,7 +59,7 @@ def run_command(self, command, args=[]):
             print(full_command)
             result = subprocess.run(full_command, capture_output=True)
             complete = True
-            return result.returncode, result.stdout.decode("utf-8")
+            return result.returncode, result.stdout.decode("utf-8"), result.stderr.decode("utf-8")
         except Exception as e:
             print(e)
         finally:
diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py
diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py
deleted file mode 100644
index 56f629dd7ec3..000000000000
--- a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_flex_container.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import unittest
-
-import google.cloud.logging
-
-from ..common.common import Common
-
-
-class TestAppEngineFlexContainer(Common, unittest.TestCase):
-
-    environment = "appengine_flex_container"
-    language = "go"
diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py
index d5f892d93835..d7525a9a2578 100644
--- a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py
+++ b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py
@@ -24,3 +24,6 @@ class TestAppEngineStandard(Common, unittest.TestCase):
 
     environment = "appengine_standard"
     language = "go"
+
+    monitored_resource_name = "gae_app"
+    monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"]
diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py
index 44c9d097b60a..0e6ca4c4f44b 100644
--- a/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py
+++ b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py
@@ -24,3 +24,12 @@ class TestCloudRun(Common, unittest.TestCase):
 
     environment = "cloudrun"
     language = "go"
+
+    monitored_resource_name = "cloud_run_revision"
+    monitored_resource_labels = [
+        "project_id",
+        "service_name",
+        "revision_name",
+        "location",
+        "configuration_name",
+    ]
diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py
index 96ce5786eeaa..59c77fb3eb9b 100644
--- a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py
+++ b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py
@@ -24,3 +24,6 @@ class TestComputeEngine(Common, unittest.TestCase):
 
     environment = "compute"
     language = "go"
+
+    monitored_resource_name = "gce_instance"
+    monitored_resource_labels = ["project_id", "instance_id", "zone"]
\ No newline at end of file
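Each environment test above pins the resource type and label keys that entries written from that environment are expected to carry, and the shared test_monitored_resource asserts them. A minimal sketch of that assertion outside the suite, assuming application-default credentials and a hand-written filter (the suite itself builds the filter via Command.GetFilter):

import itertools

import google.cloud.logging

client = google.cloud.logging.Client()
# Hypothetical filter; e.g. what a Compute Engine environment would emit.
filter_str = 'resource.type="gce_instance"'

for entry in itertools.islice(client.list_entries(filter_=filter_str), 5):
    resource = entry.resource
    assert resource.type == "gce_instance"
    for label in ["project_id", "instance_id", "zone"]:
        assert resource.labels.get(label), f"resource.labels[{label}] is not set"
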
["project_id"] + monitored_resource_name = "cloud_function" + monitored_resource_labels = [ + "region", + "function_name", + "project_id", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py index 85f7104555f0..10090261eb01 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py @@ -24,3 +24,6 @@ class TestKubernetesEngine(Common, unittest.TestCase): environment = "kubernetes" language = "go" + + monitored_resource_name = "gce_instance" + monitored_resource_labels = ["project_id", "instance_id", "zone"] \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py index cdfdf8a35dbc..46f7382e585f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py @@ -19,7 +19,6 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython class TestCloudFunctions(Common, unittest.TestCase): @@ -28,4 +27,8 @@ class TestCloudFunctions(Common, unittest.TestCase): language = "nodejs" monitored_resource_name = "cloud_function" - monitored_resource_labels = ["region", "function_name"] + monitored_resource_labels = [ + "region", + "function_name", + "project_id", + ] diff --git a/tests/environment b/tests/environment index 912d193cfb90..30d6a80838a1 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 912d193cfb90b741fee035dff29fa0d5959a5186 +Subproject commit 30d6a80838a1cae6fb3945f41f3e1d90e815c0c9 From c6bfcb7563cd21b6098c53911ae5ed7327382197 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 12 May 2021 13:48:58 -0700 Subject: [PATCH 477/855] chore: release 2.4.0 (#294) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 21 +++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index e7822c4d5e0c..1828b3e69758 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.4.0](https://www.github.com/googleapis/python-logging/compare/v2.3.1...v2.4.0) (2021-05-12) + + +### Features + +* allow custom labels with standard library logging ([#264](https://www.github.com/googleapis/python-logging/issues/264)) ([fe4de39](https://www.github.com/googleapis/python-logging/commit/fe4de39a87581a9e9f2cee62462ae2f26176194f)) +* Improve source location overrides ([#258](https://www.github.com/googleapis/python-logging/issues/258)) ([6b10b74](https://www.github.com/googleapis/python-logging/commit/6b10b74e2bf65ea406b10585a4c24078348483d2)) +* record source locations ([#254](https://www.github.com/googleapis/python-logging/issues/254)) ([a5c2f8e](https://www.github.com/googleapis/python-logging/commit/a5c2f8e948bb116cbce313f063643aec02d06a84)) +* support span inference 
([#267](https://www.github.com/googleapis/python-logging/issues/267)) ([fcd26eb](https://www.github.com/googleapis/python-logging/commit/fcd26eb0ff4f97c097ca33b2d212d8f83e56686e)) +* use standard output logs on serverless environments ([#228](https://www.github.com/googleapis/python-logging/issues/228)) ([a78f577](https://www.github.com/googleapis/python-logging/commit/a78f577bda17d758551237be84182035ed7b9cce)) + + +### Bug Fixes + +* changed region format on serverless ([#291](https://www.github.com/googleapis/python-logging/issues/291)) ([8872d6f](https://www.github.com/googleapis/python-logging/commit/8872d6f6b2bb979adffad0b054fa40306b68cfc0)) +* changed region format on serverless ([#291](https://www.github.com/googleapis/python-logging/issues/291)) ([360d3d2](https://www.github.com/googleapis/python-logging/commit/360d3d23db7709b7c3946c092ef373f888f47c3d)) +* **deps:** fix minimum required version of google-api-core ([#244](https://www.github.com/googleapis/python-logging/issues/244)) ([874fdfa](https://www.github.com/googleapis/python-logging/commit/874fdfa809063c2bfb33e59aded553e098601876)) +* **deps:** fix minimum required version of google-api-core ([#244](https://www.github.com/googleapis/python-logging/issues/244)) ([37d33fc](https://www.github.com/googleapis/python-logging/commit/37d33fcd8402b973377486a572c04ba6d4029b58)) +* improve API compatibility for next release ([#292](https://www.github.com/googleapis/python-logging/issues/292)) ([1f9517d](https://www.github.com/googleapis/python-logging/commit/1f9517da7302e19198e598d452df58238d4e6306)) +* remove noisy logs ([#290](https://www.github.com/googleapis/python-logging/issues/290)) ([bdf8273](https://www.github.com/googleapis/python-logging/commit/bdf827358de5935f736ecd73ab10b2d861daf690)) + ### [2.3.1](https://www.github.com/googleapis/python-logging/compare/v2.3.0...v2.3.1) (2021-03-24) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 8b794791c41f..36426fb60aaa 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.3.1" +version = "2.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 6320918b70350e01cc8d6d3b02043b9d2639a2b8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 13 May 2021 16:53:45 +0200 Subject: [PATCH 478/855] chore(deps): update dependency google-cloud-logging to v2.4.0 (#296) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 489b0453d9df..5c09377cdb90 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.3.1 +google-cloud-logging==2.4.0 google-cloud-bigquery==2.16.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.4.2 From 7200574aa9fbdea178ede09d567419575af39d7f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 13 May 2021 20:43:32 +0200 Subject: [PATCH 479/855] chore(deps): update dependency google-cloud-bigquery to v2.16.1 (#297) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt
index 5c09377cdb90..97e818af0611 100644
--- a/packages/google-cloud-logging/samples/snippets/requirements.txt
+++ b/packages/google-cloud-logging/samples/snippets/requirements.txt
@@ -1,4 +1,4 @@
 google-cloud-logging==2.4.0
-google-cloud-bigquery==2.16.0
+google-cloud-bigquery==2.16.1
 google-cloud-storage==1.38.0
 google-cloud-pubsub==2.4.2

From 1048f91c7c2f52ce436d83e00d6cdbb34032757d Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 14 May 2021 12:56:07 +0000
Subject: [PATCH 480/855] chore: upgrade gapic-generator-python to 0.46.3
 (#299)

PiperOrigin-RevId: 373649163

Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34

Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3
---
 .../cloud/logging_v2/gapic_metadata.json      |  391 ++++++
 .../cloud/logging_v2/services/__init__.py     |    1 -
 .../services/config_service_v2/__init__.py    |    2 -
 .../config_service_v2/async_client.py         |  120 +-
 .../services/config_service_v2/client.py      |  143 +-
 .../services/config_service_v2/pagers.py      |   10 +-
 .../config_service_v2/transports/__init__.py  |    2 -
 .../config_service_v2/transports/base.py      |  258 ++--
 .../config_service_v2/transports/grpc.py      |   48 +-
 .../transports/grpc_asyncio.py                |   45 +-
 .../services/logging_service_v2/__init__.py   |    2 -
 .../logging_service_v2/async_client.py        |   76 +-
 .../services/logging_service_v2/client.py     |   79 +-
 .../services/logging_service_v2/pagers.py     |   14 +-
 .../logging_service_v2/transports/__init__.py |    2 -
 .../logging_service_v2/transports/base.py     |  174 ++-
 .../logging_service_v2/transports/grpc.py     |   28 +-
 .../transports/grpc_asyncio.py                |   29 +-
 .../services/metrics_service_v2/__init__.py   |    2 -
 .../metrics_service_v2/async_client.py        |   65 +-
 .../services/metrics_service_v2/client.py     |   83 +-
 .../services/metrics_service_v2/pagers.py     |    4 +-
 .../metrics_service_v2/transports/__init__.py |    2 -
 .../metrics_service_v2/transports/base.py     |  152 +-
 .../metrics_service_v2/transports/grpc.py     |   28 +-
 .../transports/grpc_asyncio.py                |   29 +-
 .../google/cloud/logging_v2/types/__init__.py |    2 -
 .../cloud/logging_v2/types/log_entry.py       |   78 +-
 .../google/cloud/logging_v2/types/logging.py  |   99 +-
 .../cloud/logging_v2/types/logging_config.py  |  257 ++--
 .../cloud/logging_v2/types/logging_metrics.py |   65 +-
 .../google-cloud-logging/tests/__init__.py    |   15 +
 .../tests/unit/__init__.py                    |    4 +-
 .../tests/unit/gapic/__init__.py              |   15 +
 .../tests/unit/gapic/logging_v2/__init__.py   |    1 -
 .../logging_v2/test_config_service_v2.py      | 1223 +++++++++--------
 .../logging_v2/test_logging_service_v2.py     |  576 +++++---
 .../logging_v2/test_metrics_service_v2.py     |  530 ++++---
 38 files changed, 2604 insertions(+), 2050 deletions(-)
 create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json
 create mode 100644 packages/google-cloud-logging/tests/unit/gapic/__init__.py

diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json
new file mode 100644
index 000000000000..da4eefd477fc
--- /dev/null
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json
@@ -0,0 +1,391 @@
+{
+  "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+  "language": "python",
+  "libraryPackage": "google.cloud.logging_v2",
+  "protoPackage": "google.logging.v2",
+  "schema": "1.0",
+  "services": {
+    "ConfigServiceV2": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "ConfigServiceV2Client",
+          "rpcs": {
+            "CreateBucket": {
+              "methods": [
+                "create_bucket"
+              ]
+            },
+            "CreateExclusion": {
+              "methods": [
+                "create_exclusion"
+              ]
+            },
+            "CreateSink": {
+              "methods": [
+                "create_sink"
+              ]
+            },
+            "CreateView": {
+              "methods": [
+                "create_view"
+              ]
+            },
+            "DeleteBucket": {
+              "methods": [
+                "delete_bucket"
+              ]
+            },
+            "DeleteExclusion": {
+              "methods": [
+                "delete_exclusion"
+              ]
+            },
+            "DeleteSink": {
+              "methods": [
+                "delete_sink"
+              ]
+            },
+            "DeleteView": {
+              "methods": [
+                "delete_view"
+              ]
+            },
+            "GetBucket": {
+              "methods": [
+                "get_bucket"
+              ]
+            },
+            "GetCmekSettings": {
+              "methods": [
+                "get_cmek_settings"
+              ]
+            },
+            "GetExclusion": {
+              "methods": [
+                "get_exclusion"
+              ]
+            },
+            "GetSink": {
+              "methods": [
+                "get_sink"
+              ]
+            },
+            "GetView": {
+              "methods": [
+                "get_view"
+              ]
+            },
+            "ListBuckets": {
+              "methods": [
+                "list_buckets"
+              ]
+            },
+            "ListExclusions": {
+              "methods": [
+                "list_exclusions"
+              ]
+            },
+            "ListSinks": {
+              "methods": [
+                "list_sinks"
+              ]
+            },
+            "ListViews": {
+              "methods": [
+                "list_views"
+              ]
+            },
+            "UndeleteBucket": {
+              "methods": [
+                "undelete_bucket"
+              ]
+            },
+            "UpdateBucket": {
+              "methods": [
+                "update_bucket"
+              ]
+            },
+            "UpdateCmekSettings": {
+              "methods": [
+                "update_cmek_settings"
+              ]
+            },
+            "UpdateExclusion": {
+              "methods": [
+                "update_exclusion"
+              ]
+            },
+            "UpdateSink": {
+              "methods": [
+                "update_sink"
+              ]
+            },
+            "UpdateView": {
+              "methods": [
+                "update_view"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "ConfigServiceV2AsyncClient",
+          "rpcs": {
+            "CreateBucket": {
+              "methods": [
+                "create_bucket"
+              ]
+            },
+            "CreateExclusion": {
+              "methods": [
+                "create_exclusion"
+              ]
+            },
+            "CreateSink": {
+              "methods": [
+                "create_sink"
+              ]
+            },
+            "CreateView": {
+              "methods": [
+                "create_view"
+              ]
+            },
+            "DeleteBucket": {
+              "methods": [
+                "delete_bucket"
+              ]
+            },
+            "DeleteExclusion": {
+              "methods": [
+                "delete_exclusion"
+              ]
+            },
+            "DeleteSink": {
+              "methods": [
+                "delete_sink"
+              ]
+            },
+            "DeleteView": {
+              "methods": [
+                "delete_view"
+              ]
+            },
+            "GetBucket": {
+              "methods": [
+                "get_bucket"
+              ]
+            },
+            "GetCmekSettings": {
+              "methods": [
+                "get_cmek_settings"
+              ]
+            },
+            "GetExclusion": {
+              "methods": [
+                "get_exclusion"
+              ]
+            },
+            "GetSink": {
+              "methods": [
+                "get_sink"
+              ]
+            },
+            "GetView": {
+              "methods": [
+                "get_view"
+              ]
+            },
+            "ListBuckets": {
+              "methods": [
+                "list_buckets"
+              ]
+            },
+            "ListExclusions": {
+              "methods": [
+                "list_exclusions"
+              ]
+            },
+            "ListSinks": {
+              "methods": [
+                "list_sinks"
+              ]
+            },
+            "ListViews": {
+              "methods": [
+                "list_views"
+              ]
+            },
+            "UndeleteBucket": {
+              "methods": [
+                "undelete_bucket"
+              ]
+            },
+            "UpdateBucket": {
+              "methods": [
+                "update_bucket"
+              ]
+            },
+            "UpdateCmekSettings": {
+              "methods": [
+                "update_cmek_settings"
+              ]
+            },
+            "UpdateExclusion": {
+              "methods": [
+                "update_exclusion"
+              ]
+            },
+            "UpdateSink": {
+              "methods": [
+                "update_sink"
+              ]
+            },
+            "UpdateView": {
+              "methods": [
+                "update_view"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "LoggingServiceV2": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "LoggingServiceV2Client",
+          "rpcs": {
+            "DeleteLog": {
+              "methods": [
+                "delete_log"
+              ]
+            },
+            "ListLogEntries": {
+              "methods": [
+                "list_log_entries"
+              ]
+            },
+            "ListLogs": {
+              "methods": [
+                "list_logs"
+              ]
+            },
+            "ListMonitoredResourceDescriptors": {
+              "methods": [
+                "list_monitored_resource_descriptors"
+              ]
+            },
+            "TailLogEntries": {
+              "methods": [
+                "tail_log_entries"
+              ]
+            },
+            "WriteLogEntries": {
+              "methods": [
+                "write_log_entries"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "LoggingServiceV2AsyncClient",
+          "rpcs": {
+            "DeleteLog": {
+              "methods": [
+                "delete_log"
+              ]
+            },
+            "ListLogEntries": {
+              "methods": [
+                "list_log_entries"
+              ]
+            },
+            "ListLogs": {
+              "methods": [
+                "list_logs"
+              ]
+            },
+            "ListMonitoredResourceDescriptors": {
+              "methods": [
+                "list_monitored_resource_descriptors"
+              ]
+            },
+            "TailLogEntries": {
+              "methods": [
+                "tail_log_entries"
+              ]
+            },
+            "WriteLogEntries": {
+              "methods": [
+                "write_log_entries"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "MetricsServiceV2": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "MetricsServiceV2Client",
+          "rpcs": {
+            "CreateLogMetric": {
+              "methods": [
+                "create_log_metric"
+              ]
+            },
+            "DeleteLogMetric": {
+              "methods": [
+                "delete_log_metric"
+              ]
+            },
+            "GetLogMetric": {
+              "methods": [
+                "get_log_metric"
+              ]
+            },
+            "ListLogMetrics": {
+              "methods": [
+                "list_log_metrics"
+              ]
+            },
+            "UpdateLogMetric": {
+              "methods": [
+                "update_log_metric"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "MetricsServiceV2AsyncClient",
+          "rpcs": {
+            "CreateLogMetric": {
+              "methods": [
+                "create_log_metric"
+              ]
+            },
+            "DeleteLogMetric": {
+              "methods": [
+                "delete_log_metric"
+              ]
+            },
+            "GetLogMetric": {
+              "methods": [
+                "get_log_metric"
+              ]
+            },
+            "ListLogMetrics": {
+              "methods": [
+                "list_log_metrics"
+              ]
+            },
+            "UpdateLogMetric": {
+              "methods": [
+                "update_log_metric"
+              ]
+            }
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py
index 42ffdf2bc43d..4de65971c238 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py
index 4ab8f4d4080a..e7f6042801dd 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from .client import ConfigServiceV2Client
 from .async_client import ConfigServiceV2AsyncClient
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py
index ef184d61c94b..634c106b6787 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from collections import OrderedDict
 import functools
 import re
@@ -22,17 +20,16 @@
 import pkg_resources
 
 import google.api_core.client_options as ClientOptions  # type: ignore
-from google.api_core import exceptions  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
-from google.auth import credentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
 from google.cloud.logging_v2.services.config_service_v2 import pagers
 from google.cloud.logging_v2.types import logging_config
-from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
-
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
 from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
 from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport
 from .client import ConfigServiceV2Client
@@ -60,31 +57,26 @@ class ConfigServiceV2AsyncClient:
     parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path)
     log_view_path = staticmethod(ConfigServiceV2Client.log_view_path)
     parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path)
-
     common_billing_account_path = staticmethod(
         ConfigServiceV2Client.common_billing_account_path
     )
     parse_common_billing_account_path = staticmethod(
         ConfigServiceV2Client.parse_common_billing_account_path
     )
-
     common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path)
     parse_common_folder_path = staticmethod(
        ConfigServiceV2Client.parse_common_folder_path
    )
-
     common_organization_path = staticmethod(
         ConfigServiceV2Client.common_organization_path
     )
     parse_common_organization_path = staticmethod(
         ConfigServiceV2Client.parse_common_organization_path
     )
-
     common_project_path = staticmethod(ConfigServiceV2Client.common_project_path)
     parse_common_project_path = staticmethod(
         ConfigServiceV2Client.parse_common_project_path
     )
-
     common_location_path = staticmethod(ConfigServiceV2Client.common_location_path)
     parse_common_location_path = staticmethod(
         ConfigServiceV2Client.parse_common_location_path
@@ -92,7 +84,8 @@ class ConfigServiceV2AsyncClient:
 
     @classmethod
     def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials info.
+        """Creates an instance of this client using the provided credentials
+            info.
 
         Args:
             info (dict): The service account private key info.
@@ -107,7 +100,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
     @classmethod
     def from_service_account_file(cls, filename: str, *args, **kwargs):
         """Creates an instance of this client using the provided credentials
-        file.
+            file.
 
         Args:
             filename (str): The path to the service account private key json
@@ -124,7 +117,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     @property
     def transport(self) -> ConfigServiceV2Transport:
-        """Return the transport used by the client instance.
+        """Returns the transport used by the client instance.
 
         Returns:
             ConfigServiceV2Transport: The transport used by the client instance.
@@ -138,12 +131,12 @@ def transport(self) -> ConfigServiceV2Transport:
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
+        credentials: ga_credentials.Credentials = None,
         transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio",
         client_options: ClientOptions = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
-        """Instantiate the config service v2 client.
+        """Instantiates the config service v2 client.
 
         Args:
             credentials (Optional[google.auth.credentials.Credentials]): The
@@ -175,7 +168,6 @@ def __init__(
             google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                 creation failed for any reason.
         """
-
         self._client = ConfigServiceV2Client(
             credentials=credentials,
             transport=transport,
@@ -215,7 +207,6 @@ async def list_buckets(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -244,7 +235,6 @@ async def list_buckets(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
 
@@ -287,7 +277,6 @@ async def get_bucket(
         Args:
             request (:class:`google.cloud.logging_v2.types.GetBucketRequest`):
                 The request object. The parameters to `GetBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -299,7 +288,6 @@ async def get_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.GetBucketRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -337,7 +325,6 @@ async def create_bucket(
         Args:
             request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`):
                 The request object. The parameters to `CreateBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -349,7 +336,6 @@ async def create_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
        request = logging_config.CreateBucketRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
@@ -395,7 +381,6 @@ async def update_bucket(
         Args:
             request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`):
                 The request object. The parameters to `UpdateBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -407,7 +392,6 @@ async def update_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.UpdateBucketRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -445,7 +429,6 @@ async def delete_bucket(
         Args:
             request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`):
                 The request object. The parameters to `DeleteBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -453,7 +436,6 @@ async def delete_bucket(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.DeleteBucketRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -489,7 +471,6 @@ async def undelete_bucket(
         Args:
             request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`):
                 The request object. The parameters to `UndeleteBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -497,7 +478,6 @@ async def undelete_bucket(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.UndeleteBucketRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -543,7 +523,6 @@ async def list_views(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -572,7 +551,6 @@ async def list_views(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
 
@@ -615,7 +593,6 @@ async def get_view(
         Args:
             request (:class:`google.cloud.logging_v2.types.GetViewRequest`):
                 The request object. The parameters to `GetView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -629,7 +606,6 @@ async def get_view(
 
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.GetViewRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -666,7 +642,6 @@ async def create_view(
         Args:
             request (:class:`google.cloud.logging_v2.types.CreateViewRequest`):
                 The request object. The parameters to `CreateView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -680,7 +655,6 @@ async def create_view(
 
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.CreateViewRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -717,7 +691,6 @@ async def update_view(
         Args:
             request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`):
                 The request object. The parameters to `UpdateView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -731,7 +704,6 @@ async def update_view(
 
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.UpdateViewRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -767,7 +739,6 @@ async def delete_view(
         Args:
             request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`):
                 The request object. The parameters to `DeleteView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -775,7 +746,6 @@ async def delete_view(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.DeleteViewRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -825,7 +795,6 @@ async def list_sinks(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -854,7 +823,6 @@ async def list_sinks(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
 
@@ -867,9 +835,9 @@ async def list_sinks(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -924,7 +892,6 @@ async def get_sink(
                 This corresponds to the ``sink_name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -958,7 +925,6 @@ async def get_sink(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if sink_name is not None:
             request.sink_name = sink_name
 
@@ -971,9 +937,9 @@ async def get_sink(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1037,7 +1003,6 @@ async def create_sink(
                 This corresponds to the ``sink`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1071,7 +1036,6 @@ async def create_sink(
 
         # If we have keyword arguments corresponding to fields on the
        # request, apply these.
-
         if parent is not None:
             request.parent = parent
         if sink is not None:
@@ -1103,7 +1067,7 @@ async def update_sink(
         *,
         sink_name: str = None,
         sink: logging_config.LogSink = None,
-        update_mask: field_mask.FieldMask = None,
+        update_mask: field_mask_pb2.FieldMask = None,
         retry: retries.Retry = gapic_v1.method.DEFAULT,
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
@@ -1161,7 +1125,6 @@ async def update_sink(
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1195,7 +1158,6 @@ async def update_sink(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if sink_name is not None:
             request.sink_name = sink_name
         if sink is not None:
@@ -1212,9 +1174,9 @@ async def update_sink(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1267,7 +1229,6 @@ async def delete_sink(
                 This corresponds to the ``sink_name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1288,7 +1249,6 @@ async def delete_sink(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if sink_name is not None:
             request.sink_name = sink_name
 
@@ -1301,9 +1261,9 @@ async def delete_sink(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1352,7 +1312,6 @@ async def list_exclusions(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1381,7 +1340,6 @@ async def list_exclusions(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
 
@@ -1394,9 +1352,9 @@ async def list_exclusions(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1452,7 +1410,6 @@ async def get_exclusion(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1488,7 +1445,6 @@ async def get_exclusion(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if name is not None:
             request.name = name
 
@@ -1501,9 +1457,9 @@ async def get_exclusion(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1566,7 +1522,6 @@ async def create_exclusion(
                 This corresponds to the ``exclusion`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1602,7 +1557,6 @@ async def create_exclusion(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
         if exclusion is not None:
@@ -1634,7 +1588,7 @@ async def update_exclusion(
         *,
         name: str = None,
         exclusion: logging_config.LogExclusion = None,
-        update_mask: field_mask.FieldMask = None,
+        update_mask: field_mask_pb2.FieldMask = None,
         retry: retries.Retry = gapic_v1.method.DEFAULT,
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
@@ -1683,7 +1637,6 @@ async def update_exclusion(
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1719,7 +1672,6 @@ async def update_exclusion(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if name is not None:
             request.name = name
         if exclusion is not None:
@@ -1778,7 +1730,6 @@ async def delete_exclusion(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1799,7 +1750,6 @@ async def delete_exclusion(
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if name is not None:
             request.name = name
 
@@ -1812,9 +1762,9 @@ async def delete_exclusion(
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded,
-                    exceptions.InternalServerError,
-                    exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.InternalServerError,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=60.0,
             ),
@@ -1858,7 +1808,6 @@ async def get_cmek_settings(
                 See [Enabling CMEK for Logs
                 Router](https://cloud.google.com/logging/docs/routing/managed-
                 encryption) for more information.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1882,7 +1831,6 @@ async def get_cmek_settings(
 
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.GetCmekSettingsRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
@@ -1936,7 +1884,6 @@ async def update_cmek_settings(
                 See [Enabling CMEK for Logs
                 Router](https://cloud.google.com/logging/docs/routing/managed-
                 encryption) for more information.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1960,7 +1907,6 @@ async def update_cmek_settings(
 
         """
         # Create or coerce a protobuf request object.
-
         request = logging_config.UpdateCmekSettingsRequest(request)
 
         # Wrap the RPC method; this adds retry and timeout information,
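Most of the hunks above are mechanical: the generated retry policies now import google.api_core.exceptions as core_exceptions, and stray blank lines are dropped. For reference, a standalone equivalent of the retry policy those hunks configure, with the backoff numbers taken from the diff (the generated clients build this internally; constructing it by hand is only illustrative):

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

default_retry = retries.Retry(
    maximum=60.0,      # cap each backoff interval at 60s
    multiplier=1.3,    # exponential backoff factor
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,     # give up after 60s overall
)

# A Retry object is a decorator: default_retry(callable) returns a wrapped
# callable that retries only the three transient error types above.
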
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
index 37a28d7a2b0a..d2b32322777e 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from collections import OrderedDict
 from distutils import util
 import os
@@ -23,10 +21,10 @@
 import pkg_resources
 
 from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
-from google.auth import credentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
@@ -34,9 +32,8 @@
 
 from google.cloud.logging_v2.services.config_service_v2 import pagers
 from google.cloud.logging_v2.types import logging_config
-from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
-
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
 from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
 from .transports.grpc import ConfigServiceV2GrpcTransport
 from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport
@@ -57,7 +54,7 @@ class ConfigServiceV2ClientMeta(type):
     _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport
 
     def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]:
-        """Return an appropriate transport class.
+        """Returns an appropriate transport class.
 
         Args:
             label: The name of the desired transport. If none is
@@ -80,7 +77,8 @@ class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta):
 
     @staticmethod
     def _get_default_mtls_endpoint(api_endpoint):
-        """Convert api endpoint to mTLS endpoint.
+        """Converts api endpoint to mTLS endpoint.
+
         Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
         "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
         Args:
@@ -114,7 +112,8 @@ def _get_default_mtls_endpoint(api_endpoint):
 
     @classmethod
     def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials info.
+        """Creates an instance of this client using the provided credentials
+            info.
 
         Args:
             info (dict): The service account private key info.
@@ -131,7 +130,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
     @classmethod
     def from_service_account_file(cls, filename: str, *args, **kwargs):
         """Creates an instance of this client using the provided credentials
-        file.
+            file.
 
         Args:
             filename (str): The path to the service account private key json
@@ -150,34 +149,35 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     @property
     def transport(self) -> ConfigServiceV2Transport:
-        """Return the transport used by the client instance.
+        """Returns the transport used by the client instance.
 
         Returns:
-            ConfigServiceV2Transport: The transport used by the client instance.
+            ConfigServiceV2Transport: The transport used by the client
+                instance.
         """
         return self._transport
 
     @staticmethod
     def cmek_settings_path(project: str,) -> str:
-        """Return a fully-qualified cmek_settings string."""
+        """Returns a fully-qualified cmek_settings string."""
         return "projects/{project}/cmekSettings".format(project=project,)
 
     @staticmethod
     def parse_cmek_settings_path(path: str) -> Dict[str, str]:
-        """Parse a cmek_settings path into its component segments."""
+        """Parses a cmek_settings path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/cmekSettings$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
     def log_bucket_path(project: str, location: str, bucket: str,) -> str:
-        """Return a fully-qualified log_bucket string."""
+        """Returns a fully-qualified log_bucket string."""
         return "projects/{project}/locations/{location}/buckets/{bucket}".format(
             project=project, location=location, bucket=bucket,
         )
 
     @staticmethod
     def parse_log_bucket_path(path: str) -> Dict[str, str]:
-        """Parse a log_bucket path into its component segments."""
+        """Parses a log_bucket path into its component segments."""
         m = re.match(
             r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)$",
             path,
@@ -186,38 +186,38 @@ def parse_log_bucket_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def log_exclusion_path(project: str, exclusion: str,) -> str:
-        """Return a fully-qualified log_exclusion string."""
+        """Returns a fully-qualified log_exclusion string."""
         return "projects/{project}/exclusions/{exclusion}".format(
             project=project, exclusion=exclusion,
         )
 
     @staticmethod
     def parse_log_exclusion_path(path: str) -> Dict[str, str]:
-        """Parse a log_exclusion path into its component segments."""
+        """Parses a log_exclusion path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/exclusions/(?P<exclusion>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
     def log_sink_path(project: str, sink: str,) -> str:
-        """Return a fully-qualified log_sink string."""
+        """Returns a fully-qualified log_sink string."""
         return "projects/{project}/sinks/{sink}".format(project=project, sink=sink,)
 
     @staticmethod
     def parse_log_sink_path(path: str) -> Dict[str, str]:
-        """Parse a log_sink path into its component segments."""
+        """Parses a log_sink path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
     def log_view_path(project: str, location: str, bucket: str, view: str,) -> str:
-        """Return a fully-qualified log_view string."""
+        """Returns a fully-qualified log_view string."""
         return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(
             project=project, location=location, bucket=bucket, view=view,
         )
 
     @staticmethod
     def parse_log_view_path(path: str) -> Dict[str, str]:
-        """Parse a log_view path into its component segments."""
+        """Parses a log_view path into its component segments."""
         m = re.match(
             r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)/views/(?P<view>.+?)$",
             path,
@@ -226,7 +226,7 @@ def parse_log_view_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def common_billing_account_path(billing_account: str,) -> str:
-        """Return a fully-qualified billing_account string."""
+        """Returns a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
         )
@@ -239,7 +239,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def common_folder_path(folder: str,) -> str:
-        """Return a fully-qualified folder string."""
+        """Returns a fully-qualified folder string."""
         return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
@@ -250,7 +250,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def common_organization_path(organization: str,) -> str:
-        """Return a fully-qualified organization string."""
+        """Returns a fully-qualified organization string."""
         return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
@@ -261,7 +261,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def common_project_path(project: str,) -> str:
-        """Return a fully-qualified project string."""
+        """Returns a fully-qualified project string."""
         return "projects/{project}".format(project=project,)
 
     @staticmethod
@@ -272,7 +272,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def common_location_path(project: str, location: str,) -> str:
-        """Return a fully-qualified location string."""
+        """Returns a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
             project=project, location=location,
         )
@@ -286,12 +286,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
     def __init__(
         self,
         *,
-        credentials: Optional[credentials.Credentials] = None,
+        credentials: Optional[ga_credentials.Credentials] = None,
         transport: Union[str, ConfigServiceV2Transport, None] = None,
         client_options: Optional[client_options_lib.ClientOptions] = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
-        """Instantiate the config service v2 client.
+        """Instantiates the config service v2 client.
 
         Args:
             credentials (Optional[google.auth.credentials.Credentials]): The
@@ -346,9 +346,10 @@ def __init__(
                 client_cert_source_func = client_options.client_cert_source
             else:
                 is_mtls = mtls.has_default_client_cert_source()
-                client_cert_source_func = (
-                    mtls.default_client_cert_source() if is_mtls else None
-                )
+                if is_mtls:
+                    client_cert_source_func = mtls.default_client_cert_source()
+                else:
+                    client_cert_source_func = None
 
         # Figure out which api endpoint to use.
         if client_options.api_endpoint is not None:
@@ -360,12 +361,14 @@ def __init__(
             elif use_mtls_env == "always":
                 api_endpoint = self.DEFAULT_MTLS_ENDPOINT
             elif use_mtls_env == "auto":
-                api_endpoint = (
-                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
-                )
+                if is_mtls:
+                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+                else:
+                    api_endpoint = self.DEFAULT_ENDPOINT
             else:
                 raise MutualTLSChannelError(
-                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+                    "values: never, auto, always"
                 )
 
         # Save or instantiate the transport.
@@ -380,8 +383,8 @@ def __init__(
                 )
             if client_options.scopes:
                 raise ValueError(
-                    "When providing a transport instance, "
-                    "provide its scopes directly."
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
                 )
             self._transport = transport
         else:
@@ -428,7 +431,6 @@ def list_buckets(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -459,10 +461,8 @@ def list_buckets(
         # there are no flattened fields.
         if not isinstance(request, logging_config.ListBucketsRequest):
             request = logging_config.ListBucketsRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
 
@@ -501,7 +501,6 @@ def get_bucket(
         Args:
             request (google.cloud.logging_v2.types.GetBucketRequest):
                 The request object. The parameters to `GetBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -513,7 +512,6 @@ def get_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.GetBucketRequest.
         # There's no risk of modifying the input as we've already verified
@@ -552,7 +550,6 @@ def create_bucket(
         Args:
             request (google.cloud.logging_v2.types.CreateBucketRequest):
                 The request object. The parameters to `CreateBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -564,7 +561,6 @@ def create_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.CreateBucketRequest.
         # There's no risk of modifying the input as we've already verified
@@ -611,7 +607,6 @@ def update_bucket(
         Args:
             request (google.cloud.logging_v2.types.UpdateBucketRequest):
                 The request object. The parameters to `UpdateBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -623,7 +618,6 @@ def update_bucket(
             Describes a repository of logs.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.UpdateBucketRequest.
         # There's no risk of modifying the input as we've already verified
@@ -662,7 +656,6 @@ def delete_bucket(
         Args:
             request (google.cloud.logging_v2.types.DeleteBucketRequest):
                 The request object. The parameters to `DeleteBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -670,7 +663,6 @@ def delete_bucket(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.DeleteBucketRequest.
         # There's no risk of modifying the input as we've already verified
@@ -707,7 +699,6 @@ def undelete_bucket(
         Args:
             request (google.cloud.logging_v2.types.UndeleteBucketRequest):
                 The request object. The parameters to `UndeleteBucket`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -715,7 +706,6 @@ def undelete_bucket(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.UndeleteBucketRequest.
         # There's no risk of modifying the input as we've already verified
@@ -762,7 +752,6 @@ def list_views(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -793,10 +782,8 @@ def list_views(
         # there are no flattened fields.
         if not isinstance(request, logging_config.ListViewsRequest):
             request = logging_config.ListViewsRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
 
@@ -835,7 +822,6 @@ def get_view(
         Args:
             request (google.cloud.logging_v2.types.GetViewRequest):
                 The request object. The parameters to `GetView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -849,7 +835,6 @@ def get_view(
 
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.GetViewRequest.
         # There's no risk of modifying the input as we've already verified
@@ -887,7 +872,6 @@ def create_view(
         Args:
             request (google.cloud.logging_v2.types.CreateViewRequest):
                 The request object. The parameters to `CreateView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -901,7 +885,6 @@ def create_view(
 
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.CreateViewRequest.
         # There's no risk of modifying the input as we've already verified
@@ -939,7 +922,6 @@ def update_view(
         Args:
             request (google.cloud.logging_v2.types.UpdateViewRequest):
                 The request object. The parameters to `UpdateView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -953,7 +935,6 @@ def update_view(
 
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.UpdateViewRequest.
         # There's no risk of modifying the input as we've already verified
@@ -990,7 +971,6 @@ def delete_view(
         Args:
             request (google.cloud.logging_v2.types.DeleteViewRequest):
                 The request object. The parameters to `DeleteView`.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -998,7 +978,6 @@ def delete_view(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.DeleteViewRequest.
         # There's no risk of modifying the input as we've already verified
@@ -1049,7 +1028,6 @@ def list_sinks(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1080,10 +1058,8 @@ def list_sinks(
         # there are no flattened fields.
         if not isinstance(request, logging_config.ListSinksRequest):
             request = logging_config.ListSinksRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
 
@@ -1138,7 +1114,6 @@ def get_sink(
                 This corresponds to the ``sink_name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1174,10 +1149,8 @@ def get_sink(
         # there are no flattened fields.
         if not isinstance(request, logging_config.GetSinkRequest):
             request = logging_config.GetSinkRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if sink_name is not None:
                 request.sink_name = sink_name
 
@@ -1241,7 +1214,6 @@ def create_sink(
                 This corresponds to the ``sink`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1277,10 +1249,8 @@ def create_sink(
         # there are no flattened fields.
         if not isinstance(request, logging_config.CreateSinkRequest):
             request = logging_config.CreateSinkRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
             if sink is not None:
@@ -1308,7 +1278,7 @@ def update_sink(
         *,
         sink_name: str = None,
         sink: logging_config.LogSink = None,
-        update_mask: field_mask.FieldMask = None,
+        update_mask: field_mask_pb2.FieldMask = None,
         retry: retries.Retry = gapic_v1.method.DEFAULT,
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
@@ -1366,7 +1336,6 @@ def update_sink(
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1402,10 +1371,8 @@ def update_sink(
         # there are no flattened fields.
         if not isinstance(request, logging_config.UpdateSinkRequest):
             request = logging_config.UpdateSinkRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if sink_name is not None:
                 request.sink_name = sink_name
             if sink is not None:
@@ -1462,7 +1429,6 @@ def delete_sink(
                 This corresponds to the ``sink_name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1485,10 +1451,8 @@ def delete_sink(
         # there are no flattened fields.
         if not isinstance(request, logging_config.DeleteSinkRequest):
             request = logging_config.DeleteSinkRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if sink_name is not None:
                 request.sink_name = sink_name
 
@@ -1537,7 +1501,6 @@ def list_exclusions(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1568,10 +1531,8 @@ def list_exclusions(
         # there are no flattened fields.
         if not isinstance(request, logging_config.ListExclusionsRequest):
             request = logging_config.ListExclusionsRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
 
@@ -1627,7 +1588,6 @@ def get_exclusion(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1665,10 +1625,8 @@ def get_exclusion(
         # there are no flattened fields.
         if not isinstance(request, logging_config.GetExclusionRequest):
             request = logging_config.GetExclusionRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if name is not None:
                 request.name = name
 
@@ -1731,7 +1689,6 @@ def create_exclusion(
                 This corresponds to the ``exclusion`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1769,10 +1726,8 @@ def create_exclusion(
         # there are no flattened fields.
         if not isinstance(request, logging_config.CreateExclusionRequest):
             request = logging_config.CreateExclusionRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if parent is not None:
                 request.parent = parent
             if exclusion is not None:
@@ -1800,7 +1755,7 @@ def update_exclusion(
         *,
         name: str = None,
         exclusion: logging_config.LogExclusion = None,
-        update_mask: field_mask.FieldMask = None,
+        update_mask: field_mask_pb2.FieldMask = None,
         retry: retries.Retry = gapic_v1.method.DEFAULT,
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
@@ -1849,7 +1804,6 @@ def update_exclusion(
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1887,10 +1841,8 @@ def update_exclusion(
         # there are no flattened fields.
         if not isinstance(request, logging_config.UpdateExclusionRequest):
             request = logging_config.UpdateExclusionRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if name is not None:
                 request.name = name
             if exclusion is not None:
@@ -1945,7 +1897,6 @@ def delete_exclusion(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -1968,10 +1919,8 @@ def delete_exclusion(
         # there are no flattened fields.
         if not isinstance(request, logging_config.DeleteExclusionRequest):
             request = logging_config.DeleteExclusionRequest(request)
-
             # If we have keyword arguments corresponding to fields on the
             # request, apply these.
-
             if name is not None:
                 request.name = name
 
@@ -2015,7 +1964,6 @@ def get_cmek_settings(
                 See [Enabling CMEK for Logs
                 Router](https://cloud.google.com/logging/docs/routing/managed-
                 encryption) for more information.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -2039,7 +1987,6 @@ def get_cmek_settings(
 
         """
         # Create or coerce a protobuf request object.
-
         # Minor optimization to avoid making a copy if the user passes
         # in a logging_config.GetCmekSettingsRequest.
         # There's no risk of modifying the input as we've already verified
@@ -2094,7 +2041,6 @@ def update_cmek_settings(
                 See [Enabling CMEK for Logs
                 Router](https://cloud.google.com/logging/docs/routing/managed-
                 encryption) for more information.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -2118,7 +2064,6 @@ def update_cmek_settings(
 
         """
         # Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateCmekSettingsRequest. # There's no risk of modifying the input as we've already verified diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index f656fef0d8b0..6d8e11fb83e8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -501,7 +499,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 30282e2d26ce..b1e24fc64213 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 3981d8e9f219..d52c97635c4c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
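Note on the pager hunks above: only the docstring voice changes ("Instantiate" becomes "Instantiates"); pagination behavior is untouched. For orientation, a minimal usage sketch, assuming Application Default Credentials are configured and using a hypothetical project name (neither is part of this patch):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    # list_sinks returns a ListSinksPager; iterating it transparently
    # fetches successive pages and yields individual LogSink messages.
    for sink in client.list_sinks(parent="projects/my-project"):
        print(sink.name)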
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -47,21 +57,24 @@ class ConfigServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.read", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -185,9 +245,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -201,9 +261,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -217,9 +277,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -233,9 +293,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -255,9 +315,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,11 +337,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_buckets( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListBucketsRequest], - typing.Union[ + Union[ logging_config.ListBucketsResponse, - typing.Awaitable[logging_config.ListBucketsResponse], + Awaitable[logging_config.ListBucketsResponse], ], ]: raise NotImplementedError() @@ -289,62 +349,56 @@ def list_buckets( @property def get_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def create_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, 
Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def update_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def delete_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def undelete_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UndeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_views( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListViewsRequest], - typing.Union[ + Union[ logging_config.ListViewsResponse, - typing.Awaitable[logging_config.ListViewsResponse], + Awaitable[logging_config.ListViewsResponse], ], ]: raise NotImplementedError() @@ -352,47 +406,47 @@ def list_views( @property def get_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def create_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def update_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def delete_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteViewRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_sinks( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListSinksRequest], - typing.Union[ + Union[ logging_config.ListSinksResponse, - typing.Awaitable[logging_config.ListSinksResponse], + Awaitable[logging_config.ListSinksResponse], ], ]: raise NotImplementedError() @@ -400,47 +454,47 @@ def list_sinks( @property def get_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def create_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def update_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise 
NotImplementedError() @property def delete_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteSinkRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_exclusions( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListExclusionsRequest], - typing.Union[ + Union[ logging_config.ListExclusionsResponse, - typing.Awaitable[logging_config.ListExclusionsResponse], + Awaitable[logging_config.ListExclusionsResponse], ], ]: raise NotImplementedError() @@ -448,64 +502,54 @@ def list_exclusions( @property def get_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def create_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def update_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def delete_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteExclusionRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() @property def update_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 67f2ea7056da..327cc79c3e77 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
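The two TODO-marked helpers added to the base transport above exist only to bridge keyword differences between google-auth releases. A standalone sketch of the scopes gating, with the version pin and kwargs copied from the hunk (the function name and the abbreviated scope tuple are illustrative):

    import packaging.version

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)  # abbreviated

    def scopes_kwargs(google_auth_version, scopes=None):
        # google-auth >= 1.25.0 accepts default_scopes, so user-supplied
        # scopes and the library defaults travel separately; older
        # releases only understand a single merged "scopes" argument.
        if google_auth_version and (
            packaging.version.parse(google_auth_version)
            >= packaging.version.parse("1.25.0")
        ):
            return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
        return {"scopes": scopes or AUTH_SCOPES}

    print(scopes_kwargs("1.24.0"))  # single merged scopes
    print(scopes_kwargs("1.26.0"))  # scopes plus default_scopes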
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -343,7 +343,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -364,14 +364,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. 
A bucket that has been deleted @@ -391,7 +391,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -502,7 +502,9 @@ def update_view( return self._stubs["update_view"] @property - def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. @@ -521,7 +523,7 @@ def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empt self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -639,7 +641,9 @@ def update_sink( return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -659,7 +663,7 @@ def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empt self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -776,7 +780,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. @@ -795,7 +799,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3eabb2bcd3e9..3d7d271bf469 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
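Every wrapped config method in these transports shares one retry envelope; the parameters below are copied from _prep_wrapped_messages above and shown as a self-contained construction for reference:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Attempts back off from 0.1s by a factor of 1.3 up to 60s between
    # tries, and the whole operation is abandoned after the 60s deadline.
    # Only transient server-side failures trigger a retry.
    default_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )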
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -54,7 +51,7 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -354,7 +353,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -375,14 +374,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. 
A bucket that has been deleted @@ -402,7 +401,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -521,7 +520,7 @@ def update_view( @property def delete_view( self, - ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. @@ -540,7 +539,7 @@ def delete_view( self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -666,7 +665,7 @@ def update_sink( @property def delete_sink( self, - ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -686,7 +685,7 @@ def delete_sink( self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -810,7 +809,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. @@ -829,7 +828,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index c46b48a29424..bd7a79820348 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
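The empty to empty_pb2 rename threaded through both gRPC transports changes only the module alias; the stub wiring is identical in the sync and asyncio variants. For reference, the shape of one such stub constructed against a placeholder channel (the target address is illustrative; no connection is made at construction time):

    import grpc
    from google.protobuf import empty_pb2

    from google.cloud.logging_v2.types import logging_config

    channel = grpc.insecure_channel("localhost:12345")  # placeholder target
    delete_bucket = channel.unary_unary(
        "/google.logging.v2.ConfigServiceV2/DeleteBucket",
        request_serializer=logging_config.DeleteBucketRequest.serialize,
        response_deserializer=empty_pb2.Empty.FromString,
    )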
# - from .client import LoggingServiceV2Client from .async_client import LoggingServiceV2AsyncClient diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f61556922983..6a11e96cb018 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -31,17 +29,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -57,31 +54,26 @@ class LoggingServiceV2AsyncClient: log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod( LoggingServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( LoggingServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( LoggingServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( LoggingServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( LoggingServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) parse_common_project_path = staticmethod( LoggingServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) parse_common_location_path = staticmethod( LoggingServiceV2Client.parse_common_location_path @@ -89,7 +81,8 @@ class LoggingServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. 
@@ -104,7 +97,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -121,7 +114,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: LoggingServiceV2Transport: The transport used by the client instance. @@ -135,12 +128,12 @@ def transport(self) -> LoggingServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -172,7 +165,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = LoggingServiceV2Client( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -238,7 +229,6 @@ async def delete_log( # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -251,9 +241,9 @@ async def delete_log( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,7 +267,7 @@ async def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -383,7 +373,6 @@ async def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -408,7 +397,6 @@ async def write_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if log_name is not None: request.log_name = log_name if resource is not None: @@ -416,7 +404,6 @@ async def write_log_entries( if labels: request.labels.update(labels) - if entries: request.entries.extend(entries) @@ -429,9 +416,9 @@ async def write_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -514,7 +501,6 @@ async def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -543,12 +529,10 @@ async def list_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. - if filter is not None: request.filter = filter if order_by is not None: request.order_by = order_by - if resource_names: request.resource_names.extend(resource_names) @@ -561,9 +545,9 @@ async def list_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -598,7 +582,6 @@ async def list_monitored_resource_descriptors( request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -615,7 +598,6 @@ async def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -627,9 +609,9 @@ async def list_monitored_resource_descriptors( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -678,7 +660,6 @@ async def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -707,7 +688,6 @@ async def list_logs( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -720,9 +700,9 @@ async def list_logs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -783,9 +763,9 @@ def tail_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 00d758ab55b1..dd94b6721520 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -33,20 +31,19 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -67,7 +64,7 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -90,7 +87,8 @@ class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
Args: @@ -124,7 +122,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -141,7 +140,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -160,27 +159,28 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - LoggingServiceV2Transport: The transport used by the client instance. + LoggingServiceV2Transport: The transport used by the client + instance. """ return self._transport @staticmethod def log_path(project: str, log: str,) -> str: - """Return a fully-qualified log string.""" + """Returns a fully-qualified log string.""" return "projects/{project}/logs/{log}".format(project=project, log=log,) @staticmethod def parse_log_path(path: str) -> Dict[str, str]: - """Parse a log path into its component segments.""" + """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -193,7 +193,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -204,7 +204,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -215,7 +215,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -226,7 +226,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -240,12 +240,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] 
= None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -300,9 +300,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -314,12 +315,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -334,8 +337,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -387,7 +390,6 @@ def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -410,10 +412,8 @@ def delete_log( # there are no flattened fields. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -437,7 +437,7 @@ def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -543,7 +543,6 @@ def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -570,10 +569,8 @@ def write_log_entries( # there are no flattened fields. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name if resource is not None: @@ -662,7 +659,6 @@ def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -693,10 +689,8 @@ def list_log_entries( # there are no flattened fields. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if resource_names is not None: request.resource_names = resource_names if filter is not None: @@ -735,7 +729,6 @@ def list_monitored_resource_descriptors( request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -752,7 +745,6 @@ def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging.ListMonitoredResourceDescriptorsRequest. # There's no risk of modifying the input as we've already verified @@ -807,7 +799,6 @@ def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -838,10 +829,8 @@ def list_logs( # there are no flattened fields. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 7ab8ac8d27f6..b06007cb4a40 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -26,7 +24,7 @@ Optional, ) -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -213,7 +211,7 @@ def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -249,7 +247,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and @@ -281,7 +279,7 @@ async def pages( def __aiter__( self, - ) -> AsyncIterable[monitored_resource.MonitoredResourceDescriptor]: + ) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -381,7 +379,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index cd979b771d26..65e713121f22 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 66003ef95cd0..fdcbead00c60 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -48,21 +58,24 @@ class LoggingServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
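# A rough reading of the retry table that follows, for orientation:
# each wrapped method retries only DeadlineExceeded,
# InternalServerError, and ServiceUnavailable, with exponential
# backoff (initial=0.1s, multiplier=1.3, maximum=60.0s). The nominal
# delay sequence is 0.1, 0.13, 0.169, 0.2197, ... seconds, capped at
# 60.0, until the per-method deadline expires (60.0s for most entries;
# 3600.0s for the last entry in the table, presumably the streaming
# tail RPC). Values are copied from the hunks below; actual sleeps may
# include jitter applied by google.api_core.retry.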
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -182,9 +242,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -198,9 +258,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), @@ -212,20 +272,18 @@ def _prep_wrapped_messages(self, client_info): @property def delete_log( self, - ) -> typing.Callable[ - [logging.DeleteLogRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [logging.DeleteLogRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def write_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.WriteLogEntriesRequest], - typing.Union[ - logging.WriteLogEntriesResponse, - typing.Awaitable[logging.WriteLogEntriesResponse], + Union[ + logging.WriteLogEntriesResponse, Awaitable[logging.WriteLogEntriesResponse] ], ]: raise NotImplementedError() @@ -233,11 +291,10 @@ def write_log_entries( @property def list_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogEntriesRequest], - typing.Union[ - logging.ListLogEntriesResponse, - typing.Awaitable[logging.ListLogEntriesResponse], + Union[ + logging.ListLogEntriesResponse, Awaitable[logging.ListLogEntriesResponse] ], ]: raise NotImplementedError() @@ -245,11 +302,11 @@ def list_log_entries( @property def list_monitored_resource_descriptors( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListMonitoredResourceDescriptorsRequest], - typing.Union[ + Union[ logging.ListMonitoredResourceDescriptorsResponse, - 
typing.Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], ], ]: raise NotImplementedError() @@ -257,22 +314,19 @@ def list_monitored_resource_descriptors( @property def list_logs( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogsRequest], - typing.Union[ - logging.ListLogsResponse, typing.Awaitable[logging.ListLogsResponse] - ], + Union[logging.ListLogsResponse, Awaitable[logging.ListLogsResponse]], ]: raise NotImplementedError() @property def tail_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.TailLogEntriesRequest], - typing.Union[ - logging.TailLogEntriesResponse, - typing.Awaitable[logging.TailLogEntriesResponse], + Union[ + logging.TailLogEntriesResponse, Awaitable[logging.TailLogEntriesResponse] ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index b52d306f3daf..5e5c1ad0c9c1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -223,7 +223,7 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: + def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. The log @@ -246,7 +246,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 0ba87029cfe5..1f33ad78a14f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -54,7 +51,7 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -229,7 +228,7 @@ def grpc_channel(self) -> aio.Channel: @property def delete_log( self, - ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. The log @@ -252,7 +251,7 @@ def delete_log( self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index c857ea037ff3..f37e39314d1d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import MetricsServiceV2Client from .async_client import MetricsServiceV2AsyncClient diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 93a652b79637..defd64a13efa 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,19 +20,17 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client @@ -50,31 +46,26 @@ class MetricsServiceV2AsyncClient: log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod( MetricsServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( MetricsServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( MetricsServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( MetricsServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( MetricsServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) parse_common_project_path = staticmethod( MetricsServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) parse_common_location_path = staticmethod( MetricsServiceV2Client.parse_common_location_path @@ -82,7 +73,8 @@ class MetricsServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -97,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -114,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: MetricsServiceV2Transport: The transport used by the client instance. 
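The from_service_account_* constructors touched above are the usual entry point when application-default credentials are not configured. A usage sketch against the synchronous client; the key-file path and project ID below are placeholders:

    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )

    # Build a client from a service-account key file and page through
    # the project's logs-based metrics.
    client = MetricsServiceV2Client.from_service_account_file("key.json")
    for metric in client.list_log_metrics(parent="projects/my-project"):
        print(metric.name)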
@@ -128,12 +120,12 @@ def transport(self) -> MetricsServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +157,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = MetricsServiceV2Client( credentials=credentials, transport=transport, @@ -198,7 +189,6 @@ async def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -227,7 +217,6 @@ async def list_log_metrics( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -240,9 +229,9 @@ async def list_log_metrics( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -292,7 +281,6 @@ async def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -328,7 +316,6 @@ async def get_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -341,9 +328,9 @@ async def get_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -401,7 +388,6 @@ async def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -437,7 +423,6 @@ async def create_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -498,7 +483,6 @@ async def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -534,7 +518,6 @@ async def update_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -549,9 +532,9 @@ async def update_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -597,7 +580,6 @@ async def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -618,7 +600,6 @@ async def delete_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -631,9 +612,9 @@ async def delete_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 850236a573dc..6dcbcdfb3bac 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,22 +21,20 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -59,7 +55,7 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -82,7 +78,8 @@ class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +113,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +131,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -152,29 +150,30 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - MetricsServiceV2Transport: The transport used by the client instance. + MetricsServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def log_metric_path(project: str, metric: str,) -> str: - """Return a fully-qualified log_metric string.""" + """Returns a fully-qualified log_metric string.""" return "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @staticmethod def parse_log_metric_path(path: str) -> Dict[str, str]: - """Parse a log_metric path into its component segments.""" + """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -187,7 +186,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -198,7 +197,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -209,7 +208,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -220,7 +219,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -234,12 +233,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -294,9 +293,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -308,12 +308,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -328,8 +330,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -369,7 +371,6 @@ def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -400,10 +401,8 @@ def list_log_metrics( # there are no flattened fields. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -453,7 +452,6 @@ def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -491,10 +489,8 @@ def get_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -552,7 +548,6 @@ def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -590,10 +585,8 @@ def create_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -650,7 +643,6 @@ def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -688,10 +680,8 @@ def update_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -739,7 +729,6 @@ def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -762,10 +751,8 @@ def delete_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 15134ac578fb..8ff178d2409d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index f748403b4305..10ccb830c7e3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index c6ae3da41e5e..814f62590b2a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -48,21 +58,24 @@ class MetricsServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -153,9 +213,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -169,9 +229,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -183,11 +243,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_log_metrics( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.ListLogMetricsRequest], - typing.Union[ + Union[ logging_metrics.ListLogMetricsResponse, - typing.Awaitable[logging_metrics.ListLogMetricsResponse], + Awaitable[logging_metrics.ListLogMetricsResponse], ], ]: raise NotImplementedError() @@ -195,42 +255,36 @@ def list_log_metrics( @property def get_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.GetLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def create_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.CreateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def update_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.UpdateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def delete_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.DeleteLogMetricRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index a9447ac26a9a..1c9b3dde9924 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -331,7 +331,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty.Empty]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. 
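Here create_channel stops passing scopes directly and instead spreads **self_signed_jwt_kwargs computed by the version-gate helper added to the base transport. A condensed sketch of that dispatch, with the 1.26.0 cutoff taken from the TODO comments above; the function name and flat parameter list are illustrative:

    from typing import Dict, Optional, Sequence, Union

    import packaging.version

    def channel_kwargs(
        api_core_version: str,
        scopes: Optional[Sequence[str]],
        auth_scopes: Sequence[str],
        default_host: str,
    ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
        # google-api-core >= 1.26.0 understands default_scopes and
        # default_host (enabling self-signed JWTs); older versions
        # only accept a plain scopes argument.
        if packaging.version.parse(api_core_version) >= packaging.version.parse(
            "1.26.0"
        ):
            return {
                "default_scopes": auth_scopes,
                "scopes": scopes,
                "default_host": default_host,
            }
        return {"scopes": scopes or auth_scopes}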
@@ -350,7 +350,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 94017be9d889..62a0bf0f855a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -54,7 +51,7 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. 
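# ---------------------------------------------------------------------------
# [editor's note] Both the sync and async transports above swap the old
# `scopes = scopes or cls.AUTH_SCOPES` default for a call to
# `cls._get_self_signed_jwt_kwargs(host, scopes)`, splatting the result into
# `create_channel`. A minimal sketch of what that helper plausibly returns,
# assuming google-api-core >= 1.26.0 (which accepts `default_scopes` and
# `default_host` and can mint self-signed JWTs); the body and the scope list
# here are illustrative assumptions, not the generated implementation:
from typing import Optional, Sequence


def _get_self_signed_jwt_kwargs_sketch(
    host: str, scopes: Optional[Sequence[str]]
) -> dict:
    # Illustrative subset of this service's OAuth scopes.
    auth_scopes = ("https://www.googleapis.com/auth/cloud-platform",)
    return {
        "scopes": scopes,  # explicit user-supplied scopes take precedence
        "default_scopes": auth_scopes,  # fallback when `scopes` is None
        "default_host": host,  # audience hint for self-signed JWTs
    }
# ---------------------------------------------------------------------------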
self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -342,7 +341,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -361,7 +360,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 9519c0777f43..7d1cdd99e4be 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .log_entry import ( LogEntry, LogEntryOperation, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index e63d6086f95f..6c57b22d5725 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore -from google.logging.type import http_request_pb2 as glt_http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as gp_timestamp # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -34,7 +31,6 @@ class LogEntry(proto.Message): r"""An individual entry in a log. - Attributes: log_name (str): Required. The resource name of the log to which this log @@ -163,46 +159,31 @@ class LogEntry(proto.Message): associated with the log entry, if any. 
""" - log_name = proto.Field(proto.STRING, number=12) - + log_name = proto.Field(proto.STRING, number=12,) resource = proto.Field( - proto.MESSAGE, number=8, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, ) - proto_payload = proto.Field( - proto.MESSAGE, number=2, oneof="payload", message=gp_any.Any, + proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, ) - - text_payload = proto.Field(proto.STRING, number=3, oneof="payload") - + text_payload = proto.Field(proto.STRING, number=3, oneof="payload",) json_payload = proto.Field( - proto.MESSAGE, number=6, oneof="payload", message=struct.Struct, + proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, ) - - timestamp = proto.Field(proto.MESSAGE, number=9, message=gp_timestamp.Timestamp,) - + timestamp = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) receive_timestamp = proto.Field( - proto.MESSAGE, number=24, message=gp_timestamp.Timestamp, + proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, ) - - severity = proto.Field(proto.ENUM, number=10, enum=log_severity.LogSeverity,) - - insert_id = proto.Field(proto.STRING, number=4) - + severity = proto.Field(proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity,) + insert_id = proto.Field(proto.STRING, number=4,) http_request = proto.Field( - proto.MESSAGE, number=7, message=glt_http_request.HttpRequest, + proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=11) - + labels = proto.MapField(proto.STRING, proto.STRING, number=11,) operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) - - trace = proto.Field(proto.STRING, number=22) - - span_id = proto.Field(proto.STRING, number=27) - - trace_sampled = proto.Field(proto.BOOL, number=30) - + trace = proto.Field(proto.STRING, number=22,) + span_id = proto.Field(proto.STRING, number=27,) + trace_sampled = proto.Field(proto.BOOL, number=30,) source_location = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) @@ -230,13 +211,10 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field(proto.STRING, number=1) - - producer = proto.Field(proto.STRING, number=2) - - first = proto.Field(proto.BOOL, number=3) - - last = proto.Field(proto.BOOL, number=4) + id = proto.Field(proto.STRING, number=1,) + producer = proto.Field(proto.STRING, number=2,) + first = proto.Field(proto.BOOL, number=3,) + last = proto.Field(proto.BOOL, number=4,) class LogEntrySourceLocation(proto.Message): @@ -261,11 +239,9 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). 
""" - file = proto.Field(proto.STRING, number=1) - - line = proto.Field(proto.INT64, number=2) - - function = proto.Field(proto.STRING, number=3) + file = proto.Field(proto.STRING, number=1,) + line = proto.Field(proto.INT64, number=2,) + function = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index ca739c02ce02..6d64b9a9164b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry -from google.protobuf import duration_pb2 as duration # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -45,7 +42,6 @@ class DeleteLogRequest(proto.Message): r"""The parameters to DeleteLog. - Attributes: log_name (str): Required. The resource name of the log to delete: @@ -64,12 +60,11 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field(proto.STRING, number=1) + log_name = proto.Field(proto.STRING, number=1,) class WriteLogEntriesRequest(proto.Message): r"""The parameters to WriteLogEntries. - Attributes: log_name (str): Optional. A default log resource name that is assigned to @@ -158,28 +153,22 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. """ - log_name = proto.Field(proto.STRING, number=1) - + log_name = proto.Field(proto.STRING, number=1,) resource = proto.Field( - proto.MESSAGE, number=2, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) - - partial_success = proto.Field(proto.BOOL, number=5) - - dry_run = proto.Field(proto.BOOL, number=6) + partial_success = proto.Field(proto.BOOL, number=5,) + dry_run = proto.Field(proto.BOOL, number=6,) class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries.""" + r"""Result returned from WriteLogEntries. """ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. 
- Attributes: log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, @@ -192,13 +181,12 @@ class WriteLogEntriesPartialErrors(proto.Message): """ log_entry_errors = proto.MapField( - proto.INT32, proto.MESSAGE, number=1, message=status.Status, + proto.INT32, proto.MESSAGE, number=1, message=status_pb2.Status, ) class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. - Attributes: resource_names (Sequence[str]): Required. Names of one or more parent resources from which @@ -252,20 +240,15 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. """ - resource_names = proto.RepeatedField(proto.STRING, number=8) - - filter = proto.Field(proto.STRING, number=2) - - order_by = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) - - page_token = proto.Field(proto.STRING, number=5) + resource_names = proto.RepeatedField(proto.STRING, number=8,) + filter = proto.Field(proto.STRING, number=2,) + order_by = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, @@ -293,13 +276,11 @@ def raw_page(self): return self entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The parameters to ListMonitoredResourceDescriptors - Attributes: page_size (int): Optional. The maximum number of results to return from this @@ -314,14 +295,12 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field(proto.INT32, number=1) - - page_token = proto.Field(proto.STRING, number=2) + page_size = proto.Field(proto.INT32, number=1,) + page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. - Attributes: resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. @@ -337,15 +316,15 @@ def raw_page(self): return self resource_descriptors = proto.RepeatedField( - proto.MESSAGE, number=1, message=monitored_resource.MonitoredResourceDescriptor, + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListLogsRequest(proto.Message): r"""The parameters to ListLogs. - Attributes: parent (str): Required. The resource name that owns the logs: @@ -379,18 +358,14 @@ class ListLogsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - resource_names = proto.RepeatedField(proto.STRING, number=8) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + resource_names = proto.RepeatedField(proto.STRING, number=8,) class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. - Attributes: log_names (Sequence[str]): A list of log names. For example, @@ -407,14 +382,12 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField(proto.STRING, number=3) - - next_page_token = proto.Field(proto.STRING, number=2) + log_names = proto.RepeatedField(proto.STRING, number=3,) + next_page_token = proto.Field(proto.STRING, number=2,) class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. - Attributes: resource_names (Sequence[str]): Required. Name of a parent resource from which to retrieve @@ -451,16 +424,13 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + resource_names = proto.RepeatedField(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will @@ -480,7 +450,6 @@ class TailLogEntriesResponse(proto.Message): class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. - Attributes: reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the @@ -499,11 +468,9 @@ class Reason(proto.Enum): reason = proto.Field( proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) - - suppressed_count = proto.Field(proto.INT32, number=2) + suppressed_count = proto.Field(proto.INT32, number=2,) entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - suppression_info = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 0d1f896e09f6..9b62807311da 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -72,7 +69,6 @@ class LifecycleState(proto.Enum): class LogBucket(proto.Message): r"""Describes a repository of logs. - Attributes: name (str): The resource name of the bucket. For example: @@ -107,24 +103,17 @@ class LogBucket(proto.Message): Output only. The bucket lifecycle state. """ - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - retention_days = proto.Field(proto.INT32, number=11) - - locked = proto.Field(proto.BOOL, number=9) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + retention_days = proto.Field(proto.INT32, number=11,) + locked = proto.Field(proto.BOOL, number=9,) lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) class LogView(proto.Message): r"""Describes a view over logs in a bucket. - Attributes: name (str): The resource name of the view. @@ -148,15 +137,11 @@ class LogView(proto.Message): resource.type = "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - filter = proto.Field(proto.STRING, number=7) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + filter = proto.Field(proto.STRING, number=7,) class LogSink(proto.Message): @@ -267,31 +252,24 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field(proto.STRING, number=1) - - destination = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=5) - - description = proto.Field(proto.STRING, number=18) - - disabled = proto.Field(proto.BOOL, number=19) - + name = proto.Field(proto.STRING, number=1,) + destination = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=5,) + description = proto.Field(proto.STRING, number=18,) + disabled = proto.Field(proto.BOOL, number=19,) exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) - output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) - - writer_identity = proto.Field(proto.STRING, number=8) - - include_children = proto.Field(proto.BOOL, number=9) - + writer_identity = proto.Field(proto.STRING, number=8,) + include_children = proto.Field(proto.BOOL, number=9,) bigquery_options = proto.Field( proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", ) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, 
number=14, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) class BigQueryOptions(proto.Message): @@ -319,14 +297,12 @@ class BigQueryOptions(proto.Message): will have this field set to false. """ - use_partitioned_tables = proto.Field(proto.BOOL, number=1) - - uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3) + use_partitioned_tables = proto.Field(proto.BOOL, number=1,) + uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3,) class ListBucketsRequest(proto.Message): r"""The parameters to ``ListBuckets``. - Attributes: parent (str): Required. The parent resource whose buckets are to be @@ -355,16 +331,13 @@ class ListBucketsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. - Attributes: buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. @@ -380,13 +353,11 @@ def raw_page(self): return self buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateBucketRequest(proto.Message): r"""The parameters to ``CreateBucket``. - Attributes: parent (str): Required. The resource in which to create the bucket: @@ -408,16 +379,13 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field(proto.STRING, number=1) - - bucket_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + bucket_id = proto.Field(proto.STRING, number=2,) bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) class UpdateBucketRequest(proto.Message): r"""The parameters to ``UpdateBucket``. - Attributes: name (str): Required. The full resource name of the bucket to update. @@ -448,16 +416,15 @@ class UpdateBucketRequest(proto.Message): Example: ``updateMask=retention_days``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetBucketRequest(proto.Message): r"""The parameters to ``GetBucket``. - Attributes: name (str): Required. The resource name of the bucket: @@ -473,12 +440,11 @@ class GetBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteBucketRequest(proto.Message): r"""The parameters to ``DeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to delete. @@ -494,12 +460,11 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UndeleteBucketRequest(proto.Message): r"""The parameters to ``UndeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to undelete. @@ -515,12 +480,11 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListViewsRequest(proto.Message): r"""The parameters to ``ListViews``. - Attributes: parent (str): Required. The bucket whose views are to be listed: @@ -541,16 +505,13 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListViewsResponse(proto.Message): r"""The response from ListViews. - Attributes: views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. @@ -566,13 +527,11 @@ def raw_page(self): return self views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateViewRequest(proto.Message): r"""The parameters to ``CreateView``. - Attributes: parent (str): Required. The bucket in which to create the view @@ -589,16 +548,13 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field(proto.STRING, number=1) - - view_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + view_id = proto.Field(proto.STRING, number=2,) view = proto.Field(proto.MESSAGE, number=3, message="LogView",) class UpdateViewRequest(proto.Message): r"""The parameters to ``UpdateView``. - Attributes: name (str): Required. The full resource name of the view to update @@ -623,16 +579,15 @@ class UpdateViewRequest(proto.Message): Example: ``updateMask=filter``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) view = proto.Field(proto.MESSAGE, number=2, message="LogView",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetViewRequest(proto.Message): r"""The parameters to ``GetView``. - Attributes: name (str): Required. The resource name of the policy: @@ -645,12 +600,11 @@ class GetViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteViewRequest(proto.Message): r"""The parameters to ``DeleteView``. - Attributes: name (str): Required. The full resource name of the view to delete: @@ -663,12 +617,11 @@ class DeleteViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. - Attributes: parent (str): Required. 
The parent resource whose sinks are to be listed: @@ -692,16 +645,13 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. - Attributes: sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. @@ -717,13 +667,11 @@ def raw_page(self): return self sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetSinkRequest(proto.Message): r"""The parameters to ``GetSink``. - Attributes: sink_name (str): Required. The resource name of the sink: @@ -738,12 +686,11 @@ class GetSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class CreateSinkRequest(proto.Message): r"""The parameters to ``CreateSink``. - Attributes: parent (str): Required. The resource in which to create the sink: @@ -777,16 +724,13 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) class UpdateSinkRequest(proto.Message): r"""The parameters to ``UpdateSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to update, @@ -837,18 +781,16 @@ class UpdateSinkRequest(proto.Message): Example: ``updateMask=filter``. """ - sink_name = proto.Field(proto.STRING, number=1) - + sink_name = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class DeleteSinkRequest(proto.Message): r"""The parameters to ``DeleteSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to delete, @@ -864,7 +806,7 @@ class DeleteSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class LogExclusion(proto.Message): @@ -913,22 +855,16 @@ class LogExclusion(proto.Message): exclusions. 
""" - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - - disabled = proto.Field(proto.BOOL, number=4) - - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) + disabled = proto.Field(proto.BOOL, number=4,) + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) class ListExclusionsRequest(proto.Message): r"""The parameters to ``ListExclusions``. - Attributes: parent (str): Required. The parent resource whose exclusions are to be @@ -953,16 +889,13 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. - Attributes: exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. @@ -978,13 +911,11 @@ def raw_page(self): return self exclusions = proto.RepeatedField(proto.MESSAGE, number=1, message="LogExclusion",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetExclusionRequest(proto.Message): r"""The parameters to ``GetExclusion``. - Attributes: name (str): Required. The resource name of an existing exclusion: @@ -1000,12 +931,11 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateExclusionRequest(proto.Message): r"""The parameters to ``CreateExclusion``. - Attributes: parent (str): Required. The parent resource in which to create the @@ -1026,14 +956,12 @@ class CreateExclusionRequest(proto.Message): resource. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) class UpdateExclusionRequest(proto.Message): r"""The parameters to ``UpdateExclusion``. - Attributes: name (str): Required. The resource name of the exclusion to update: @@ -1063,16 +991,15 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class DeleteExclusionRequest(proto.Message): r"""The parameters to ``DeleteExclusion``. - Attributes: name (str): Required. The resource name of an existing exclusion to @@ -1089,7 +1016,7 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetCmekSettingsRequest(proto.Message): @@ -1118,7 +1045,7 @@ class GetCmekSettingsRequest(proto.Message): applies to all projects and folders in the GCP organization. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateCmekSettingsRequest(proto.Message): @@ -1163,11 +1090,11 @@ class UpdateCmekSettingsRequest(proto.Message): Example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class CmekSettings(proto.Message): @@ -1232,11 +1159,9 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field(proto.STRING, number=1) - - kms_key_name = proto.Field(proto.STRING, number=2) - - service_account_id = proto.Field(proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + service_account_id = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index c2a8a60072a5..4b39650f24ec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -170,34 +167,26 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) metric_descriptor = proto.Field( - proto.MESSAGE, number=5, message=ga_metric.MetricDescriptor, + proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) - - value_extractor = proto.Field(proto.STRING, number=6) - - label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7) - + value_extractor = proto.Field(proto.STRING, number=6,) + label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7,) bucket_options = proto.Field( - proto.MESSAGE, number=8, message=distribution.Distribution.BucketOptions, + proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, ) - - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,) class ListLogMetricsRequest(proto.Message): r"""The parameters to ListLogMetrics. - Attributes: parent (str): Required. The name of the project containing the metrics: @@ -218,16 +207,13 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. - Attributes: metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. @@ -243,13 +229,11 @@ def raw_page(self): return self metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetLogMetricRequest(proto.Message): r"""The parameters to GetLogMetric. - Attributes: metric_name (str): Required. The resource name of the desired metric: @@ -259,12 +243,11 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) class CreateLogMetricRequest(proto.Message): r"""The parameters to CreateLogMetric. - Attributes: parent (str): Required. The resource name of the project in which to @@ -280,14 +263,12 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class UpdateLogMetricRequest(proto.Message): r"""The parameters to UpdateLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to update: @@ -304,14 +285,12 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field(proto.STRING, number=1) - + metric_name = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class DeleteLogMetricRequest(proto.Message): r"""The parameters to DeleteLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to delete: @@ -321,7 +300,7 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py index e69de29bb2d1..4de65971c238 100644 --- a/packages/google-cloud-logging/tests/__init__.py +++ b/packages/google-cloud-logging/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py index df379f1e9d88..4de65971c238 100644 --- a/packages/google-cloud-logging/tests/unit/__init__.py +++ b/packages/google-cloud-logging/tests/unit/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/packages/google-cloud-logging/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py index 42ffdf2bc43d..4de65971c238 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index d6a2f3983293..8be1ee06fa71 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( ConfigServiceV2AsyncClient, @@ -38,10 +37,40 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import logging_config from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
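# [editor's note] The four skipif markers defined just below gate individual
# tests on the versions of google-auth and google-api-core that happen to be
# installed. Hedged usage sketch (the decorated test name is hypothetical):
#
#     @requires_api_core_gte_1_26_0
#     def test_transport_create_channel_self_signed_jwt():
#         ...  # exercises the `default_scopes`/`default_host` channel kwargs
#
# `packaging.version.parse` performs PEP 440-aware comparison, so a
# pre-release such as "1.26.0rc1" correctly sorts below "1.26.0".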
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -92,7 +121,7 @@ def test__get_default_mtls_endpoint(): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -109,7 +138,7 @@ def test_config_service_v2_client_from_service_account_info(client_class): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -162,7 +191,7 @@ def test_config_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -460,7 +489,7 @@ def test_list_buckets( transport: str = "grpc", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -473,19 +502,15 @@ def test_list_buckets( call.return_value = logging_config.ListBucketsResponse( next_page_token="next_page_token_value", ) - response = client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == "next_page_token_value" @@ -497,7 +522,7 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -505,7 +530,6 @@ def test_list_buckets_empty_call(): client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() @@ -514,7 +538,7 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -527,18 +551,15 @@ async def test_list_buckets_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -548,17 +569,17 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() - client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -573,11 +594,14 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -585,7 +609,6 @@ async def test_list_buckets_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse() ) - await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -599,13 +622,12 @@ async def test_list_buckets_field_headers_async(): def test_list_buckets_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_buckets(parent="parent_value",) @@ -614,12 +636,11 @@ def test_list_buckets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -631,7 +652,9 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -649,13 +672,14 @@ async def test_list_buckets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -666,7 +690,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -704,7 +728,7 @@ def test_list_buckets_pager(): def test_list_buckets_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -734,7 +758,9 @@ def test_list_buckets_pages(): @pytest.mark.asyncio async def test_list_buckets_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -771,7 +797,9 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -807,7 +835,7 @@ def test_get_bucket( transport: str = "grpc", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -824,27 +852,19 @@ def test_get_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.get_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -856,7 +876,7 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -864,7 +884,6 @@ def test_get_bucket_empty_call(): client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() @@ -873,7 +892,7 @@ async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -892,26 +911,19 @@ async def test_get_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -921,17 +933,17 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.get_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -946,11 +958,14 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -958,7 +973,6 @@ async def test_get_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -975,7 +989,7 @@ def test_create_bucket( transport: str = "grpc", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -992,27 +1006,19 @@ def test_create_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1024,7 +1030,7 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1032,7 +1038,6 @@ def test_create_bucket_empty_call(): client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() @@ -1041,7 +1046,7 @@ async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1060,26 +1065,19 @@ async def test_create_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1089,17 +1087,17 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1114,11 +1112,14 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1126,7 +1127,6 @@ async def test_create_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1143,7 +1143,7 @@ def test_update_bucket( transport: str = "grpc", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1160,27 +1160,19 @@ def test_update_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1192,7 +1184,7 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1200,7 +1192,6 @@ def test_update_bucket_empty_call(): client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() @@ -1209,7 +1200,7 @@ async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1228,26 +1219,19 @@ async def test_update_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1257,17 +1241,17 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1282,11 +1266,14 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1294,7 +1281,6 @@ async def test_update_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1311,7 +1297,7 @@ def test_delete_bucket( transport: str = "grpc", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1322,13 +1308,11 @@ def test_delete_bucket( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1343,7 +1327,7 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1351,7 +1335,6 @@ def test_delete_bucket_empty_call(): client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() @@ -1360,7 +1343,7 @@ async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1371,13 +1354,11 @@ async def test_delete_bucket_async( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1390,17 +1371,17 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None - client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1415,17 +1396,19 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -1442,7 +1425,7 @@ def test_undelete_bucket( transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1453,13 +1436,11 @@ def test_undelete_bucket( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1474,7 +1455,7 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1482,7 +1463,6 @@ def test_undelete_bucket_empty_call(): client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() @@ -1491,7 +1471,7 @@ async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1502,13 +1482,11 @@ async def test_undelete_bucket_async( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1521,17 +1499,17 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None - client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -1546,17 +1524,19 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1573,7 +1553,7 @@ def test_list_views( transport: str = "grpc", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1586,19 +1566,15 @@ def test_list_views( call.return_value = logging_config.ListViewsResponse( next_page_token="next_page_token_value", ) - response = client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" @@ -1610,7 +1586,7 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1618,7 +1594,6 @@ def test_list_views_empty_call(): client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() @@ -1627,7 +1602,7 @@ async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1640,18 +1615,15 @@ async def test_list_views_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1661,17 +1633,17 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() - client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -1686,11 +1658,14 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1698,7 +1673,6 @@ async def test_list_views_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse() ) - await client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -1712,13 +1686,12 @@ async def test_list_views_field_headers_async(): def test_list_views_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_views(parent="parent_value",) @@ -1727,12 +1700,11 @@ def test_list_views_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_views_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1744,7 +1716,9 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1762,13 +1736,14 @@ async def test_list_views_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_views_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1779,7 +1754,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1817,7 +1792,7 @@ def test_list_views_pager(): def test_list_views_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1847,7 +1822,9 @@ def test_list_views_pages(): @pytest.mark.asyncio async def test_list_views_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1884,7 +1861,9 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1918,7 +1897,7 @@ async def test_list_views_async_pages(): def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1931,23 +1910,17 @@ def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRe call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -1959,7 +1932,7 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1967,7 +1940,6 @@ def test_get_view_empty_call(): client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() @@ -1976,7 +1948,7 @@ async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1993,22 +1965,17 @@ async def test_get_view_async( filter="filter_value", ) ) - response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2018,17 +1985,17 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() - client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -2043,11 +2010,14 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2055,7 +2025,6 @@ async def test_get_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -2072,7 +2041,7 @@ def test_create_view( transport: str = "grpc", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2085,23 +2054,17 @@ def test_create_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2113,7 +2076,7 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2121,7 +2084,6 @@ def test_create_view_empty_call(): client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() @@ -2130,7 +2092,7 @@ async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2147,22 +2109,17 @@ async def test_create_view_async( filter="filter_value", ) ) - response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2172,17 +2129,17 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() - client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -2197,11 +2154,14 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2209,7 +2169,6 @@ async def test_create_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2226,7 +2185,7 @@ def test_update_view( transport: str = "grpc", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2239,23 +2198,17 @@ def test_update_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2267,7 +2220,7 @@ def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2275,7 +2228,6 @@ def test_update_view_empty_call(): client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() @@ -2284,7 +2236,7 @@ async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2301,22 +2253,17 @@ async def test_update_view_async( filter="filter_value", ) ) - response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2326,17 +2273,17 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() - client.update_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2351,11 +2298,14 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2363,7 +2313,6 @@ async def test_update_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.update_view(request) # Establish that the underlying gRPC stub method was called. @@ -2380,7 +2329,7 @@ def test_delete_view( transport: str = "grpc", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2391,13 +2340,11 @@ def test_delete_view( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. @@ -2412,7 +2359,7 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2420,7 +2367,6 @@ def test_delete_view_empty_call(): client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() @@ -2429,7 +2375,7 @@ async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2440,13 +2386,11 @@ async def test_delete_view_async( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. 
@@ -2459,17 +2403,17 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None - client.delete_view(request) # Establish that the underlying gRPC stub method was called. @@ -2484,17 +2428,19 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) # Establish that the underlying gRPC stub method was called. @@ -2511,7 +2457,7 @@ def test_list_sinks( transport: str = "grpc", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2524,19 +2470,15 @@ def test_list_sinks( call.return_value = logging_config.ListSinksResponse( next_page_token="next_page_token_value", ) - response = client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == "next_page_token_value" @@ -2548,7 +2490,7 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2556,7 +2498,6 @@ def test_list_sinks_empty_call(): client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() @@ -2565,7 +2506,7 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2578,18 +2519,15 @@ async def test_list_sinks_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse(next_page_token="next_page_token_value",) ) - response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2599,17 +2537,17 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() - client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2624,11 +2562,14 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2636,7 +2577,6 @@ async def test_list_sinks_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse() ) - await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2650,13 +2590,12 @@ async def test_list_sinks_field_headers_async(): def test_list_sinks_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_sinks(parent="parent_value",) @@ -2665,12 +2604,11 @@ def test_list_sinks_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_sinks_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2682,7 +2620,9 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2700,13 +2640,14 @@ async def test_list_sinks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2717,7 +2658,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2755,7 +2696,7 @@ def test_list_sinks_pager(): def test_list_sinks_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2785,7 +2726,9 @@ def test_list_sinks_pages(): @pytest.mark.asyncio async def test_list_sinks_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2822,7 +2765,9 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2856,7 +2801,7 @@ async def test_list_sinks_async_pages(): def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2879,33 +2824,22 @@ def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRe use_partitioned_tables=True ), ) - response = client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2917,7 +2851,7 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2925,7 +2859,6 @@ def test_get_sink_empty_call(): client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() @@ -2934,7 +2867,7 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2956,32 +2889,22 @@ async def test_get_sink_async( include_children=True, ) ) - response = await client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2991,17 +2914,17 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3016,11 +2939,14 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3028,7 +2954,6 @@ async def test_get_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3042,13 +2967,12 @@ async def test_get_sink_field_headers_async(): def test_get_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_sink(sink_name="sink_name_value",) @@ -3057,12 +2981,11 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_get_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3074,7 +2997,9 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3092,13 +3017,14 @@ async def test_get_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3112,7 +3038,7 @@ def test_create_sink( transport: str = "grpc", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3135,33 +3061,22 @@ def test_create_sink( use_partitioned_tables=True ), ) - response = client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3173,7 +3088,7 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3181,7 +3096,6 @@ def test_create_sink_empty_call(): client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() @@ -3190,7 +3104,7 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3212,32 +3126,22 @@ async def test_create_sink_async( include_children=True, ) ) - response = await client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3247,17 +3151,17 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
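The asyncio variants in these hunks differ from the sync ones in one mechanical detail: the canned response is wrapped in `grpc_helpers_async.FakeUnaryUnaryCall`, so the client can `await` the mocked stub just as it would a real `grpc.aio` call. A self-contained sketch, with the same assumed import paths as above:

    from unittest import mock

    import pytest
    from google.api_core import grpc_helpers_async
    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import logging_config


    @pytest.mark.asyncio
    async def test_create_sink_async_sketch():
        client = ConfigServiceV2AsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
        )

        with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
            # FakeUnaryUnaryCall makes the mocked stub's return value awaitable.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                logging_config.LogSink(name="name_value")
            )
            response = await client.create_sink(
                request=logging_config.CreateSinkRequest()
            )

        assert len(call.mock_calls)
        assert response.name == "name_value"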
request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3272,11 +3176,14 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3284,7 +3191,6 @@ async def test_create_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3298,13 +3204,12 @@ async def test_create_sink_field_headers_async(): def test_create_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_sink( @@ -3315,14 +3220,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") def test_create_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3336,7 +3239,9 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3356,15 +3261,15 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
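The `*_flattened` and `*_flattened_error` pairs exercise the convenience signature: keyword arguments are folded into a request object by the client, and mixing an explicit request object with flattened fields is rejected with `ValueError`. A sketch of both halves, imports as in the first sketch plus `pytest`:

    def test_create_sink_flattened_sketch():
        client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

        with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
            call.return_value = logging_config.LogSink()
            client.create_sink(
                parent="parent_value",
                sink=logging_config.LogSink(name="name_value"),
            )

        # The flattened keywords landed on the synthesized request object.
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].sink == logging_config.LogSink(name="name_value")

        # A request object plus flattened fields is ambiguous and must raise.
        with pytest.raises(ValueError):
            client.create_sink(
                logging_config.CreateSinkRequest(),
                parent="parent_value",
            )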
@@ -3380,7 +3285,7 @@ def test_update_sink( transport: str = "grpc", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3403,33 +3308,22 @@ def test_update_sink( use_partitioned_tables=True ), ) - response = client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3441,7 +3335,7 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3449,7 +3343,6 @@ def test_update_sink_empty_call(): client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() @@ -3458,7 +3351,7 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3480,32 +3373,22 @@ async def test_update_sink_async( include_children=True, ) ) - response = await client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3515,17 +3398,17 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
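The `*_empty_call` tests are, as their comment says, a coverage failsafe: invoking the method with no request object and no flattened fields must still reach the stub, carrying a default-constructed request. A sketch, with the same assumed imports:

    def test_update_sink_empty_call_sketch():
        client = ConfigServiceV2Client(
            credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
        )

        with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
            call.return_value = logging_config.LogSink()
            client.update_sink()

        call.assert_called()
        # With nothing passed in, the client falls back to a default request.
        _, args, _ = call.mock_calls[0]
        assert args[0] == logging_config.UpdateSinkRequest()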
request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3540,11 +3423,14 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3552,7 +3438,6 @@ async def test_update_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3566,35 +3451,31 @@ async def test_update_sink_field_headers_async(): def test_update_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3603,13 +3484,15 @@ def test_update_sink_flattened_error(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -3624,24 +3507,23 @@ async def test_update_sink_flattened_async(): response = await client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3650,7 +3532,7 @@ async def test_update_sink_flattened_error_async(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3658,7 +3540,7 @@ def test_delete_sink( transport: str = "grpc", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3669,13 +3551,11 @@ def test_delete_sink( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3690,7 +3570,7 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3698,7 +3578,6 @@ def test_delete_sink_empty_call(): client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() @@ -3707,7 +3586,7 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3718,13 +3597,11 @@ async def test_delete_sink_async( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. 
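Alongside the credentials alias, the update hunks rename the well-known-type import: `field_mask.FieldMask` becomes `field_mask_pb2.FieldMask`, i.e. the protobuf module is used under its generated `_pb2` name, presumably via `from google.protobuf import field_mask_pb2`. The mask names which `LogSink` fields an update is allowed to overwrite. A sketch of the flattened update with a mask:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import field_mask_pb2


    def test_update_sink_update_mask_sketch():
        client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

        with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
            call.return_value = logging_config.LogSink()
            client.update_sink(
                sink_name="sink_name_value",
                sink=logging_config.LogSink(name="name_value"),
                # "paths_value" mirrors the placeholder used by the generated tests.
                update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
            )

        _, args, _ = call.mock_calls[0]
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])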
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3737,17 +3614,17 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None - client.delete_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3762,17 +3639,19 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3786,13 +3665,12 @@ async def test_delete_sink_field_headers_async(): def test_delete_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_sink(sink_name="sink_name_value",) @@ -3801,12 +3679,11 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3818,7 +3695,9 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
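The delete tests differ from the rest only in their return type: `DeleteSink` maps to `google.protobuf.Empty`, so the stub's return value is `None` (wrapped as `FakeUnaryUnaryCall(None)` on the async side) and the test checks that `None` comes back out of the client. A sketch of the sync half, imports as in the first sketch:

    def test_delete_sink_sketch():
        client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

        with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
            # Empty response: the client surfaces it as a plain None.
            call.return_value = None
            response = client.delete_sink(request=logging_config.DeleteSinkRequest())

        assert len(call.mock_calls) == 1
        assert response is None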
with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -3834,13 +3713,14 @@ async def test_delete_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3854,7 +3734,7 @@ def test_list_exclusions( transport: str = "grpc", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3867,19 +3747,15 @@ def test_list_exclusions( call.return_value = logging_config.ListExclusionsResponse( next_page_token="next_page_token_value", ) - response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == "next_page_token_value" @@ -3891,7 +3767,7 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3899,7 +3775,6 @@ def test_list_exclusions_empty_call(): client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() @@ -3908,7 +3783,7 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3923,18 +3798,15 @@ async def test_list_exclusions_async( next_page_token="next_page_token_value", ) ) - response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3944,17 +3816,17 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -3969,11 +3841,14 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3981,7 +3856,6 @@ async def test_list_exclusions_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListExclusionsResponse() ) - await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -3995,13 +3869,12 @@ async def test_list_exclusions_field_headers_async(): def test_list_exclusions_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_exclusions(parent="parent_value",) @@ -4010,12 +3883,11 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4027,7 +3899,9 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4045,13 +3919,14 @@ async def test_list_exclusions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4062,7 +3937,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4105,7 +3980,7 @@ def test_list_exclusions_pager(): def test_list_exclusions_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4140,7 +4015,9 @@ def test_list_exclusions_pages(): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4182,7 +4059,9 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4223,7 +4102,7 @@ def test_get_exclusion( transport: str = "grpc", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4239,25 +4118,18 @@ def test_get_exclusion( filter="filter_value", disabled=True, ) - response = client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4269,7 +4141,7 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
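`list_exclusions` returns a pager rather than a bare response, and the pager tests stack several canned pages on the stub via `side_effect`, then check that iteration splices them together and stops once a page arrives without a `next_page_token`. A sketch of the sync pager test, imports as in the first sketch; the page contents are illustrative:

    def test_list_exclusions_pager_sketch():
        client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

        with mock.patch.object(
            type(client.transport.list_exclusions), "__call__"
        ) as call:
            # Two pages; the second carries no token, so iteration stops there.
            call.side_effect = (
                logging_config.ListExclusionsResponse(
                    exclusions=[
                        logging_config.LogExclusion(),
                        logging_config.LogExclusion(),
                    ],
                    next_page_token="abc",
                ),
                logging_config.ListExclusionsResponse(
                    exclusions=[logging_config.LogExclusion()],
                ),
            )
            results = list(client.list_exclusions(request={}))

        assert len(results) == 3
        assert all(isinstance(i, logging_config.LogExclusion) for i in results)
        # One stub call per page fetched.
        assert len(call.mock_calls) == 2

The async variants walk the same pages through `ListExclusionsAsyncPager`, consuming them with `async for` instead of a plain loop.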
@@ -4277,7 +4149,6 @@ def test_get_exclusion_empty_call(): client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() @@ -4286,7 +4157,7 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4304,24 +4175,18 @@ async def test_get_exclusion_async( disabled=True, ) ) - response = await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4331,17 +4196,17 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4356,11 +4221,14 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4368,7 +4236,6 @@ async def test_get_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4382,13 +4249,12 @@ async def test_get_exclusion_field_headers_async(): def test_get_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_exclusion(name="name_value",) @@ -4397,12 +4263,11 @@ def test_get_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4414,7 +4279,9 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4432,13 +4299,14 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4452,7 +4320,7 @@ def test_create_exclusion( transport: str = "grpc", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4468,25 +4336,18 @@ def test_create_exclusion( filter="filter_value", disabled=True, ) - response = client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4498,7 +4359,7 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4506,7 +4367,6 @@ def test_create_exclusion_empty_call(): client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() @@ -4515,7 +4375,7 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4533,24 +4393,18 @@ async def test_create_exclusion_async( disabled=True, ) ) - response = await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4560,17 +4414,17 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4585,11 +4439,14 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4597,7 +4454,6 @@ async def test_create_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4611,13 +4467,12 @@ async def test_create_exclusion_field_headers_async(): def test_create_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_exclusion( @@ -4629,14 +4484,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4650,7 +4503,9 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -4671,15 +4526,15 @@ async def test_create_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4695,7 +4550,7 @@ def test_update_exclusion( transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4711,25 +4566,18 @@ def test_update_exclusion( filter="filter_value", disabled=True, ) - response = client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4741,7 +4589,7 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4749,7 +4597,6 @@ def test_update_exclusion_empty_call(): client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() @@ -4758,7 +4605,7 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4776,24 +4623,18 @@ async def test_update_exclusion_async( disabled=True, ) ) - response = await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4803,17 +4644,17 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4828,11 +4669,14 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4840,7 +4684,6 @@ async def test_update_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4854,35 +4697,31 @@ async def test_update_exclusion_field_headers_async(): def test_update_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4891,13 +4730,15 @@ def test_update_exclusion_flattened_error(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -4912,24 +4753,23 @@ async def test_update_exclusion_flattened_async(): response = await client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4938,7 +4778,7 @@ async def test_update_exclusion_flattened_error_async(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4946,7 +4786,7 @@ def test_delete_exclusion( transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4957,13 +4797,11 @@ def test_delete_exclusion( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. @@ -4978,7 +4816,7 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4986,7 +4824,6 @@ def test_delete_exclusion_empty_call(): client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() @@ -4995,7 +4832,7 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5006,13 +4843,11 @@ async def test_delete_exclusion_async( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. @@ -5025,17 +4860,17 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None - client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. 
@@ -5050,17 +4885,19 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -5074,13 +4911,12 @@ async def test_delete_exclusion_field_headers_async(): def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_exclusion(name="name_value",) @@ -5089,12 +4925,11 @@ def test_delete_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5106,7 +4941,9 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5122,13 +4959,14 @@ async def test_delete_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5142,7 +4980,7 @@ def test_get_cmek_settings( transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5159,23 +4997,17 @@ def test_get_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5187,7 +5019,7 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5197,7 +5029,6 @@ def test_get_cmek_settings_empty_call(): client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() @@ -5206,7 +5037,7 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5225,22 +5056,17 @@ async def test_get_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5250,11 +5076,12 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5262,7 +5089,6 @@ def test_get_cmek_settings_field_headers(): type(client.transport.get_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
@@ -5277,11 +5103,14 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5291,7 +5120,6 @@ async def test_get_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5308,7 +5136,7 @@ def test_update_cmek_settings( transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5325,23 +5153,17 @@ def test_update_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5353,7 +5175,7 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5363,7 +5185,6 @@ def test_update_cmek_settings_empty_call(): client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() @@ -5373,7 +5194,7 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5392,22 +5213,17 @@ async def test_update_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5417,11 +5233,12 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5429,7 +5246,6 @@ def test_update_cmek_settings_field_headers(): type(client.transport.update_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5444,11 +5260,14 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5458,7 +5277,6 @@ async def test_update_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5474,16 +5292,16 @@ async def test_update_cmek_settings_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5493,7 +5311,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5504,7 +5322,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ConfigServiceV2Client(transport=transport) assert client.transport is transport @@ -5513,13 +5331,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -5534,23 +5352,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -5562,7 +5380,7 @@ def test_config_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -5597,15 +5415,42 @@ def test_config_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", + ) + + 
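# --- Editor's note: an illustrative sketch, not part of the patch. The
# gte/lt test pairs being added here are selected at collection time by
# `pytest.mark.skipif` markers built from the installed library
# versions (the marker definitions appear near the top of each test
# module touched by this patch). A minimal standalone version of that
# gating mechanism; the `_GOOGLE_AUTH_VERSION` value below is a
# placeholder, whereas the real modules import it from the transport's
# base module:

import packaging.version
import pytest

_GOOGLE_AUTH_VERSION = "1.24.0"  # placeholder for the probed version

requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION)
    < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)


@requires_google_auth_gte_1_25_0
def test_runs_only_on_new_google_auth():
    # google-auth >= 1.25.0 grew a separate `default_scopes` keyword,
    # which is why the new-path tests assert on `default_scopes` while
    # the old-path (_old_google_auth) tests assert on `scopes` directly.
    assert True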
+@requires_google_auth_lt_1_25_0 +def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -5623,19 +5468,38 @@ def test_config_service_v2_base_transport_with_credentials_file(): def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=( @@ -5648,14 +5512,46 @@ def test_config_service_v2_auth_adc(): ) -def test_config_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -5667,6 +5563,125 @@ def test_config_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -5675,7 +5690,7 @@ def test_config_service_v2_transport_auth_adc(): ], ) def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -5719,7 +5734,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ def test_config_service_v2_host_no_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -5729,7 +5744,7 @@ def test_config_service_v2_host_no_port(): def test_config_service_v2_host_with_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -5785,9 +5800,9 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -5873,7 +5888,6 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project,) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -5894,7 +5908,6 @@ def test_log_bucket_path(): project = "whelk" location = "octopus" bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( project=project, location=location, bucket=bucket, ) @@ -5918,7 +5931,6 @@ def test_parse_log_bucket_path(): def test_log_exclusion_path(): project = "winkle" exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format( project=project, exclusion=exclusion, ) @@ -5941,7 +5953,6 @@ def test_parse_log_exclusion_path(): def test_log_sink_path(): project = "squid" sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -5964,7 +5975,6 @@ def test_log_view_path(): location = "nudibranch" bucket = "cuttlefish" view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( project=project, location=location, bucket=bucket, view=view, ) @@ -5988,7 +5998,6 @@ def test_parse_log_view_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6009,7 +6018,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6028,7 +6036,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6047,7 +6054,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = 
"projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6067,7 +6073,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -6094,7 +6099,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ConfigServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6103,6 +6108,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 66f22621cf9d..5de01cf2163c 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,14 +23,14 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( LoggingServiceV2AsyncClient, @@ -39,15 +38,45 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.logging.type import http_request_pb2 as http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore from google.oauth2 import service_account 
-from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -99,7 +128,7 @@ def test__get_default_mtls_endpoint(): "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -116,7 +145,7 @@ def test_logging_service_v2_client_from_service_account_info(client_class): "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -169,7 +198,7 @@ def test_logging_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -465,7 +494,7 @@ def test_logging_service_v2_client_client_options_from_dict(): def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -476,13 +505,11 @@ def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogReque with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_log(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -497,7 +524,7 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -505,7 +532,6 @@ def test_delete_log_empty_call(): client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() @@ -514,7 +540,7 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -525,13 +551,11 @@ async def test_delete_log_async( with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -544,17 +568,17 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None - client.delete_log(request) # Establish that the underlying gRPC stub method was called. @@ -570,18 +594,18 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log(request) # Establish that the underlying gRPC stub method was called. 
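# --- Editor's note: an illustrative sketch, not part of the patch. The
# async variants above wrap their canned return values in
# `grpc_helpers_async.FakeUnaryUnaryCall` so that awaiting the mocked
# stub behaves like a real unary-unary RPC. A condensed rendering of
# that pattern (the `demo_` name is ours; it assumes pytest-asyncio, as
# the surrounding tests do):

import mock
import pytest

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2AsyncClient,
)
from google.cloud.logging_v2.types import logging


@pytest.mark.asyncio
async def demo_async_unary_pattern():
    client = LoggingServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
        # FakeUnaryUnaryCall(None) is awaitable and resolves to None,
        # mirroring DeleteLog's Empty response.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_log(logging.DeleteLogRequest())
    _, args, _ = call.mock_calls[0]
    assert args[0] == logging.DeleteLogRequest()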
@@ -595,13 +619,12 @@ async def test_delete_log_field_headers_async(): def test_delete_log_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log(log_name="log_name_value",) @@ -610,12 +633,11 @@ def test_delete_log_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" def test_delete_log_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -628,7 +650,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -645,14 +667,13 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -667,7 +688,7 @@ def test_write_log_entries( transport: str = "grpc", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -680,17 +701,14 @@ def test_write_log_entries( ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - response = client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) @@ -702,7 +720,7 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
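# --- Editor's note: an illustrative sketch, not part of the patch. The
# *_flattened / *_flattened_error pairs above encode one GAPIC rule: a
# method accepts either a request object or flattened keyword fields,
# never both at once. Condensed (the `demo_` name is ours):

import mock
import pytest

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2Client,
)
from google.cloud.logging_v2.types import logging


def demo_flattened_rule():
    client = LoggingServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
        call.return_value = None
        # Flattened form: the keyword is copied onto the request proto.
        client.delete_log(log_name="log_name_value")
        _, args, _ = call.mock_calls[0]
        assert args[0].log_name == "log_name_value"
    # Mixing both styles raises.
    with pytest.raises(ValueError):
        client.delete_log(logging.DeleteLogRequest(), log_name="log_name_value")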
@@ -712,7 +730,6 @@ def test_write_log_entries_empty_call(): client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() @@ -721,7 +738,7 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -736,13 +753,11 @@ async def test_write_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.WriteLogEntriesResponse() ) - response = await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. @@ -755,7 +770,7 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -763,12 +778,11 @@ def test_write_log_entries_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -777,20 +791,16 @@ def test_write_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -798,7 +808,7 @@ def test_write_log_entries_flattened_error(): client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -807,7 +817,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
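# --- Editor's note: an illustrative sketch, not part of the patch. A
# recurring mechanical change in these hunks is dropping the
# "as <alias>" style for well-known proto imports, so
# `monitored_resource_pb2` is now referenced under its real module
# name. The flattened write_log_entries call then reads as below (the
# `resource` variable is ours; the argument values mirror the tests):

from google.api import monitored_resource_pb2  # type: ignore

resource = monitored_resource_pb2.MonitoredResource(type="type__value")
# Used as, e.g.:
#   client.write_log_entries(
#       log_name="log_name_value",
#       resource=resource,
#       labels={"key_value": "value_value"},
#       entries=[log_entry.LogEntry(log_name="log_name_value")],
#   )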
@@ -824,7 +834,7 @@ async def test_write_log_entries_flattened_async(): # using the keyword arguments to the method. response = await client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -833,22 +843,18 @@ async def test_write_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -857,7 +863,7 @@ async def test_write_log_entries_flattened_error_async(): await client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -867,7 +873,7 @@ def test_list_log_entries( transport: str = "grpc", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -880,19 +886,15 @@ def test_list_log_entries( call.return_value = logging.ListLogEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == "next_page_token_value" @@ -904,7 +906,7 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
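# --- Editor's note: an illustrative sketch, not part of the patch. The
# pager tests a little further down feed the mocked stub a sequence of
# responses via `side_effect`; iterating the returned pager then follows
# `next_page_token` until it is empty and yields the concatenated items.
# A condensed rendering of that mechanism (the `demo_` name is ours):

import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2Client,
)
from google.cloud.logging_v2.types import log_entry, logging


def demo_pager_mechanics():
    client = LoggingServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.list_log_entries), "__call__"
    ) as call:
        call.side_effect = (
            logging.ListLogEntriesResponse(
                entries=[log_entry.LogEntry(), log_entry.LogEntry()],
                next_page_token="abc",
            ),
            logging.ListLogEntriesResponse(entries=[log_entry.LogEntry()]),
        )
        pager = client.list_log_entries(request={})
        results = list(pager)
    # Two underlying RPCs were made; three entries came back in total.
    assert len(call.mock_calls) == 2
    assert len(results) == 3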
@@ -912,7 +914,6 @@ def test_list_log_entries_empty_call(): client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() @@ -921,7 +922,7 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -934,18 +935,15 @@ async def test_list_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -955,13 +953,12 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( @@ -974,16 +971,13 @@ def test_list_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -999,7 +993,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1022,18 +1016,15 @@ async def test_list_log_entries_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1048,7 +1039,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1083,7 +1074,7 @@ def test_list_log_entries_pager(): def test_list_log_entries_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1113,7 +1104,9 @@ def test_list_log_entries_pages(): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1150,7 +1143,9 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1182,7 @@ def test_list_monitored_resource_descriptors( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1202,19 +1197,15 @@ def test_list_monitored_resource_descriptors( call.return_value = logging.ListMonitoredResourceDescriptorsResponse( next_page_token="next_page_token_value", ) - response = client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == "next_page_token_value" @@ -1226,7 +1217,7 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1236,7 +1227,6 @@ def test_list_monitored_resource_descriptors_empty_call(): client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() @@ -1246,7 +1236,7 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1263,18 +1253,15 @@ async def test_list_monitored_resource_descriptors_async( next_page_token="next_page_token_value", ) ) - response = await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1284,7 +1271,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1294,9 +1281,9 @@ def test_list_monitored_resource_descriptors_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1305,14 +1292,14 @@ def test_list_monitored_resource_descriptors_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1326,13 +1313,13 @@ def test_list_monitored_resource_descriptors_pager(): results = [i for i in pager] assert len(results) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in results ) def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1342,9 +1329,9 @@ def test_list_monitored_resource_descriptors_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1353,14 +1340,14 @@ def test_list_monitored_resource_descriptors_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1372,7 +1359,9 @@ def test_list_monitored_resource_descriptors_pages(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1384,9 +1373,9 @@ async def test_list_monitored_resource_descriptors_async_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1395,14 +1384,14 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1415,14 +1404,16 @@ async def test_list_monitored_resource_descriptors_async_pager(): assert len(responses) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in responses ) @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1434,9 +1425,9 @@ async def test_list_monitored_resource_descriptors_async_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1445,14 +1436,14 @@ async def test_list_monitored_resource_descriptors_async_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1468,7 +1459,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1481,21 +1472,16 @@ def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest call.return_value = logging.ListLogsResponse( log_names=["log_names_value"], next_page_token="next_page_token_value", ) - response = client.list_logs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ["log_names_value"] - assert response.next_page_token == "next_page_token_value" @@ -1507,7 +1493,7 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1515,7 +1501,6 @@ def test_list_logs_empty_call(): client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() @@ -1524,7 +1509,7 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1539,20 +1524,16 @@ async def test_list_logs_async( log_names=["log_names_value"], next_page_token="next_page_token_value", ) ) - response = await client.list_logs(request) # Establish that the underlying gRPC stub method was called. 
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging.ListLogsRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLogsAsyncPager)
-
     assert response.log_names == ["log_names_value"]
-
     assert response.next_page_token == "next_page_token_value"


@@ -1562,17 +1543,17 @@ async def test_list_logs_async_from_dict():

 def test_list_logs_field_headers():
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging.ListLogsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
         call.return_value = logging.ListLogsResponse()
-
         client.list_logs(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1588,12 +1569,13 @@ def test_list_logs_field_headers():
 @pytest.mark.asyncio
 async def test_list_logs_field_headers_async():
     client = LoggingServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging.ListLogsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1601,7 +1583,6 @@ async def test_list_logs_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             logging.ListLogsResponse()
         )
-
         await client.list_logs(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1615,13 +1596,12 @@ async def test_list_logs_field_headers_async():

 def test_list_logs_flattened():
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging.ListLogsResponse()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_logs(parent="parent_value",)
@@ -1630,12 +1610,11 @@ def test_list_logs_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"


 def test_list_logs_flattened_error():
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1648,7 +1627,7 @@ def test_list_logs_flattened_error():
 @pytest.mark.asyncio
 async def test_list_logs_flattened_async():
     client = LoggingServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1667,14 +1646,13 @@ async def test_list_logs_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"


 @pytest.mark.asyncio
 async def test_list_logs_flattened_error_async():
     client = LoggingServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -1686,7 +1664,7 @@ async def test_list_logs_flattened_error_async():

 def test_list_logs_pager():
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
@@ -1715,7 +1693,7 @@ def test_list_logs_pager():

 def test_list_logs_pages():
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
@@ -1736,7 +1714,9 @@ def test_list_logs_pages():

 @pytest.mark.asyncio
 async def test_list_logs_async_pager():
-    client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1764,7 +1744,9 @@ async def test_list_logs_async_pager():

 @pytest.mark.asyncio
 async def test_list_logs_async_pages():
-    client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1791,26 +1773,23 @@ def test_tail_log_entries(
     transport: str = "grpc", request_type=logging.TailLogEntriesRequest
 ):
     client = LoggingServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
     request = request_type()
-
     requests = [request]

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = iter([logging.TailLogEntriesResponse()])
-
         response = client.tail_log_entries(iter(requests))

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert next(args[0]) == request

     # Establish that the response is the type that we expect.
@@ -1827,13 +1806,12 @@ async def test_tail_log_entries_async(
     transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest
 ):
     client = LoggingServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
     request = request_type()
-
     requests = [request]

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1843,13 +1821,11 @@ async def test_tail_log_entries_async(
         call.return_value.read = mock.AsyncMock(
             side_effect=[logging.TailLogEntriesResponse()]
         )
-
         response = await client.tail_log_entries(iter(requests))

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert next(args[0]) == request

     # Establish that the response is the type that we expect.
@@ -1865,16 +1841,16 @@ async def test_tail_log_entries_async_from_dict():
 def test_credentials_transport_error():
     # It is an error to provide credentials and a transport instance.
     transport = transports.LoggingServiceV2GrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = LoggingServiceV2Client(
-            credentials=credentials.AnonymousCredentials(), transport=transport,
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
         )

     # It is an error to provide a credentials file and a transport instance.
     transport = transports.LoggingServiceV2GrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = LoggingServiceV2Client(
@@ -1884,7 +1860,7 @@ def test_credentials_transport_error():

     # It is an error to provide scopes and a transport instance.
     transport = transports.LoggingServiceV2GrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = LoggingServiceV2Client(
@@ -1895,7 +1871,7 @@ def test_credentials_transport_error():

 def test_transport_instance():
     # A client may be instantiated with a custom transport instance.
     transport = transports.LoggingServiceV2GrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     client = LoggingServiceV2Client(transport=transport)
     assert client.transport is transport
@@ -1904,13 +1880,13 @@ def test_transport_instance():

 def test_transport_get_channel():
     # A client may be instantiated with a custom transport instance.
     transport = transports.LoggingServiceV2GrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel

     transport = transports.LoggingServiceV2GrpcAsyncIOTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel
@@ -1925,23 +1901,23 @@ def test_transport_get_channel():
 )
 def test_transport_adc(transport_class):
     # Test default credentials are used if not provided.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport_class()
         adc.assert_called_once()


 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)
     assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,)


 def test_logging_service_v2_base_transport_error():
     # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(exceptions.DuplicateCredentialArgs):
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
         transport = transports.LoggingServiceV2Transport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
             credentials_file="credentials.json",
         )

@@ -1953,7 +1929,7 @@ def test_logging_service_v2_base_transport():
     ) as Transport:
         Transport.return_value = None
         transport = transports.LoggingServiceV2Transport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
         )

     # Every method on the transport should just blindly
@@ -1971,15 +1947,43 @@ def test_logging_service_v2_base_transport():
             getattr(transport, method)(request=object())


+@requires_google_auth_gte_1_25_0
 def test_logging_service_v2_base_transport_with_credentials_file():
     # Instantiate the base transport with a credentials file
     with mock.patch.object(
-        auth, "load_credentials_from_file"
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.LoggingServiceV2Transport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
     ) as load_creds, mock.patch(
         "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        load_creds.return_value = (credentials.AnonymousCredentials(), None)
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.LoggingServiceV2Transport(
             credentials_file="credentials.json", quota_project_id="octopus",
         )
@@ -1998,19 +2002,39 @@ def test_logging_service_v2_base_transport_with_credentials_file():

 def test_logging_service_v2_base_transport_with_adc():
     # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(auth, "default") as adc, mock.patch(
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
         "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.LoggingServiceV2Transport()
         adc.assert_called_once()


+@requires_google_auth_gte_1_25_0
 def test_logging_service_v2_auth_adc():
     # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        LoggingServiceV2Client()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_logging_service_v2_auth_adc_old_google_auth():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         LoggingServiceV2Client()
         adc.assert_called_once_with(
             scopes=(
@@ -2024,14 +2048,47 @@ def test_logging_service_v2_auth_adc():
     )


-def test_logging_service_v2_transport_auth_adc():
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LoggingServiceV2GrpcTransport,
+        transports.LoggingServiceV2GrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_logging_service_v2_transport_auth_adc(transport_class):
     # If credentials and host are not provided, the transport class should use
     # ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
-        transports.LoggingServiceV2GrpcTransport(
-            host="squid.clam.whelk", quota_project_id="octopus"
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            quota_project_id="octopus",
         )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LoggingServiceV2GrpcTransport,
+        transports.LoggingServiceV2GrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
         adc.assert_called_once_with(
             scopes=(
                 "https://www.googleapis.com/auth/cloud-platform",
@@ -2044,6 +2101,127 @@ def test_logging_service_v2_transport_auth_adc():
     )


+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.LoggingServiceV2GrpcTransport, grpc_helpers),
+        (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_gte_1_26_0
+def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "logging.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            scopes=["1", "2"],
+            default_host="logging.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.LoggingServiceV2GrpcTransport, grpc_helpers),
+        (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_logging_service_v2_transport_create_channel_old_api_core(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus")
+
+        create_channel.assert_called_with(
+            "logging.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=(
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/cloud-platform.read-only",
+                "https://www.googleapis.com/auth/logging.admin",
+                "https://www.googleapis.com/auth/logging.read",
+                "https://www.googleapis.com/auth/logging.write",
+            ),
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.LoggingServiceV2GrpcTransport, grpc_helpers),
+        (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_logging_service_v2_transport_create_channel_user_scopes(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "logging.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=["1", "2"],
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
 @pytest.mark.parametrize(
     "transport_class",
     [
         transports.LoggingServiceV2GrpcTransport,
         transports.LoggingServiceV2GrpcAsyncIOTransport,
     ],
 )
 def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class):
-    cred = credentials.AnonymousCredentials()
+    cred = ga_credentials.AnonymousCredentials()

     # Check ssl_channel_credentials is used if provided.
     with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
@@ -2097,7 +2275,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport

 def test_logging_service_v2_host_no_port():
     client = LoggingServiceV2Client(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="logging.googleapis.com"
         ),
@@ -2107,7 +2285,7 @@ def test_logging_service_v2_host_no_port():

 def test_logging_service_v2_host_with_port():
     client = LoggingServiceV2Client(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="logging.googleapis.com:8000"
         ),
@@ -2163,9 +2341,9 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source(
         mock_grpc_channel = mock.Mock()
         grpc_create_channel.return_value = mock_grpc_channel

-        cred = credentials.AnonymousCredentials()
+        cred = ga_credentials.AnonymousCredentials()
         with pytest.warns(DeprecationWarning):
-            with mock.patch.object(auth, "default") as adc:
+            with mock.patch.object(google.auth, "default") as adc:
                 adc.return_value = (cred, None)
                 transport = transport_class(
                     host="squid.clam.whelk",
@@ -2254,7 +2432,6 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class):
 def test_log_path():
     project = "squid"
     log = "clam"
-
     expected = "projects/{project}/logs/{log}".format(project=project, log=log,)
     actual = LoggingServiceV2Client.log_path(project, log)
     assert expected == actual
@@ -2274,7 +2451,6 @@ def test_parse_log_path():

 def test_common_billing_account_path():
     billing_account = "oyster"
-
     expected = "billingAccounts/{billing_account}".format(
         billing_account=billing_account,
     )
@@ -2295,7 +2471,6 @@ def test_parse_common_billing_account_path():

 def test_common_folder_path():
     folder = "cuttlefish"
-
     expected = "folders/{folder}".format(folder=folder,)
     actual = LoggingServiceV2Client.common_folder_path(folder)
     assert expected == actual
@@ -2314,7 +2489,6 @@ def test_parse_common_folder_path():

 def test_common_organization_path():
     organization = "winkle"
-
     expected = "organizations/{organization}".format(organization=organization,)
     actual = LoggingServiceV2Client.common_organization_path(organization)
     assert expected == actual
@@ -2333,7 +2507,6 @@ def test_parse_common_organization_path():

 def test_common_project_path():
     project = "scallop"
-
     expected = "projects/{project}".format(project=project,)
     actual = LoggingServiceV2Client.common_project_path(project)
     assert expected == actual
@@ -2353,7 +2526,6 @@ def test_parse_common_project_path():

 def test_common_location_path():
     project = "squid"
     location = "clam"
-
     expected = "projects/{project}/locations/{location}".format(
         project=project, location=location,
     )
@@ -2380,7 +2552,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.LoggingServiceV2Transport, "_prep_wrapped_messages"
     ) as prep:
         client = LoggingServiceV2Client(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)

@@ -2389,6 +2561,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = LoggingServiceV2Client.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
index 6faec201e7bc..a8a420a28064 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 import os
 import mock
+import packaging.version

 import grpc
 from grpc.experimental import aio
@@ -24,18 +23,17 @@
 import pytest
 from proto.marshal.rules.dates import DurationRule, TimestampRule

-from google import auth
-from google.api import distribution_pb2 as distribution  # type: ignore
-from google.api import label_pb2 as label  # type: ignore
-from google.api import launch_stage_pb2 as launch_stage  # type: ignore
-from google.api import metric_pb2 as ga_metric  # type: ignore
-from google.api import metric_pb2 as metric  # type: ignore
+
+from google.api import distribution_pb2  # type: ignore
+from google.api import label_pb2  # type: ignore
+from google.api import launch_stage_pb2  # type: ignore
+from google.api import metric_pb2  # type: ignore
 from google.api_core import client_options
-from google.api_core import exceptions
+from google.api_core import exceptions as core_exceptions
 from google.api_core import gapic_v1
 from google.api_core import grpc_helpers
 from google.api_core import grpc_helpers_async
-from google.auth import credentials
+from google.auth import credentials as ga_credentials
 from google.auth.exceptions import MutualTLSChannelError
 from google.cloud.logging_v2.services.metrics_service_v2 import (
     MetricsServiceV2AsyncClient,
@@ -43,10 +41,40 @@
 from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
 from google.cloud.logging_v2.services.metrics_service_v2 import pagers
 from google.cloud.logging_v2.services.metrics_service_v2 import transports
+from google.cloud.logging_v2.services.metrics_service_v2.transports.base import (
+    _API_CORE_VERSION,
+)
+from google.cloud.logging_v2.services.metrics_service_v2.transports.base import (
+    _GOOGLE_AUTH_VERSION,
+)
 from google.cloud.logging_v2.types import logging_metrics
 from google.oauth2 import service_account
-from google.protobuf import duration_pb2 as duration  # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
+# - Delete all the api-core and auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+    packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+    reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+    reason="This test requires google-auth >= 1.25.0",
+)
+
+requires_api_core_lt_1_26_0 = pytest.mark.skipif(
+    packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
+    reason="This test requires google-api-core < 1.26.0",
+)
+
+requires_api_core_gte_1_26_0 = pytest.mark.skipif(
+    packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
+    reason="This test requires google-api-core >= 1.26.0",
+)


 def client_cert_source_callback():
@@ -98,7 +126,7 @@ def test__get_default_mtls_endpoint():
     "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,]
 )
 def test_metrics_service_v2_client_from_service_account_info(client_class):
-    creds = credentials.AnonymousCredentials()
+    creds = ga_credentials.AnonymousCredentials()
     with mock.patch.object(
         service_account.Credentials, "from_service_account_info"
     ) as factory:
@@ -115,7 +143,7 @@
     "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,]
 )
 def test_metrics_service_v2_client_from_service_account_file(client_class):
-    creds = credentials.AnonymousCredentials()
+    creds = ga_credentials.AnonymousCredentials()
     with mock.patch.object(
         service_account.Credentials, "from_service_account_file"
     ) as factory:
@@ -168,7 +196,7 @@ def test_metrics_service_v2_client_client_options(
 ):
     # Check that if channel is provided we won't create a new one.
     with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc:
-        transport = transport_class(credentials=credentials.AnonymousCredentials())
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
         client = client_class(transport=transport)
         gtc.assert_not_called()

@@ -466,7 +494,7 @@ def test_list_log_metrics(
     transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest
 ):
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -479,19 +507,15 @@ def test_list_log_metrics(
         call.return_value = logging_metrics.ListLogMetricsResponse(
             next_page_token="next_page_token_value",
         )
-
         response = client.list_log_metrics(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.ListLogMetricsRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, pagers.ListLogMetricsPager)
-
     assert response.next_page_token == "next_page_token_value"


@@ -503,7 +527,7 @@ def test_list_log_metrics_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -511,7 +535,6 @@ def test_list_log_metrics_empty_call():
         client.list_log_metrics()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.ListLogMetricsRequest()


@@ -520,7 +543,7 @@ async def test_list_log_metrics_async(
     transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest
 ):
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -535,18 +558,15 @@ async def test_list_log_metrics_async(
                 next_page_token="next_page_token_value",
             )
         )
-
         response = await client.list_log_metrics(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.ListLogMetricsRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLogMetricsAsyncPager)
-
     assert response.next_page_token == "next_page_token_value"


@@ -556,17 +576,17 @@ async def test_list_log_metrics_async_from_dict():

 def test_list_log_metrics_field_headers():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.ListLogMetricsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
         call.return_value = logging_metrics.ListLogMetricsResponse()
-
         client.list_log_metrics(request)

         # Establish that the underlying gRPC stub method was called.
@@ -582,12 +602,13 @@ def test_list_log_metrics_field_headers():
 @pytest.mark.asyncio
 async def test_list_log_metrics_field_headers_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.ListLogMetricsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -595,7 +616,6 @@ async def test_list_log_metrics_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             logging_metrics.ListLogMetricsResponse()
         )
-
         await client.list_log_metrics(request)

         # Establish that the underlying gRPC stub method was called.
@@ -609,13 +629,12 @@ async def test_list_log_metrics_field_headers_async():

 def test_list_log_metrics_flattened():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_metrics.ListLogMetricsResponse()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_log_metrics(parent="parent_value",)
@@ -624,12 +643,11 @@ def test_list_log_metrics_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"


 def test_list_log_metrics_flattened_error():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -642,7 +660,7 @@ def test_list_log_metrics_flattened_error():
 @pytest.mark.asyncio
 async def test_list_log_metrics_flattened_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -661,14 +679,13 @@ async def test_list_log_metrics_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"


 @pytest.mark.asyncio
 async def test_list_log_metrics_flattened_error_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -680,7 +697,7 @@ async def test_list_log_metrics_flattened_error_async():

 def test_list_log_metrics_pager():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
@@ -718,7 +735,7 @@ def test_list_log_metrics_pager():

 def test_list_log_metrics_pages():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
@@ -748,7 +765,9 @@ def test_list_log_metrics_pages():

 @pytest.mark.asyncio
 async def test_list_log_metrics_async_pager():
-    client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -785,7 +804,9 @@ async def test_list_log_metrics_async_pager():

 @pytest.mark.asyncio
 async def test_list_log_metrics_async_pages():
-    client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -821,7 +842,7 @@ def test_get_log_metric(
     transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest
 ):
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -838,27 +859,19 @@ def test_get_log_metric(
             value_extractor="value_extractor_value",
             version=logging_metrics.LogMetric.ApiVersion.V1,
         )
-
         response = client.get_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.GetLogMetricRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -870,7 +883,7 @@ def test_get_log_metric_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -878,7 +891,6 @@ def test_get_log_metric_empty_call():
         client.get_log_metric()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.GetLogMetricRequest()


@@ -887,7 +899,7 @@ async def test_get_log_metric_async(
     transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest
 ):
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -906,26 +918,19 @@ async def test_get_log_metric_async(
                 version=logging_metrics.LogMetric.ApiVersion.V1,
             )
         )
-
         response = await client.get_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.GetLogMetricRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -935,17 +940,17 @@ async def test_get_log_metric_async_from_dict():

 def test_get_log_metric_field_headers():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.GetLogMetricRequest()
+
     request.metric_name = "metric_name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
         call.return_value = logging_metrics.LogMetric()
-
         client.get_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -961,12 +966,13 @@ def test_get_log_metric_field_headers():
 @pytest.mark.asyncio
 async def test_get_log_metric_field_headers_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.GetLogMetricRequest()
+
     request.metric_name = "metric_name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -974,7 +980,6 @@ async def test_get_log_metric_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             logging_metrics.LogMetric()
         )
-
         await client.get_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -988,13 +993,12 @@ async def test_get_log_metric_field_headers_async():

 def test_get_log_metric_flattened():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_metrics.LogMetric()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_log_metric(metric_name="metric_name_value",)
@@ -1003,12 +1007,11 @@ def test_get_log_metric_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0].metric_name == "metric_name_value"


 def test_get_log_metric_flattened_error():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1021,7 +1024,7 @@ def test_get_log_metric_flattened_error():
 @pytest.mark.asyncio
 async def test_get_log_metric_flattened_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1040,14 +1043,13 @@ async def test_get_log_metric_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0].metric_name == "metric_name_value"


 @pytest.mark.asyncio
 async def test_get_log_metric_flattened_error_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -1062,7 +1064,7 @@ def test_create_log_metric(
     transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest
 ):
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1081,27 +1083,19 @@ def test_create_log_metric(
             value_extractor="value_extractor_value",
             version=logging_metrics.LogMetric.ApiVersion.V1,
         )
-
         response = client.create_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.CreateLogMetricRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -1113,7 +1107,7 @@ def test_create_log_metric_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1123,7 +1117,6 @@ def test_create_log_metric_empty_call():
         client.create_log_metric()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.CreateLogMetricRequest()


@@ -1132,7 +1125,7 @@ async def test_create_log_metric_async(
     transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest
 ):
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1153,26 +1146,19 @@ async def test_create_log_metric_async(
                 version=logging_metrics.LogMetric.ApiVersion.V1,
             )
         )
-
         response = await client.create_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.CreateLogMetricRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -1182,11 +1168,12 @@ async def test_create_log_metric_async_from_dict():

 def test_create_log_metric_field_headers():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.CreateLogMetricRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1194,7 +1181,6 @@ def test_create_log_metric_field_headers():
         type(client.transport.create_log_metric), "__call__"
     ) as call:
         call.return_value = logging_metrics.LogMetric()
-
         client.create_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1210,12 +1196,13 @@ def test_create_log_metric_field_headers():
 @pytest.mark.asyncio
 async def test_create_log_metric_field_headers_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.CreateLogMetricRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1225,7 +1212,6 @@ async def test_create_log_metric_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             logging_metrics.LogMetric()
         )
-
         await client.create_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1239,7 +1225,7 @@ async def test_create_log_metric_field_headers_async():

 def test_create_log_metric_flattened():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1247,7 +1233,6 @@ def test_create_log_metric_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_metrics.LogMetric()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_log_metric(
@@ -1258,14 +1243,12 @@ def test_create_log_metric_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"
-
         assert args[0].metric == logging_metrics.LogMetric(name="name_value")


 def test_create_log_metric_flattened_error():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1280,7 +1263,7 @@ def test_create_log_metric_flattened_error():
 @pytest.mark.asyncio
 async def test_create_log_metric_flattened_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1303,16 +1286,14 @@ async def test_create_log_metric_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0].parent == "parent_value"
-
         assert args[0].metric == logging_metrics.LogMetric(name="name_value")


 @pytest.mark.asyncio
 async def test_create_log_metric_flattened_error_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -1329,7 +1310,7 @@ def test_update_log_metric(
     transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest
 ):
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1348,27 +1329,19 @@ def test_update_log_metric(
             value_extractor="value_extractor_value",
             version=logging_metrics.LogMetric.ApiVersion.V1,
         )
-
         response = client.update_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.UpdateLogMetricRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -1380,7 +1353,7 @@ def test_update_log_metric_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1390,7 +1363,6 @@ def test_update_log_metric_empty_call():
         client.update_log_metric()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.UpdateLogMetricRequest()


@@ -1399,7 +1371,7 @@ async def test_update_log_metric_async(
     transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest
 ):
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1420,26 +1392,19 @@ async def test_update_log_metric_async(
                 version=logging_metrics.LogMetric.ApiVersion.V1,
             )
         )
-
         response = await client.update_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.UpdateLogMetricRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_metrics.LogMetric)
-
     assert response.name == "name_value"
-
     assert response.description == "description_value"
-
     assert response.filter == "filter_value"
-
     assert response.value_extractor == "value_extractor_value"
-
     assert response.version == logging_metrics.LogMetric.ApiVersion.V1


@@ -1449,11 +1414,12 @@ async def test_update_log_metric_async_from_dict():

 def test_update_log_metric_field_headers():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.UpdateLogMetricRequest()
+
     request.metric_name = "metric_name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1461,7 +1427,6 @@ def test_update_log_metric_field_headers():
         type(client.transport.update_log_metric), "__call__"
     ) as call:
         call.return_value = logging_metrics.LogMetric()
-
         client.update_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1477,12 +1442,13 @@ def test_update_log_metric_field_headers():
 @pytest.mark.asyncio
 async def test_update_log_metric_field_headers_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = logging_metrics.UpdateLogMetricRequest()
+
     request.metric_name = "metric_name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1492,7 +1458,6 @@ async def test_update_log_metric_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             logging_metrics.LogMetric()
         )
-
         await client.update_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
@@ -1506,7 +1471,7 @@ async def test_update_log_metric_field_headers_async():

 def test_update_log_metric_flattened():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1514,7 +1479,6 @@ def test_update_log_metric_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_metrics.LogMetric()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.update_log_metric(
@@ -1526,14 +1490,12 @@ def test_update_log_metric_flattened():
         # request object values.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0].metric_name == "metric_name_value"
-
         assert args[0].metric == logging_metrics.LogMetric(name="name_value")


 def test_update_log_metric_flattened_error():
-    client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+    client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),)

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1548,7 +1510,7 @@ def test_update_log_metric_flattened_error():
 @pytest.mark.asyncio
 async def test_update_log_metric_flattened_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1572,16 +1534,14 @@ async def test_update_log_metric_flattened_async():
         # request object values.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0].metric_name == "metric_name_value"
-
         assert args[0].metric == logging_metrics.LogMetric(name="name_value")


 @pytest.mark.asyncio
 async def test_update_log_metric_flattened_error_async():
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -1598,7 +1558,7 @@ def test_delete_log_metric(
     transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest
 ):
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1611,13 +1571,11 @@ def test_delete_log_metric(
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
-
         response = client.delete_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.DeleteLogMetricRequest()

     # Establish that the response is the type that we expect.
@@ -1632,7 +1590,7 @@ def test_delete_log_metric_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = MetricsServiceV2Client(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1642,7 +1600,6 @@ def test_delete_log_metric_empty_call():
         client.delete_log_metric()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.DeleteLogMetricRequest()


@@ -1651,7 +1608,7 @@ async def test_delete_log_metric_async(
     transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest
 ):
     client = MetricsServiceV2AsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1664,13 +1621,11 @@ async def test_delete_log_metric_async(
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
         response = await client.delete_log_metric(request)

         # Establish that the underlying gRPC stub method was called.
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == logging_metrics.DeleteLogMetricRequest()

     # Establish that the response is the type that we expect.
@@ -1683,11 +1638,12 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1695,7 +1651,6 @@ def test_delete_log_metric_field_headers(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = None - client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1711,12 +1666,13 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1724,7 +1680,6 @@ async def test_delete_log_metric_field_headers_async(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1738,7 +1693,7 @@ async def test_delete_log_metric_field_headers_async(): def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1746,7 +1701,6 @@ def test_delete_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log_metric(metric_name="metric_name_value",) @@ -1755,12 +1709,11 @@ def test_delete_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1773,7 +1726,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1792,14 +1745,13 @@ async def test_delete_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1813,16 +1765,16 @@ async def test_delete_log_metric_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1832,7 +1784,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1843,7 +1795,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = MetricsServiceV2Client(transport=transport) assert client.transport is transport @@ -1852,13 +1804,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1873,23 +1825,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1901,7 +1853,7 @@ def test_metrics_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1918,15 +1870,43 @@ def test_metrics_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1945,19 +1925,39 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
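The requires_google_auth_gte_1_25_0 / requires_google_auth_lt_1_25_0 markers decorating the two credentials-file variants above are defined near the top of the test module, outside the hunks shown here. The split exists because google-auth 1.25.0 added the default_scopes keyword that the newer assertions expect, while older releases only understand scopes. A plausible definition, assuming the usual pytest.mark.skipif plus packaging.version pattern (the packaging dependency itself is added by a commit further below):

    import google.auth
    import packaging.version
    import pytest

    _GOOGLE_AUTH_VERSION = packaging.version.parse(google.auth.__version__)

    # google-auth 1.25.0 introduced `default_scopes`; gate each variant on it.
    requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
        _GOOGLE_AUTH_VERSION < packaging.version.parse("1.25.0"),
        reason="This test requires google-auth >= 1.25.0",
    )
    requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
        _GOOGLE_AUTH_VERSION >= packaging.version.parse("1.25.0"),
        reason="This test requires google-auth < 1.25.0",
    )
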
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=( @@ -1971,14 +1971,47 @@ def test_metrics_service_v2_auth_adc(): ) -def test_metrics_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -1991,6 +2024,127 @@ def test_metrics_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_metrics_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_metrics_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
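The create_channel tests above are gated the same way, but on google-api-core: version 1.26.0 taught grpc_helpers.create_channel to accept default_scopes and default_host and to resolve user-supplied scopes against them, which is exactly the difference the parametrized variants assert. A sketch of those markers under the same assumed pattern:

    import packaging.version
    import pytest
    from google.api_core import version as api_core_version

    _API_CORE_VERSION = packaging.version.parse(api_core_version.__version__)

    requires_api_core_gte_1_26_0 = pytest.mark.skipif(
        _API_CORE_VERSION < packaging.version.parse("1.26.0"),
        reason="This test requires google-api-core >= 1.26.0",
    )
    requires_api_core_lt_1_26_0 = pytest.mark.skipif(
        _API_CORE_VERSION >= packaging.version.parse("1.26.0"),
        reason="This test requires google-api-core < 1.26.0",
    )
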
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1999,7 +2153,7 @@ def test_metrics_service_v2_transport_auth_adc(): ], ) def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2044,7 +2198,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport def test_metrics_service_v2_host_no_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -2054,7 +2208,7 @@ def test_metrics_service_v2_host_no_port(): def test_metrics_service_v2_host_with_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -2110,9 +2264,9 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2201,7 +2355,6 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_metric_path(): project = "squid" metric = "clam" - expected = "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @@ -2223,7 +2376,6 @@ def test_parse_log_metric_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2244,7 +2396,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2263,7 +2414,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2282,7 +2432,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = MetricsServiceV2Client.common_project_path(project) assert expected == 
actual @@ -2302,7 +2451,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2329,7 +2477,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MetricsServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2338,6 +2486,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From df6696eb8c237dacade7d4fa2f610dc866243d00 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 14 May 2021 19:14:04 -0600 Subject: [PATCH 481/855] fix(deps): add packaging requirement (#300) Add packaging requirement. packaging.version is used for a version comparison in transports/base.py and is needed after the upgrade to gapic-generator-python 0.46.3 --- packages/google-cloud-logging/setup.py | 1 + packages/google-cloud-logging/testing/constraints-3.6.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 36426fb60aaa..9fbf1b682612 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -32,6 +32,7 @@ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "proto-plus >= 1.11.0", + "packaging >= 14.3", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index ae89ab4a1cca..61bbd6ec66fc 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -8,3 +8,4 @@ google-api-core==1.22.2 google-cloud-core==1.4.1 proto-plus==1.11.0 +packaging==14.3 From af9a225072d33e2691ac7b0a56ec8d80214fa847 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 May 2021 13:28:02 +0000 Subject: [PATCH 482/855] chore: new owl bot post processor docker image (#302) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index b5c26ed01808..864c17653f80 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e + digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 
1bbd787833ec..4f00c7cffcfd 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + rev: 3.9.2 hooks: - id: flake8 From acbf624f0b038040faddaf4f3f6a24745e9cf1a3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 21 May 2021 00:16:15 +0200 Subject: [PATCH 483/855] chore(deps): update dependency google-cloud-pubsub to v2.5.0 (#303) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 97e818af0611..eee5782bf468 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.4.0 google-cloud-bigquery==2.16.1 google-cloud-storage==1.38.0 -google-cloud-pubsub==2.4.2 +google-cloud-pubsub==2.5.0 From cf0aa6ec1eee08d842f269387123aa29c2bdbe22 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 09:18:14 +0000 Subject: [PATCH 484/855] chore: new owl bot post processor docker image (#308) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/noxfile.py | 6 ++++-- packages/google-cloud-logging/samples/snippets/noxfile.py | 8 +++++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 864c17653f80..46e3f021cc72 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa + digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 493d67e6f55a..9e49bd0d9f8d 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -198,7 +198,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -220,7 +220,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 956cdf4f9250..5ff9e1db5808 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -50,7 +50,10 @@ # to use your own Cloud project. 
'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. 'envs': {}, @@ -170,6 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): From 2f8fbaf300bd95cbf342f2d7a49879f3b844d4f3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 25 May 2021 07:36:27 +0200 Subject: [PATCH 485/855] chore(deps): update dependency google-cloud-bigquery to v2.17.0 (#306) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index eee5782bf468..800aa855706f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.4.0 -google-cloud-bigquery==2.16.1 +google-cloud-bigquery==2.17.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.5.0 From 84b642044e3f45735d86f8feec83d5d1b8578ad1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 11:57:44 -0400 Subject: [PATCH 486/855] chore: new owl bot post processor docker image (#309) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 46e3f021cc72..da616c91a3b6 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 8e1d46bc779a..b60a9ce4c620 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } From 7058ec03d746a02328a5986d9f2d29de404f6dd6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 2 Jun 2021 12:12:07 -0400 Subject: [PATCH 487/855] chore: delete unused 
protos (#307) --- .../cloud/logging_v2/proto/log_entry.proto | 210 --- .../cloud/logging_v2/proto/logging.proto | 478 ----- .../logging_v2/proto/logging_config.proto | 1567 ----------------- .../logging_v2/proto/logging_metrics.proto | 320 ---- 4 files changed, 2575 deletions(-) delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto delete mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_metrics.proto diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto deleted file mode 100644 index 3ad2cfbb5834..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/log_entry.proto +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/field_behavior.proto"; -import "google/api/monitored_resource.proto"; -import "google/api/resource.proto"; -import "google/logging/type/http_request.proto"; -import "google/logging/type/log_severity.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LogEntryProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; - -// An individual entry in a log. -// -// -message LogEntry { - option (google.api.resource) = { - type: "logging.googleapis.com/Log" - pattern: "projects/{project}/logs/{log}" - pattern: "organizations/{organization}/logs/{log}" - pattern: "folders/{folder}/logs/{log}" - pattern: "billingAccounts/{billing_account}/logs/{log}" - name_field: "log_name" - }; - - // Required. The resource name of the log to which this log entry belongs: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // A project number may be used in place of PROJECT_ID. The project number is - // translated to its corresponding PROJECT_ID internally and the `log_name` - // field will contain PROJECT_ID in queries and exports. - // - // `[LOG_ID]` must be URL-encoded within `log_name`. Example: - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. 
- // `[LOG_ID]` must be less than 512 characters long and can only include the - // following characters: upper and lower case alphanumeric characters, - // forward-slash, underscore, hyphen, and period. - // - // For backward compatibility, if `log_name` begins with a forward-slash, such - // as `/projects/...`, then the log entry is ingested as usual but the - // forward-slash is removed. Listing the log entry will not show the leading - // slash and filtering for a log name with a leading slash will never return - // any results. - string log_name = 12 [(google.api.field_behavior) = REQUIRED]; - - // Required. The monitored resource that produced this log entry. - // - // Example: a log entry that reports a database error would be associated with - // the monitored resource designating the particular database that reported - // the error. - google.api.MonitoredResource resource = 8 [(google.api.field_behavior) = REQUIRED]; - - // The log entry payload, which can be one of multiple types. - oneof payload { - // The log entry payload, represented as a protocol buffer. Some Google - // Cloud Platform services use this field for their log entry payloads. - // - // The following protocol buffer types are supported; user-defined types - // are not supported: - // - // "type.googleapis.com/google.cloud.audit.AuditLog" - // "type.googleapis.com/google.appengine.logging.v1.RequestLog" - google.protobuf.Any proto_payload = 2; - - // The log entry payload, represented as a Unicode string (UTF-8). - string text_payload = 3; - - // The log entry payload, represented as a structure that is - // expressed as a JSON object. - google.protobuf.Struct json_payload = 6; - } - - // Optional. The time the event described by the log entry occurred. This time is used - // to compute the log entry's age and to enforce the logs retention period. - // If this field is omitted in a new log entry, then Logging assigns it the - // current time. Timestamps have nanosecond accuracy, but trailing zeros in - // the fractional seconds might be omitted when the timestamp is displayed. - // - // Incoming log entries must have timestamps that don't exceed the - // [logs retention - // period](https://cloud.google.com/logging/quotas#logs_retention_periods) in - // the past, and that don't exceed 24 hours in the future. Log entries outside - // those time boundaries aren't ingested by Logging. - google.protobuf.Timestamp timestamp = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The time the log entry was received by Logging. - google.protobuf.Timestamp receive_timestamp = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The severity of the log entry. The default value is `LogSeverity.DEFAULT`. - google.logging.type.LogSeverity severity = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique identifier for the log entry. If you provide a value, then - // Logging considers other log entries in the same project, with the same - // `timestamp`, and with the same `insert_id` to be duplicates which are - // removed in a single query result. However, there are no guarantees of - // de-duplication in the export of logs. - // - // If the `insert_id` is omitted when writing a log entry, the Logging API - // assigns its own unique identifier in this field. - // - // In queries, the `insert_id` is also used to order log entries that have - // the same `log_name` and `timestamp` values. - string insert_id = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
Information about the HTTP request associated with this log entry, if - // applicable. - google.logging.type.HttpRequest http_request = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of user-defined (key, value) data that provides additional - // information about the log entry. - map labels = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Information about an operation associated with the log entry, if - // applicable. - LogEntryOperation operation = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Resource name of the trace associated with the log entry, if any. If it - // contains a relative resource name, the name is assumed to be relative to - // `//tracing.googleapis.com`. Example: - // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` - string trace = 22 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The span ID within the trace associated with the log entry. - // - // For Trace spans, this is the same format that the Trace API v2 uses: a - // 16-character hexadecimal encoding of an 8-byte array, such as - // `000000000000004a`. - string span_id = 27 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The sampling decision of the trace associated with the log entry. - // - // True means that the trace resource name in the `trace` field was sampled - // for storage in a trace backend. False means that the trace was not sampled - // for storage when this log entry was written, or the sampling decision was - // unknown at the time. A non-sampled `trace` value is still useful as a - // request correlation identifier. The default is False. - bool trace_sampled = 30 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Source code location information associated with the log entry, if any. - LogEntrySourceLocation source_location = 23 [(google.api.field_behavior) = OPTIONAL]; -} - -// Additional information about a potentially long-running operation with which -// a log entry is associated. -message LogEntryOperation { - // Optional. An arbitrary operation identifier. Log entries with the same - // identifier are assumed to be part of the same operation. - string id = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. An arbitrary producer identifier. The combination of `id` and - // `producer` must be globally unique. Examples for `producer`: - // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. - string producer = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Set this to True if this is the first log entry in the operation. - bool first = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Set this to True if this is the last log entry in the operation. - bool last = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Additional information about the source code location that produced the log -// entry. -message LogEntrySourceLocation { - // Optional. Source file name. Depending on the runtime environment, this - // might be a simple name or a fully-qualified name. - string file = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Line within the source file. 1-based; 0 indicates no line number - // available. - int64 line = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Human-readable name of the function or method being invoked, with - // optional context such as the class or package name. This information may be - // used in contexts such as the logs viewer, where a file and line number are - // less meaningful. 
The format can vary by language. For example: - // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` - // (Python). - string function = 3 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto deleted file mode 100644 index f8b01a71e6b4..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging.proto +++ /dev/null @@ -1,478 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/monitored_resource.proto"; -import "google/api/resource.proto"; -import "google/logging/v2/log_entry.proto"; -import "google/logging/v2/logging_config.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; - -// Service for ingesting and querying logs. -service LoggingServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read," - "https://www.googleapis.com/auth/logging.write"; - - // Deletes all the log entries in a log. The log reappears if it receives new - // entries. Log entries written shortly before the delete operation might not - // be deleted. Entries received after the delete operation with a timestamp - // before the operation will be deleted. - rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { - delete: "/v2/{log_name=*/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=organizations/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=folders/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=billingAccounts/*/logs/*}" - } - }; - option (google.api.method_signature) = "log_name"; - } - - // Writes log entries to Logging. This API method is the - // only way to send log entries to Logging. 
This method - // is used, directly or indirectly, by the Logging agent - // (fluentd) and all logging libraries configured to use Logging. - // A single request may contain log entries for a maximum of 1000 - // different resources (projects, organizations, billing accounts or - // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:write" - body: "*" - }; - option (google.api.method_signature) = "log_name,resource,labels,entries"; - } - - // Lists log entries. Use this method to retrieve log entries that originated - // from a project/folder/organization/billing account. For ways to export log - // entries, see [Exporting - // Logs](https://cloud.google.com/logging/docs/export). - rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:list" - body: "*" - }; - option (google.api.method_signature) = "resource_names,filter,order_by"; - } - - // Lists the descriptors for monitored resource types used by Logging. - rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { - option (google.api.http) = { - get: "/v2/monitoredResourceDescriptors" - }; - } - - // Lists the logs in projects, organizations, folders, or billing accounts. - // Only logs that have entries are listed. - rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/logs" - additional_bindings { - get: "/v2/{parent=projects/*}/logs" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/logs" - } - additional_bindings { - get: "/v2/{parent=folders/*}/logs" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/logs" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Streaming read of log entries as they are ingested. Until the stream is - // terminated, it will continue reading logs. - rpc TailLogEntries(stream TailLogEntriesRequest) returns (stream TailLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:tail" - body: "*" - }; - } -} - -// The parameters to DeleteLog. -message DeleteLogRequest { - // Required. The resource name of the log to delete: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. For example, - // `"projects/my-project-id/logs/syslog"`, - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - // For more information about log names, see - // [LogEntry][google.logging.v2.LogEntry]. - string log_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/Log" - } - ]; -} - -// The parameters to WriteLogEntries. -message WriteLogEntriesRequest { - // Optional. A default log resource name that is assigned to all log entries - // in `entries` that do not specify a value for `log_name`: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. 
For example: - // - // "projects/my-project-id/logs/syslog" - // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - // - // The permission `logging.logEntries.create` is needed on each project, - // organization, billing account, or folder that is receiving new log - // entries, whether the resource is specified in `logName` or in an - // individual log entry. - string log_name = 1 [ - (google.api.field_behavior) = OPTIONAL, - (google.api.resource_reference) = { - type: "logging.googleapis.com/Log" - } - ]; - - // Optional. A default monitored resource object that is assigned to all log - // entries in `entries` that do not specify a value for `resource`. Example: - // - // { "type": "gce_instance", - // "labels": { - // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - // - // See [LogEntry][google.logging.v2.LogEntry]. - google.api.MonitoredResource resource = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Default labels that are added to the `labels` field of all log - // entries in `entries`. If a log entry already has a label with the same key - // as a label in this parameter, then the log entry's label is not changed. - // See [LogEntry][google.logging.v2.LogEntry]. - map labels = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The log entries to send to Logging. The order of log - // entries in this list does not matter. Values supplied in this method's - // `log_name`, `resource`, and `labels` fields are copied into those log - // entries in this list that do not include values for their corresponding - // fields. For more information, see the - // [LogEntry][google.logging.v2.LogEntry] type. - // - // If the `timestamp` or `insert_id` fields are missing in log entries, then - // this method supplies the current time or a unique identifier, respectively. - // The supplied values are chosen so that, among the log entries that did not - // supply their own values, the entries earlier in the list will sort before - // the entries later in the list. See the `entries.list` method. - // - // Log entries with timestamps that are more than the - // [logs retention period](https://cloud.google.com/logging/quota-policy) in - // the past or more than 24 hours in the future will not be available when - // calling `entries.list`. However, those log entries can still be [exported - // with - // LogSinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). - // - // To improve throughput and to avoid exceeding the - // [quota limit](https://cloud.google.com/logging/quota-policy) for calls to - // `entries.write`, you should try to include several log entries in this - // list, rather than calling this method for each individual log entry. - repeated LogEntry entries = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Whether valid entries should be written even if some other - // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any - // entry is not written, then the response status is the error associated - // with one of the failed entries and the response includes error details - // keyed by the entries' zero-based index in the `entries.write` method. - bool partial_success = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If true, the request should expect normal response, but the - // entries won't be persisted nor exported. Useful for checking whether the - // logging API endpoints are working properly before sending valuable data. 
- bool dry_run = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from WriteLogEntries. -message WriteLogEntriesResponse {} - -// Error details for WriteLogEntries with partial success. -message WriteLogEntriesPartialErrors { - // When `WriteLogEntriesRequest.partial_success` is true, records the error - // status for entries that were not written due to a permanent error, keyed - // by the entry's zero-based index in `WriteLogEntriesRequest.entries`. - // - // Failed requests for which no entries are written will not include - // per-entry errors. - map log_entry_errors = 1; -} - -// The parameters to `ListLogEntries`. -message ListLogEntriesRequest { - // Required. Names of one or more parent resources from which to - // retrieve log entries: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // May alternatively be one or more views - // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // - // Projects listed in the `project_ids` field are added to this list. - repeated string resource_names = 8 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" - } - ]; - - // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Queries](https://cloud.google.com/logging/docs/view/advanced-queries). - // Only log entries that match the filter are returned. An empty filter - // matches all log entries in the resources listed in `resource_names`. - // Referencing a parent resource that is not listed in `resource_names` will - // cause the filter to return no results. The maximum length of the filter is - // 20000 characters. - string filter = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. How the results should be sorted. Presently, the only permitted - // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first - // option returns entries in order of increasing values of - // `LogEntry.timestamp` (oldest first), and the second option returns entries - // in order of decreasing timestamps (newest first). Entries with equal - // timestamps are returned in order of their `insert_id` values. - string order_by = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Default is 50. If the value is negative or exceeds 1000, - // the request is rejected. The presence of `next_page_token` in the - // response indicates that more results might be available. - int32 page_size = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `page_token` must be the value of - // `next_page_token` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `ListLogEntries`. -message ListLogEntriesResponse { - // A list of log entries. 
If `entries` is empty, `nextPageToken` may still be - // returned, indicating that more entries may exist. See `nextPageToken` for - // more information. - repeated LogEntry entries = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - // - // If a value for `next_page_token` appears and the `entries` field is empty, - // it means that the search found no log entries so far but it did not have - // time to search all the possible log entries. Retry the method with this - // value for `page_token` to continue the search. Alternatively, consider - // speeding up the search by changing your filter to specify a single log name - // or resource type, or to narrow the time range of the search. - string next_page_token = 2; -} - -// The parameters to ListMonitoredResourceDescriptors -message ListMonitoredResourceDescriptorsRequest { - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListMonitoredResourceDescriptors. -message ListMonitoredResourceDescriptorsResponse { - // A list of resource descriptors. - repeated google.api.MonitoredResourceDescriptor resource_descriptors = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to ListLogs. -message ListLogsRequest { - // Required. The resource name that owns the logs: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" - } - ]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
The resource name that owns the logs: - // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // - // To support legacy queries, it could also be: - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - repeated string resource_names = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListLogs. -message ListLogsResponse { - // A list of log names. For example, - // `"projects/my-project/logs/syslog"` or - // `"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - repeated string log_names = 3; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `TailLogEntries`. -message TailLogEntriesRequest { - // Required. Name of a parent resource from which to retrieve log entries: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // May alternatively be one or more views: - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - repeated string resource_names = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). - // Only log entries that match the filter are returned. An empty filter - // matches all log entries in the resources listed in `resource_names`. - // Referencing a parent resource that is not in `resource_names` will cause - // the filter to return no results. The maximum length of the filter is 20000 - // characters. - string filter = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The amount of time to buffer log entries at the server before - // being returned to prevent out of order results due to late arriving log - // entries. Valid values are between 0-60000 milliseconds. Defaults to 2000 - // milliseconds. - google.protobuf.Duration buffer_window = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `TailLogEntries`. -message TailLogEntriesResponse { - // Information about entries that were omitted from the session. - message SuppressionInfo { - // An indicator of why entries were omitted. - enum Reason { - // Unexpected default. - REASON_UNSPECIFIED = 0; - - // Indicates suppression occurred due to relevant entries being - // received in excess of rate limits. For quotas and limits, see - // [Logging API quotas and - // limits](https://cloud.google.com/logging/quotas#api-limits). - RATE_LIMIT = 1; - - // Indicates suppression occurred due to the client not consuming - // responses quickly enough.
- NOT_CONSUMED = 2; - } - - // The reason that entries were omitted from the session. - Reason reason = 1; - - // A lower bound on the count of entries omitted due to `reason`. - int32 suppressed_count = 2; - } - - // A list of log entries. Each response in the stream will order entries with - // increasing values of `LogEntry.timestamp`. Ordering is not guaranteed - // between separate responses. - repeated LogEntry entries = 1; - - // If entries that otherwise would have been included in the session were not - // sent back to the client, counts of relevant entries omitted from the - // session with the reason that they were not included. There will be at most - // one of each reason per response. The counts represent the number of - // suppressed entries since the last streamed response. - repeated SuppressionInfo suppression_info = 2; -} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto b/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto deleted file mode 100644 index 9b10932d637b..000000000000 --- a/packages/google-cloud-logging/google/cloud/logging_v2/proto/logging_config.proto +++ /dev/null @@ -1,1567 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingConfigProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/OrganizationLocation" - pattern: "organizations/{organization}/locations/{location}" -}; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/FolderLocation" - pattern: "folders/{folder}/locations/{location}" -}; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/BillingAccountLocation" - pattern: "billingAccounts/{billing_account}/locations/{location}" -}; - -// Service for configuring sinks used to route log entries. -service ConfigServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read"; - - // Lists buckets. 
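For orientation before the bucket RPCs that follow: this package surfaces `ConfigServiceV2` through a generated Python client. A minimal sketch of listing buckets, assuming the generated `ConfigServiceV2Client` from `logging_v2` (the project ID is illustrative):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    # Passing "-" as the location ID asks the API for buckets in all locations.
    for bucket in client.list_buckets(parent="projects/my-project/locations/-"):
        print(bucket.name, bucket.retention_days)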
- rpc ListBuckets(ListBucketsRequest) returns (ListBucketsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*/locations/*}/buckets" - additional_bindings { - get: "/v2/{parent=projects/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=organizations/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=folders/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*/locations/*}/buckets" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a bucket. - rpc GetBucket(GetBucketRequest) returns (LogBucket) { - option (google.api.http) = { - get: "/v2/{name=*/*/locations/*/buckets/*}" - additional_bindings { - get: "/v2/{name=projects/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/buckets/*}" - } - }; - } - - // Creates a bucket that can be used to store log entries. Once a bucket has - // been created, the region cannot be changed. - rpc CreateBucket(CreateBucketRequest) returns (LogBucket) { - option (google.api.http) = { - post: "/v2/{parent=*/*/locations/*}/buckets" - body: "bucket" - additional_bindings { - post: "/v2/{parent=projects/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=organizations/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=folders/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*/locations/*}/buckets" - body: "bucket" - } - }; - } - - // Updates a bucket. This method replaces the following fields in the - // existing bucket with values from the new bucket: `retention_days`. - // - // If the retention period is decreased and the bucket is locked, - // FAILED_PRECONDITION will be returned. - // - // If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION - // will be returned. - // - // A bucket's region may not be modified after it is created. - rpc UpdateBucket(UpdateBucketRequest) returns (LogBucket) { - option (google.api.http) = { - patch: "/v2/{name=*/*/locations/*/buckets/*}" - body: "bucket" - additional_bindings { - patch: "/v2/{name=projects/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=organizations/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=folders/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" - body: "bucket" - } - }; - } - - // Deletes a bucket. - // Moves the bucket to the DELETE_REQUESTED state. After 7 days, the - // bucket will be purged and all logs in the bucket will be permanently - // deleted. - rpc DeleteBucket(DeleteBucketRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/locations/*/buckets/*}" - additional_bindings { - delete: "/v2/{name=projects/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" - } - }; - } - - // Undeletes a bucket.
A bucket that has been deleted may be undeleted within - // the grace period of 7 days. - rpc UndeleteBucket(UndeleteBucketRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=*/*/locations/*/buckets/*}:undelete" - body: "*" - additional_bindings { - post: "/v2/{name=projects/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=organizations/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=folders/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete" - body: "*" - } - }; - } - - // Lists views on a bucket. - rpc ListViews(ListViewsRequest) returns (ListViewsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*/locations/*/buckets/*}/views" - additional_bindings { - get: "/v2/{parent=projects/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=folders/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a view. - rpc GetView(GetViewRequest) returns (LogView) { - option (google.api.http) = { - get: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - additional_bindings { - get: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/buckets/*/views/*}" - } - }; - } - - // Creates a view over logs in a bucket. A bucket may contain a maximum of - // 50 views. - rpc CreateView(CreateViewRequest) returns (LogView) { - option (google.api.http) = { - post: "/v2/{parent=*/*/locations/*/buckets/*}/views" - body: "view" - additional_bindings { - post: "/v2/{parent=projects/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=folders/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" - body: "view" - } - }; - } - - // Updates a view. This method replaces the following fields in the existing - // view with values from the new view: `filter`. - rpc UpdateView(UpdateViewRequest) returns (LogView) { - option (google.api.http) = { - patch: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - body: "view" - additional_bindings { - patch: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" - body: "view" - } - }; - } - - // Deletes a view from a bucket. 
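The view RPCs above sketch a small CRUD surface over `LogView`. A minimal usage sketch, assuming the generated client; since `CreateView` declares no flattened method signature, the request-dict form is used, and all names are illustrative:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    view = client.create_view(
        request={
            "parent": "projects/my-project/locations/global/buckets/my-bucket",
            "view_id": "error-view",
            # Views restrict visibility; the filter grammar is described below.
            "view": {"filter": 'LOG_ID("stderr")'},
        }
    )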
- rpc DeleteView(DeleteViewRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - additional_bindings { - delete: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" - } - }; - } - - // Lists sinks. - rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/sinks" - additional_bindings { - get: "/v2/{parent=projects/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=folders/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/sinks" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a sink. - rpc GetSink(GetSinkRequest) returns (LogSink) { - option (google.api.http) = { - get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - get: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - option (google.api.method_signature) = "sink_name"; - } - - // Creates a sink that exports specified log entries to a destination. The - // export of newly-ingested log entries begins immediately, unless the sink's - // `writer_identity` is not permitted to write to the destination. A sink can - // export log entries only from the resource owning the sink. - rpc CreateSink(CreateSinkRequest) returns (LogSink) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/sinks" - body: "sink" - additional_bindings { - post: "/v2/{parent=projects/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=folders/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/sinks" - body: "sink" - } - }; - option (google.api.method_signature) = "parent,sink"; - } - - // Updates a sink. This method replaces the following fields in the existing - // sink with values from the new sink: `destination`, and `filter`. - // - // The updated sink might also have a new `writer_identity`; see the - // `unique_writer_identity` field. 
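In the handwritten client this package ships, the sink lifecycle described above reduces to a few calls. A minimal sketch (the sink name, filter, and Pub/Sub topic are illustrative; `unique_writer_identity` mirrors the request field documented further below):

    import google.cloud.logging

    client = google.cloud.logging.Client()
    sink = client.sink(
        "my-syslog-errors-to-pubsub",
        filter_="severity>=ERROR",
        destination="pubsub.googleapis.com/projects/my-project/topics/errors",
    )
    sink.create(unique_writer_identity=True)
    # Exports fail until this identity is granted write access to the topic.
    print(sink.writer_identity)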
- rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { - option (google.api.http) = { - put: "/v2/{sink_name=*/*/sinks/*}" - body: "sink" - additional_bindings { - put: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - }; - option (google.api.method_signature) = "sink_name,sink,update_mask"; - option (google.api.method_signature) = "sink_name,sink"; - } - - // Deletes a sink. If the sink has a unique `writer_identity`, then that - // service account is also deleted. - rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - delete: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - option (google.api.method_signature) = "sink_name"; - } - - // Lists all the exclusions in a parent resource. - rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/exclusions" - additional_bindings { - get: "/v2/{parent=projects/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=folders/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/exclusions" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets the description of an exclusion. - rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - get: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Creates a new exclusion in a specified parent resource. - // Only log entries belonging to that resource can be excluded. - // You can have up to 10 exclusions in a resource. 
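A minimal sketch of creating an exclusion through the generated client (the exclusion name and filter are illustrative; `parent` and `exclusion` are the flattened parameters declared by the `CreateExclusion` signature below):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import LogExclusion

    client = ConfigServiceV2Client()
    exclusion = LogExclusion(
        name="load-balancer-exclusion",
        # Drop low-severity load balancer entries before they are stored.
        filter='resource.type="http_load_balancer" severity<ERROR',
    )
    client.create_exclusion(parent="projects/my-project", exclusion=exclusion)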
- rpc CreateExclusion(CreateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/exclusions" - body: "exclusion" - additional_bindings { - post: "/v2/{parent=projects/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=folders/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/exclusions" - body: "exclusion" - } - }; - option (google.api.method_signature) = "parent,exclusion"; - } - - // Changes one or more properties of an existing exclusion. - rpc UpdateExclusion(UpdateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - patch: "/v2/{name=*/*/exclusions/*}" - body: "exclusion" - additional_bindings { - patch: "/v2/{name=projects/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=organizations/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=folders/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/exclusions/*}" - body: "exclusion" - } - }; - option (google.api.method_signature) = "name,exclusion,update_mask"; - } - - // Deletes an exclusion. - rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - delete: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Gets the Logs Router CMEK settings for the given resource. - // - // Note: CMEK for the Logs Router can currently only be configured for GCP - // organizations. Once configured, it applies to all projects and folders in - // the GCP organization. - // - // See [Enabling CMEK for Logs - // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) - // for more information. - rpc GetCmekSettings(GetCmekSettingsRequest) returns (CmekSettings) { - option (google.api.http) = { - get: "/v2/{name=*/*}/cmekSettings" - additional_bindings { - get: "/v2/{name=organizations/*}/cmekSettings" - } - }; - } - - // Updates the Logs Router CMEK settings for the given resource. - // - // Note: CMEK for the Logs Router can currently only be configured for GCP - // organizations. Once configured, it applies to all projects and folders in - // the GCP organization. - // - // [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - // will fail if 1) `kms_key_name` is invalid, or 2) the associated service - // account does not have the required - // `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or - // 3) access to the key is disabled. - // - // See [Enabling CMEK for Logs - // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) - // for more information. - rpc UpdateCmekSettings(UpdateCmekSettingsRequest) returns (CmekSettings) { - option (google.api.http) = { - patch: "/v2/{name=*/*}/cmekSettings" - body: "cmek_settings" - additional_bindings { - patch: "/v2/{name=organizations/*}/cmekSettings" - body: "cmek_settings" - } - }; - } -} - -// Describes a repository of logs. 
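Reading the CMEK settings described above is request-only in the generated client (no flattened signature). A sketch under the assumption that `CmekSettings` carries `kms_key_name` and `service_account_id` fields; the organization ID is illustrative:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    # CMEK for the Logs Router is organization-scoped, per the comments above.
    settings = client.get_cmek_settings(
        request={"name": "organizations/123456789/cmekSettings"}
    )
    print(settings.kms_key_name, settings.service_account_id)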
-message LogBucket { - option (google.api.resource) = { - type: "logging.googleapis.com/LogBucket" - pattern: "projects/{project}/locations/{location}/buckets/{bucket}" - pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}" - pattern: "folders/{folder}/locations/{location}/buckets/{bucket}" - pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}" - }; - - // The resource name of the bucket. - // For example: - // "projects/my-project-id/locations/my-location/buckets/my-bucket-id". The - // supported locations are: - // "global" - // - // For the location of `global` it is unspecified where logs are actually - // stored. - // Once a bucket has been created, the location cannot be changed. - string name = 1; - - // Describes this bucket. - string description = 3; - - // Output only. The creation timestamp of the bucket. This is not set for any of the - // default buckets. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the bucket. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Logs will be retained by default for this amount of time, after which they - // will automatically be deleted. The minimum retention period is 1 day. - // If this value is set to zero at bucket creation time, the default time of - // 30 days will be used. - int32 retention_days = 11; - - // Whether the bucket has been locked. - // The retention period on a locked bucket may not be changed. - // Locked buckets may only be deleted if they are empty. - bool locked = 9; - - // Output only. The bucket lifecycle state. - LifecycleState lifecycle_state = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// LogBucket lifecycle states. -enum LifecycleState { - // Unspecified state. This is only used/useful for distinguishing - // unset values. - LIFECYCLE_STATE_UNSPECIFIED = 0; - - // The normal and active state. - ACTIVE = 1; - - // The bucket has been marked for deletion by the user. - DELETE_REQUESTED = 2; -} - -// Describes a view over logs in a bucket. -message LogView { - option (google.api.resource) = { - type: "logging.googleapis.com/LogView" - pattern: "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "folders/{folder}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}/views/{view}" - }; - - // The resource name of the view. - // For example: - // "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view" - string name = 1; - - // Describes this view. - string description = 3; - - // Output only. The creation timestamp of the view. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the view. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Filter that restricts which log entries in a bucket are visible in this - // view. Filters are restricted to be a logical AND of ==/!= of any of the - // following: - // originating project/folder/organization/billing account.
- // resource type - // log id - // Example: SOURCE("projects/myproject") AND resource.type = "gce_instance" - // AND LOG_ID("stdout") - string filter = 7; -} - -// Describes a sink used to export log entries to one of the following -// destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a -// Cloud Pub/Sub topic. A logs filter controls which log entries are exported. -// The sink must be created within a project, organization, billing account, or -// folder. -message LogSink { - option (google.api.resource) = { - type: "logging.googleapis.com/LogSink" - pattern: "projects/{project}/sinks/{sink}" - pattern: "organizations/{organization}/sinks/{sink}" - pattern: "folders/{folder}/sinks/{sink}" - pattern: "billingAccounts/{billing_account}/sinks/{sink}" - }; - - // Deprecated. This is unused. - enum VersionFormat { - // An unspecified format version that will default to V2. - VERSION_FORMAT_UNSPECIFIED = 0; - - // `LogEntry` version 2 format. - V2 = 1; - - // `LogEntry` version 1 format. - V1 = 2; - } - - // Required. The client-assigned sink identifier, unique within the project. Example: - // `"my-syslog-errors-to-pubsub"`. Sink identifiers are limited to 100 - // characters and can include only the following characters: upper and - // lower-case alphanumeric characters, underscores, hyphens, and periods. - // First character has to be alphanumeric. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The export destination: - // - // "storage.googleapis.com/[GCS_BUCKET]" - // "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" - // "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" - // - // The sink's `writer_identity`, set when the sink is created, must - // have permission to write to the destination or else the log - // entries are not exported. For more information, see - // [Exporting Logs with - // Sinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). - string destination = 3 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "*" - } - ]; - - // Optional. An [advanced logs - // filter](https://cloud.google.com/logging/docs/view/advanced-queries). The - // only exported log entries are those that are in the resource owning the - // sink and that match the filter. For example: - // - // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A description of this sink. - // The maximum length of the description is 8000 characters. - string description = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If set to True, then this sink is disabled and it does not - // export any log entries. - bool disabled = 19 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Log entries that match any of the exclusion filters will not be exported. - // If a log entry is matched by both `filter` and one of `exclusion_filters` - // it will not be exported. - repeated LogExclusion exclusions = 16 [(google.api.field_behavior) = OPTIONAL]; - - // Deprecated. This field is unused. - VersionFormat output_version_format = 6 [deprecated = true]; - - // Output only. An IAM identity—a service account or group—under which Logging - // writes the exported log entries to the sink's destination. 
This field is - // set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and - // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] based on the - // value of `unique_writer_identity` in those methods. - // - // Until you grant this identity write-access to the destination, log entry - // exports from this sink will fail. For more information, - // see [Granting Access for a - // Resource](https://cloud.google.com/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). - // Consult the destination service's documentation to determine the - // appropriate IAM roles to assign to the identity. - string writer_identity = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. This field applies only to sinks owned by organizations and - // folders. If the field is false, the default, only the logs owned by the - // sink's parent resource are available for export. If the field is true, then - // logs from all the projects, folders, and billing accounts contained in the - // sink's parent resource are also available for export. Whether a particular - // log entry from the children is exported depends on the sink's filter - // expression. For example, if this field is true, then the filter - // `resource.type=gce_instance` would export all Compute Engine VM instance - // log entries from all projects in the sink's parent. To only export entries - // from certain child projects, filter on the project part of the log name: - // - // logName:("projects/test-project1/" OR "projects/test-project2/") AND - // resource.type=gce_instance - bool include_children = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Destination dependent options. - oneof options { - // Optional. Options that affect sinks exporting data to BigQuery. - BigQueryOptions bigquery_options = 12 [(google.api.field_behavior) = OPTIONAL]; - } - - // Output only. The creation timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp update_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Options that change functionality of a sink exporting data to BigQuery. -message BigQueryOptions { - // Optional. Whether to use [BigQuery's partition - // tables](https://cloud.google.com/bigquery/docs/partitioned-tables). By - // default, Logging creates dated tables based on the log entries' timestamps, - // e.g. syslog_20170523. With partitioned tables the date suffix is no longer - // present and [special query - // syntax](https://cloud.google.com/bigquery/docs/querying-partitioned-tables) - // has to be used instead. In both cases, tables are sharded based on UTC - // timezone. - bool use_partitioned_tables = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. True if new timestamp column based partitioning is in use, - // false if legacy ingestion-time partitioning is in use. - // All new sinks will have this field set true and will use timestamp column - // based partitioning. If use_partitioned_tables is false, this value has no - // meaning and will be false. Legacy sinks using partitioned tables will have - // this field set to false. - bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The parameters to `ListBuckets`. 
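Tying `BigQueryOptions` back to `LogSink`: a minimal sketch of building a partitioned-table BigQuery sink from the generated types (the name, dataset, and filter are illustrative); the resulting message would be passed to `CreateSink`:

    from google.cloud.logging_v2.types import BigQueryOptions, LogSink

    sink = LogSink(
        name="bq-export",
        destination="bigquery.googleapis.com/projects/my-project/datasets/my_dataset",
        filter="severity>=WARNING",
        # Use timestamp-partitioned tables instead of dated table shards.
        bigquery_options=BigQueryOptions(use_partitioned_tables=True),
    )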
-message ListBucketsRequest { - // Required. The parent resource whose buckets are to be listed: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]" - // - // Note: The locations portion of the resource must be specified, but - // supplying the character `-` in place of [LOCATION_ID] will return all - // buckets. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogBucket" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The response from ListBuckets. -message ListBucketsResponse { - // A list of buckets. - repeated LogBucket buckets = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `CreateBucket`. -message CreateBucketRequest { - // Required. The resource in which to create the bucket: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - // - // Example: `"projects/my-logging-project/locations/global"` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogBucket" - } - ]; - - // Required. A client-assigned identifier such as `"my-bucket"`. Identifiers are - // limited to 100 characters and can include only letters, digits, - // underscores, hyphens, and periods. - string bucket_id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The new bucket. The region specified in the new bucket must be compliant - // with any Location Restriction Org Policy. The name field in the bucket is - // ignored. - LogBucket bucket = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `UpdateBucket`. -message UpdateBucketRequest { - // Required. The full resource name of the bucket to update. - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. Also - // requires permission "resourcemanager.projects.updateLiens" to set the - // locked property - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; - - // Required. The updated bucket. - LogBucket bucket = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. 
Field mask that specifies the fields in `bucket` that need an update. A - // bucket field will be overwritten if, and only if, it is in the update - // mask. `name` and output only fields cannot be updated. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=retention_days`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `GetBucket`. -message GetBucketRequest { - // Required. The resource name of the bucket: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `DeleteBucket`. -message DeleteBucketRequest { - // Required. The full resource name of the bucket to delete. - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `UndeleteBucket`. -message UndeleteBucketRequest { - // Required. The full resource name of the bucket to undelete. - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `ListViews`. -message ListViewsRequest { - // Required. The bucket whose views are to be listed: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - string parent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The response from ListViews. 
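The `pageToken`/`nextPageToken` contract repeated throughout these messages is what the generated pagers automate. A sketch of walking pages explicitly, assuming a constructed `ConfigServiceV2Client` and an illustrative parent:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    parent = "projects/my-project/locations/global/buckets/my-bucket"
    pager = client.list_views(request={"parent": parent, "page_size": 100})
    for page in pager.pages:  # each page is one ListViewsResponse
        for view in page.views:
            print(view.name)
        # the pager feeds page.next_page_token into the next request for us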
-message ListViewsResponse { - // A list of views. - repeated LogView views = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `CreateView`. -message CreateViewRequest { - // Required. The bucket in which to create the view - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-logging-project/locations/my-location/buckets/my-bucket"` - string parent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The id to use for this view. - string view_id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The new view. - LogView view = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `UpdateView`. -message UpdateViewRequest { - // Required. The full resource name of the view to update - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The updated view. - LogView view = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Field mask that specifies the fields in `view` that need - // an update. A field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `GetView`. -message GetViewRequest { - // Required. The resource name of the view: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogView" - } - ]; -} - -// The parameters to `DeleteView`. -message DeleteViewRequest { - // Required. The full resource name of the view to delete: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogView" - } - ]; -} - -// The parameters to `ListSinks`. -message ListSinksRequest { - // Required. The parent resource whose sinks are to be listed: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogSink" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response.
The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `ListSinks`. -message ListSinksResponse { - // A list of sinks. - repeated LogSink sinks = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `GetSink`. -message GetSinkRequest { - // Required. The resource name of the sink: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; -} - -// The parameters to `CreateSink`. -message CreateSinkRequest { - // Required. The resource in which to create the sink: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogSink" - } - ]; - - // Required. The new sink, whose `name` parameter is a sink identifier that - // is not already in use. - LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Determines the kind of IAM identity returned as `writer_identity` - // in the new sink. If this value is omitted or set to false, and if the - // sink's parent is a project, then the value returned as `writer_identity` is - // the same group or service account used by Logging before the addition of - // writer identities to this API. The sink's destination must be in the same - // project as the sink itself. - // - // If this field is set to true, or if the sink is owned by a non-project - // resource such as an organization, then the value of `writer_identity` will - // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. - bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `UpdateSink`. -message UpdateSinkRequest { - // Required. The full resource name of the sink to update, including the parent - // resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; - - // Required. 
The updated sink, whose name is the same identifier that appears as part - // of `sink_name`. - LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - // for a description of this field. When updating a sink, the effect of this - // field on the value of `writer_identity` in the updated sink depends on both - // the old and new values of this field: - // - // + If the old and new values of this field are both false or both true, - // then there is no change to the sink's `writer_identity`. - // + If the old value is false and the new value is true, then - // `writer_identity` is changed to a unique service account. - // + It is an error if the old value is true and the new value is - // set to false or defaulted to false. - bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Field mask that specifies the fields in `sink` that need - // an update. A sink field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. - // - // An empty updateMask is temporarily treated as using the following mask - // for backwards compatibility purposes: - // destination,filter,includeChildren - // At some point in the future, this behavior will be removed and specifying an - // empty updateMask will be an error. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `DeleteSink`. -message DeleteSinkRequest { - // Required. The full resource name of the sink to delete, including the parent - // resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; -} - -// Specifies a set of log entries that are not to be stored in -// Logging. If your GCP resource receives a large volume of logs, you can -// use exclusions to reduce your chargeable logs. Exclusions are -// processed after log sinks, so you can export log entries before they are -// excluded. Note that organization-level and folder-level exclusions don't -// apply to child resources, and that you can't exclude audit log entries. -message LogExclusion { - option (google.api.resource) = { - type: "logging.googleapis.com/LogExclusion" - pattern: "projects/{project}/exclusions/{exclusion}" - pattern: "organizations/{organization}/exclusions/{exclusion}" - pattern: "folders/{folder}/exclusions/{exclusion}" - pattern: "billingAccounts/{billing_account}/exclusions/{exclusion}" - }; - - // Required. A client-assigned identifier, such as `"load-balancer-exclusion"`. - // Identifiers are limited to 100 characters and can include only letters, - // digits, underscores, hyphens, and periods. First character has to be - // alphanumeric. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A description of this exclusion. - string description = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Required.
An [advanced logs - // filter](https://cloud.google.com/logging/docs/view/advanced-queries) that - // matches the log entries to be excluded. By using the [sample - // function](https://cloud.google.com/logging/docs/view/advanced-queries#sample), - // you can exclude less than 100% of the matching log entries. - // For example, the following query matches 99% of low-severity log - // entries from Google Cloud Storage buckets: - // - // "resource.type=gcs_bucket severity<ERROR sample(insertId, 0.99)" - // - // The maximum length of the filter is 20000 characters. - string filter = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric descriptor associated with the logs-based metric. - // If unspecified, it uses a default metric descriptor with a DELTA metric - // kind, INT64 value type, with no labels and a unit of "1". Such a metric - // counts the number of log entries matching the `filter` expression. - // - // The `name`, `type`, and `description` fields in the `metric_descriptor` - // are output only, and are constructed using the `name` and `description` - // fields in the LogMetric. - // - // To create a logs-based metric that records a distribution of log values, a - // DELTA metric kind with a DISTRIBUTION value type must be used along with - // a `value_extractor` expression in the LogMetric. - // - // Each label in the metric descriptor must have a matching label - // name as the key and an extractor expression as the value in the - // `label_extractors` map. - // - // The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot - // be updated once initially configured. New labels can be added in the - // `metric_descriptor`, but existing labels cannot be modified except for - // their description. - google.api.MetricDescriptor metric_descriptor = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A `value_extractor` is required when using a distribution - // logs-based metric to extract the values to record from a log entry. - // Two functions are supported for value extraction: `EXTRACT(field)` or - // `REGEXP_EXTRACT(field, regex)`. The arguments are: - // 1. field: The name of the log entry field from which the value is to be - // extracted. - // 2. regex: A regular expression using the Google RE2 syntax - // (https://github.com/google/re2/wiki/Syntax) with a single capture - // group to extract data from the specified log entry field. The value - // of the field is converted to a string before applying the regex. - // It is an error to specify a regex that does not include exactly one - // capture group. - // - // The result of the extraction must be convertible to a double type, as the - // distribution always records double values. If either the extraction or - // the conversion to double fails, then those values are not recorded in the - // distribution. - // - // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")` - string value_extractor = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A map from a label key string to an extractor expression which is - // used to extract data from a log entry field and assign as the label value. - // Each label key specified in the LabelDescriptor must have an associated - // extractor expression in this map. The syntax of the extractor expression - // is the same as for the `value_extractor` field. - // - // The extracted value is converted to the type defined in the label - // descriptor.
If either the extraction or the type conversion fails, - // the label will have a default value. The default value for a string - // label is an empty string, for an integer label it is 0, and for a boolean - // label it is `false`. - // - // Note that there are upper bounds on the maximum number of labels and the - // number of active time series that are allowed in a project. - map<string, string> label_extractors = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The `bucket_options` are required when the logs-based metric is - // using a DISTRIBUTION value type and it describes the bucket boundaries - // used to create a histogram of the extracted values. - google.api.Distribution.BucketOptions bucket_options = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The creation timestamp of the metric. - // - // This field may not be present for older metrics. - google.protobuf.Timestamp create_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the metric. - // - // This field may not be present for older metrics. - google.protobuf.Timestamp update_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Deprecated. The API version that created or updated this metric. - // The v2 format is used by default and cannot be changed. - ApiVersion version = 4 [deprecated = true]; -} - -// The parameters to ListLogMetrics. -message ListLogMetricsRequest { - // Required. The name of the project containing the metrics: - // - // "projects/[PROJECT_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListLogMetrics. -message ListLogMetricsResponse { - // A list of logs-based metrics. - repeated LogMetric metrics = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to GetLogMetric. -message GetLogMetricRequest { - // Required. The resource name of the desired metric: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; -} - -// The parameters to CreateLogMetric. -message CreateLogMetricRequest { - // Required. The resource name of the project in which to create the metric: - // - // "projects/[PROJECT_ID]" - // - // The new metric must be provided in the request. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogMetric" - } - ]; - - // Required.
The new logs-based metric, which must not have an identifier that - // already exists. - LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to UpdateLogMetric. -message UpdateLogMetricRequest { - // Required. The resource name of the metric to update: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - // - // The updated metric must be provided in the request and its - // `name` field must be the same as `[METRIC_ID]`. If the metric - // does not exist in `[PROJECT_ID]`, then a new metric is created. - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; - - // Required. The updated metric. - LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to DeleteLogMetric. -message DeleteLogMetricRequest { - // Required. The resource name of the metric to delete: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; -} From 8d6323027b0a36a3c63787f5d21ff11af423e5ae Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Jun 2021 01:32:41 +0200 Subject: [PATCH 488/855] chore(deps): update dependency google-cloud-bigquery to v2.18.0 (#312) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 800aa855706f..8960ef889316 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.4.0 -google-cloud-bigquery==2.17.0 +google-cloud-bigquery==2.18.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.5.0 From 9c97529e5d4dbac8e01729d9c7b8fd81e425e10f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 9 Jun 2021 12:36:20 -0700 Subject: [PATCH 489/855] feat: support AuditLog and RequestLog protos (#274) --- .../google/cloud/logging_v2/entries.py | 12 +- packages/google-cloud-logging/setup.py | 2 + .../tests/system/test_system.py | 133 +++++++++++++++--- .../tests/unit/test_entries.py | 14 ++ 4 files changed, 138 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py index 87e042018f5c..fa7e5d9d178c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -27,6 +27,9 @@ from google.cloud._helpers import _rfc3339_nanos_to_datetime from google.cloud._helpers import _datetime_to_rfc3339 +# import officially supported proto definitions +import google.cloud.audit.audit_log_pb2 # noqa: F401 +import google.cloud.appengine_logging # noqa: F401 _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -316,13 +319,18 @@ def payload_pb(self): @property def payload_json(self): - if not isinstance(self.payload, Any): + if isinstance(self.payload, collections.abc.Mapping): return self.payload def to_api_repr(self): """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() - info["protoPayload"] = MessageToDict(self.payload) + proto_payload = None + if self.payload_json: + proto_payload =
dict(self.payload_json) + elif self.payload_pb: + proto_payload = MessageToDict(self.payload_pb) + info["protoPayload"] = proto_payload return info def parse_message(self, message): diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 9fbf1b682612..bc08741b8343 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -30,6 +30,8 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", + "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "proto-plus >= 1.11.0", "packaging >= 14.3", diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index cc6d03804461..81de866ee3b5 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -16,9 +16,11 @@ from datetime import timedelta from datetime import timezone import logging +import numbers import os import pytest import unittest +import uuid from google.api_core.exceptions import BadGateway from google.api_core.exceptions import Conflict @@ -36,6 +38,8 @@ from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue + from test_utils.retry import RetryErrors from test_utils.retry import RetryResult from test_utils.system import unique_resource_id @@ -142,32 +146,119 @@ def tearDown(self): def _logger_name(prefix): return prefix + unique_resource_id("-") - def test_list_entry_with_unregistered(self): - from google.protobuf import any_pb2 + @staticmethod + def _to_value(data): + if data is None: + return Value(null_value=NullValue.NULL_VALUE) + elif isinstance(data, numbers.Number): + return Value(number_value=data) + elif isinstance(data, str): + return Value(string_value=data) + elif isinstance(data, bool): + return Value(bool_value=data) + elif isinstance(data, (list, tuple, set)): + return Value( + list_value=ListValue(values=(TestLogging._to_value(e) for e in data)) + ) + elif isinstance(data, dict): + return Value(struct_value=TestLogging._dict_to_struct(data)) + else: + raise TypeError("Unknown data type: %r" % type(data)) + + @staticmethod + def _dict_to_struct(data): + return Struct(fields={k: TestLogging._to_value(v) for k, v in data.items()}) + + def test_list_entry_with_auditlog(self): + """ + Test emitting and listing logs containing a google.cloud.audit.AuditLog proto message + """ from google.protobuf import descriptor_pool from google.cloud.logging_v2 import entries pool = descriptor_pool.Default() type_name = "google.cloud.audit.AuditLog" - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - type_url = "type.googleapis.com/" + type_name - filter_ = self.TYPE_FILTER.format(type_url) + f" AND {_time_filter}" - entry_iter = iter(Config.CLIENT.list_entries(page_size=1, filter_=filter_)) + # Make sure the descriptor is known in the registry. 
+ # Raises KeyError if unknown + pool.FindMessageTypeByName(type_name) + + # create log + audit_dict = { + "@type": type_url, + "methodName": "test", + "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, + "resourceName": "test", + "serviceName": "test", + "status": {"code": 0}, + } + audit_struct = self._dict_to_struct(audit_dict) + + logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() - retry = RetryErrors(TooManyRequests) - protobuf_entry = retry(lambda: next(entry_iter))() + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) + + def test_list_entry_with_requestlog(self): + """ + Test emitting and listing logs containing a google.appengine.logging.v1.RequestLog proto message + """ + from google.protobuf import descriptor_pool + from google.cloud.logging_v2 import entries + + pool = descriptor_pool.Default() + type_name = "google.appengine.logging.v1.RequestLog" + type_url = "type.googleapis.com/" + type_name + # Make sure the descriptor is known in the registry. + # Raises KeyError if unknown + pool.FindMessageTypeByName(type_name) + + # create log + req_dict = { + "@type": type_url, + "ip": "0.0.0.0", + "appId": "test", + "versionId": "test", + "requestId": "12345", + "latency": "500.0s", + "method": "GET", + "status": 500, + "resource": "test", + "httpVersion": "HTTP/1.1", + } + req_struct = self._dict_to_struct(req_dict) + + logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - if Config.CLIENT._use_grpc: - self.assertIsNone(protobuf_entry.payload_json) - self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) - self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) - else: - self.assertIsNone(protobuf_entry.payload_pb) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -288,7 +379,7 @@ def test_log_handler_async(self): cloud_logger = logging.getLogger(handler.name) cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) + cloud_logger.warning(LOG_MESSAGE) handler.flush() entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} @@ -310,7 +401,7 @@ def test_log_handler_sync(self): LOGGER_NAME = "mylogger" cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) + 
cloud_logger.warning(LOG_MESSAGE) entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} @@ -342,7 +433,7 @@ def test_handlers_w_extras(self): "resource": Resource(type="cloudiot_device", labels={}), "labels": {"test-label": "manual"}, } - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -363,7 +454,7 @@ def test_log_root_handler(self): self.to_delete.append(logger) google.cloud.logging.handlers.handlers.setup_logging(handler) - logging.warn(LOG_MESSAGE) + logging.warning(LOG_MESSAGE) entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index ef90b8159a17..b8795b8ce1e5 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -503,6 +503,20 @@ def test_to_api_repr_defaults(self): } self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_struct(self): + from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + LOG_NAME = "struct.log" + message = Struct(fields={"foo": Value(bool_value=True)}) + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "jsonPayload": message, + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_explicit(self): import datetime from google.cloud.logging import Resource From 1f5982fdd8ea8c166162f7c4a07745cec6225369 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 10 Jun 2021 14:13:26 -0700 Subject: [PATCH 490/855] fix: structured log handler formatting issues (#319) --- .../logging_v2/handlers/structured_log.py | 18 ++- .../environment/deployable/nodejs/Dockerfile | 5 +- .../environment/deployable/nodejs/app.js | 88 +++++++++--- .../deployable/nodejs/package.json | 4 +- .../environment/deployable/python/snippets.py | 15 ++ .../envctl/env_scripts/go/kubernetes.sh | 3 - .../env_scripts/nodejs/appengine_standard.sh | 89 ++++++++++++ .../envctl/env_scripts/nodejs/cloudrun.sh | 1 - .../envctl/env_scripts/nodejs/compute.sh | 91 ++++++++++++ .../envctl/env_scripts/nodejs/functions.sh | 5 +- .../envctl/env_scripts/nodejs/kubernetes.sh | 132 ++++++++++++++++++ .../tests/environment/tests/common/common.py | 2 +- .../tests/environment/tests/common/python.py | 40 ++++++ .../environment/tests/go/test_kubernetes.py | 4 +- .../tests/nodejs/test_appengine_standard.py | 29 ++++ .../environment/tests/nodejs/test_compute.py | 30 ++++ .../tests/nodejs/test_kubernetes.py | 30 ++++ .../tests/unit/handlers/test_handlers.py | 55 +++++++- .../unit/handlers/test_structured_log.py | 58 +++++++- tests/environment | 2 +- 20 files changed, 654 insertions(+), 47 deletions(-) create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/appengine_standard.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_compute.py 
create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 0edb5c39eba5..f0b4c69ecd04 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -14,13 +14,13 @@ """Logging handler for printing formatted structured logs to standard output. """ - +import json import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter GCP_FORMAT = ( - '{"message": "%(_msg_str)s", ' + '{"message": %(_formatted_msg)s, ' '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": %(_labels_str)s, ' '"logging.googleapis.com/trace": "%(_trace_str)s", ' @@ -50,7 +50,7 @@ def __init__(self, *, labels=None, stream=None, project_id=None): self.addFilter(log_filter) # make logs appear in GCP structured logging format - self.formatter = logging.Formatter(GCP_FORMAT) + self._gcp_formatter = logging.Formatter(GCP_FORMAT) def format(self, record): """Format the message into structured log JSON. @@ -59,6 +59,12 @@ def format(self, record): Returns: str: A JSON string formatted for GKE fluentd. """ - - payload = self.formatter.format(record) - return payload + # let other formatters alter the message + super_payload = None + if record.msg: + super_payload = super(StructuredLogHandler, self).format(record) + # properly break any formatting in string to make it json safe + record._formatted_msg = json.dumps(super_payload or "") + # convert to GCP structured logging format + gcp_payload = self._gcp_formatter.format(record) + return gcp_payload diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile index f19e36137bf4..f9fcc01026df 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile @@ -19,13 +19,14 @@ FROM node:12-slim # Create and change to the app directory. WORKDIR /usr/src/app -# Copy test script and dependencies to the container image. +# Copy test script and local dependencies to the container image. COPY package*.json ./ COPY app.js ./ COPY tests.js ./ +# Assumption: local file is already built COPY nodejs-logging ./nodejs-logging -# Install dependencies. +# Install test app's dependencies. RUN npm install --production # Environment variable denoting whether to run an app server diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js index fdaebf8fde4b..459607cd4dac 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js @@ -14,9 +14,30 @@ var tests = require('./tests.js'); +/** ****************** GAE, GKE, GCE ****************** + * Enable app subscriber for all environments, except GCR, GCF.
+ */ +async function enableSubscriber() { + if (process.env.ENABLE_SUBSCRIBER) { + const gcpMetadata = require('gcp-metadata'); + const projectId = await gcpMetadata.project('project-id'); + const topicId = process.env.PUBSUB_TOPIC || 'logging-test'; + const subscriptionId = `${topicId}-subscriber`; + const topicName = `projects/${projectId}/topics/${topicId}`; + const subscriptionName = `projects/${projectId}/subscriptions/${subscriptionId}` + + const {PubSub} = require('@google-cloud/pubsub'); + const pubSubClient = new PubSub(); + // Creates a new subscription + pubSubClient.topic(topicName).createSubscription(subscriptionName); + listenForMessages(pubSubClient, subscriptionName).catch(console.error); + } +} +enableSubscriber().catch(console.error); + /** - * Only triggers for GCP services that require a running app server. - * For instance, Cloud Functions does not execute this block. + * ****************** GCR, GKE, GCE ****************** + * For GCP services that require a running app server, except GAE and GCF. * RUNSERVER env var is set in the Dockerfile. */ if (process.env.RUNSERVER) { @@ -29,25 +50,27 @@ if (process.env.RUNSERVER) { /** * Cloud Run to be triggered by Pub/Sub. */ - app.post('/', (req, res) => { - if (!req.body) { - const msg = 'no Pub/Sub message received'; - console.error(`error: ${msg}`); - res.status(400).send(`Bad Request: ${msg}`); - return; - } - if (!req.body.message) { - const msg = 'invalid Pub/Sub message format'; - console.error(`error: ${msg}`); - res.status(400).send(`Bad Request: ${msg}`); - return; - } - - const message = req.body.message; - triggerTest(message); + if (process.env.K_CONFIGURATION) { + app.post('/', (req, res) => { + if (!req.body) { + const msg = 'no Pub/Sub message received'; + console.error(`error: ${msg}`); + res.status(400).send(`Bad Request: ${msg}`); + return; + } + if (!req.body.message) { + const msg = 'invalid Pub/Sub message format'; + console.error(`error: ${msg}`); + res.status(400).send(`Bad Request: ${msg}`); + return; + } - res.status(204).send(); - }); + const message = req.body.message; + triggerTest(message); + + res.status(204).send(); + }); + }; // Start app server const PORT = process.env.PORT || 8080; @@ -68,6 +91,31 @@ exports.pubsubFunction = (message, context) => { triggerTest(message); }; +/** + * ****************** GAE, GKE, GCE ****************** + * Asynchronously listens for pubsub messages until a TIMEOUT is reached. + * @param pubSubClient + * @param subscriptionName + */ +async function listenForMessages(pubSubClient, subscriptionName) { + // References an existing subscription + const subscription = pubSubClient.subscription(subscriptionName); + + // Handles incoming messages and triggers tests. + const messageHandler = message => { + triggerTest(message); + // "Ack" (acknowledge receipt of) the message + message.ack(); + }; + + // Listen for new messages until timeout is hit or test is done. + subscription.on('message', messageHandler); + + setTimeout(() => { + subscription.removeListener('message', messageHandler); + }, 600000); // max 10 minutes timeout +} + function triggerTest(message) { const testName = message.data ? 
Buffer.from(message.data, 'base64').toString() diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json index 817b0f2b5964..f6e99b342ecf 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/package.json @@ -5,9 +5,11 @@ "description": "", "main": "app.js", "dependencies": { + "@google-cloud/logging": "file:nodejs-logging", + "@google-cloud/pubsub": "^2.12.0", "body-parser": "^1.19.0", "express": "^4.17.1", - "@google-cloud/logging": "file:nodejs-logging" + "gcp-metadata": "^4.2.1" }, "devDependencies": {}, "engines": { diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 99ec66c5a397..2be50bea17b9 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -100,6 +100,21 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): else: logging.critical(log_text, extra=kwargs) +def pylogging_multiline(log_text="pylogging", second_line="line 2", **kwargs): + logging.error(f"{log_text}\n{second_line}") + +def pylogging_complex_chars(**kwargs): + logging.error('}"{!@[') + +def pylogging_with_formatter(log_text="pylogging", format_str="%(name)s :: %(levelname)s :: %(message)s", **kwargs): + root_logger = logging.getLogger() + handler = root_logger.handlers[0] + handler.setFormatter(logging.Formatter(fmt=format_str)) + logging.error(log_text) + handler.setFormatter(None) + +def pylogging_with_arg(log_text="my_arg", **kwargs): + logging.error("Arg: %s", log_text) def pylogging_flask( log_text="pylogging_flask", diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh index 91191e375212..eb1b8b8701b6 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh @@ -82,9 +82,6 @@ build_go_container(){ } deploy() { -# local SCRIPT="${1:-router.py}" -# TODO: double check this doesn't print impt otkens/pws - set -x attach_or_create_gke_cluster build_go_container cat << EOF > $TMP_DIR/gke.yaml diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/appengine_standard.sh new file mode 100755 index 000000000000..316a570cf326 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/appengine_standard.sh @@ -0,0 +1,89 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ + set -e # exit on any failure + set -o pipefail # any step in pipe caused failure + set -u # undefined variables cause exit + + SERVICE_NAME="log-node-gae-$(echo $ENVCTL_ID | head -c 8)" + LIBRARY_NAME="nodejs-logging" + + destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service + gcloud app services delete $SERVICE_NAME -q 2> /dev/null + set -e +} + + verify() { + set +e + gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + + deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -ex + + # copy over local version of the library + pushd $SUPERREPO_ROOT + tar -cvf $TMP_DIR/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . + popd + mkdir -p $TMP_DIR/$LIBRARY_NAME + tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/$LIBRARY_NAME + + # Copy over test code and Node dependencies + cp $REPO_ROOT/deployable/nodejs/package.json $TMP_DIR + cp $REPO_ROOT/deployable/nodejs/*.js $TMP_DIR + + # manual_scaling allows 1 instance to continuously run regardless of the load level. + cat << EOF > $TMP_DIR/app.yaml + runtime: nodejs12 + service: $SERVICE_NAME + manual_scaling: + instances: 1 + env_variables: + ENABLE_SUBSCRIBER: "true" + PUBSUB_TOPIC: $SERVICE_NAME +EOF + # deploy + pushd $TMP_DIR + gcloud app deploy -q + popd + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done +} + + filter-string() { + echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\"" +} diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh index 97bf97b95337..c9be55c09158 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh @@ -87,7 +87,6 @@ build_node_container() { } deploy() { - set -x build_node_container gcloud config set run/platform managed gcloud config set run/region us-west1 diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh new file mode 100755 index 000000000000..90a49bde506f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="log-node-gce-$(echo $ENVCTL_ID | head -c 8)" +ZONE="us-west2-a" +LIBRARY_NAME="nodejs-logging" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud compute instances delete $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud compute instances describe $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + + +build_node_container() { + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + # copy super-repo into deployable dir + _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} + _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + + # copy over local copy of library + pushd $SUPERREPO_ROOT + tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . + popd + mkdir -p $_deployable_dir/$LIBRARY_NAME + tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME + # build container + docker build -t $GCR_PATH $_deployable_dir + docker push $GCR_PATH +} + +deploy() { + build_node_container + gcloud config set compute/zone $ZONE + gcloud compute instances create-with-container \ + $SERVICE_NAME \ + --container-image $GCR_PATH \ + --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true" + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done + +} + +filter-string() { + #INSTANCE_ID=$(gcloud compute instances list --filter="name~^$SERVICE_NAME$" --format="value(ID)") + #echo "resource.type=\"gce_instance\" AND resource.labels.instance_id=\"$INSTANCE_ID\"" + echo "resource.type=\"global\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh index b1bfe95e4849..b521beed3ec0 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh @@ -48,9 +48,6 @@ deploy() { gcloud pubsub topics create $SERVICE_NAME 2>/dev/null set -e - # TODO remove print - set -x - # set up deployment directory # copy over local copy of library pushd $SUPERREPO_ROOT echo "in SUPERREPO_ROOT" @@ -61,7 +58,7 @@ deploy() { mkdir $TMP_DIR/nodejs-logging tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/nodejs-logging - # copy test code into temporary test file + # copy test code into deployment folder cp $REPO_ROOT/deployable/nodejs/app.js $TMP_DIR/app.js cp $REPO_ROOT/deployable/nodejs/tests.js $TMP_DIR/tests.js cp $REPO_ROOT/deployable/nodejs/package.json $TMP_DIR/ diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh 
b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh new file mode 100755 index 000000000000..079b9162f5e4 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh @@ -0,0 +1,132 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="log-node-gke-$(echo $ENVCTL_ID | head -c 8)" +ZONE=us-central1-a +LIBRARY_NAME="nodejs-logging" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete cluster + gcloud container clusters delete --zone $ZONE $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud pubsub subscriptions describe $SERVICE_NAME-subscriber 2> /dev/null + if [[ $? != 0 ]]; then + echo "FALSE" + exit 1 + fi + gcloud container clusters describe --zone $ZONE $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +attach_or_create_gke_cluster(){ + set +e + gcloud container clusters get-credentials $SERVICE_NAME + if [[ $? -ne 0 ]]; then + echo "cluster not found. creating..." + gcloud container clusters create $SERVICE_NAME \ + --zone $ZONE \ + --scopes=gke-default,pubsub + fi + set -e +} + +build_node_container(){ + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + # copy super-repo into deployable dir + _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} + _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + + # copy over local copy of library + pushd $SUPERREPO_ROOT + tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . 
+ popd + mkdir -p $_deployable_dir/$LIBRARY_NAME + tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME + # build container + docker build -t $GCR_PATH $_deployable_dir + docker push $GCR_PATH +} + +deploy() { + attach_or_create_gke_cluster + build_node_container + cat << EOF > $TMP_DIR/gke.yaml + apiVersion: apps/v1 + kind: Deployment + metadata: + name: $SERVICE_NAME + spec: + selector: + matchLabels: + app: $SERVICE_NAME + template: + metadata: + labels: + app: $SERVICE_NAME + spec: + containers: + - name: $SERVICE_NAME + image: $GCR_PATH + env: + - name: PUBSUB_TOPIC + value: $SERVICE_NAME + - name: ENABLE_SUBSCRIBER + value: "true" +EOF + # clean cluster + set +e + kubectl delete deployments --all 2>/dev/null + kubectl delete -f $TMP_DIR 2>/dev/null + set -e + # deploy test container + kubectl apply -f $TMP_DIR + sleep 60 + # wait for pod to spin up + kubectl wait --for=condition=ready pod -l app=$SERVICE_NAME + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done +} + +filter-string() { + echo "resource.type=\"k8s_container\" AND resource.labels.cluster_name=\"$SERVICE_NAME\"" +} diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 4f37bcfe79be..31a157d99766 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -161,7 +161,7 @@ def test_monitored_resource(self): # TODO: other languages to also support this test return True log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text) + log_list = self.trigger_and_retrieve(log_text, "simplelog") found_resource = log_list[-1].resource self.assertIsNotNone(self.monitored_resource_name) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 7455924d6329..1dcbb9a0f7ca 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -15,6 +15,7 @@ import logging import unittest import inspect +import re import google.cloud.logging @@ -52,6 +53,45 @@ def test_pylogging_receive_unicode_log(self): found_log = log self.assertIsNotNone(found_log, "expected unicode log not found") + def test_pylogging_multiline(self): + first_line = f"{inspect.currentframe().f_code.co_name}" + second_line = "hello world" + log_list = self.trigger_and_retrieve(first_line, "pylogging_multiline", second_line=second_line) + found_log = log_list[-1] + found_message = ( + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) + + self.assertTrue(re.match(f"{first_line} .*\n{second_line}", found_message)) + + def test_pylogging_with_argument(self): + log_text = f"{inspect.currentframe().f_code.co_name} Name: %s" + name_arg = "Daniel" + log_list = self.trigger_and_retrieve(log_text, "pylogging_with_arg") + found_log = log_list[-1] + found_message = ( + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) + + self.assertTrue(re.match(f"Arg: {log_text} .*",
found_message)) + + def test_pylogging_with_formatter(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + format_str = '%(levelname)s :: %(message)s' + log_list = self.trigger_and_retrieve(log_text, "pylogging_with_formatter", format_str=format_str) + found_log = log_list[-1] + found_message = ( + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) + + self.assertTrue(re.match(f"ERROR :: {log_text} .*", found_message)) + def test_monitored_resource_pylogging(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "pylogging") diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py index 10090261eb01..bb30cf105eda 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py @@ -25,5 +25,5 @@ class TestKubernetesEngine(Common, unittest.TestCase): environment = "kubernetes" language = "go" - monitored_resource_name = "gce_instance" - monitored_resource_labels = ["project_id", "instance_id", "zone"] \ No newline at end of file + monitored_resource_name = "k8s_container" + monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py new file mode 100644 index 000000000000..8e24fe96a57d --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py @@ -0,0 +1,29 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestAppEngineStandard(Common, unittest.TestCase): + + environment = "appengine_standard" + language = "nodejs" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_compute.py new file mode 100644 index 000000000000..54bb0617f599 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_compute.py @@ -0,0 +1,30 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestComputeEngine(Common, unittest.TestCase): + + environment = "compute" + language = "nodejs" + + monitored_resource_name = "gce_instance" + monitored_resource_labels = ["instance_id", "zone", "project_id"] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py new file mode 100644 index 000000000000..ced5d748e5f8 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py @@ -0,0 +1,30 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestKubernetesEngine(Common, unittest.TestCase): + + environment = "kubernetes" + language = "nodejs" + + monitored_resource_name = "k8s_container" + monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 4ba05212179b..b7fef1b9eaa9 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -291,7 +291,9 @@ def test_emit(self): ) logname = "loggername" message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) handler.handle(record) self.assertEqual( handler.transport.send_called_with, @@ -315,7 +317,9 @@ def test_emit_manual_field_override(self): ) logname = "loggername" message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) # set attributes manually expected_trace = "123" setattr(record, "trace", expected_trace) @@ -350,6 +354,53 @@ def test_emit_manual_field_override(self): ), ) + def test_emit_with_custom_formatter(self): + """ + Handler should respect custom formatters attached + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") + handler.setFormatter(logFormatter) + message = "test" + expected_result = "logname :: INFO :: test" + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + (record, 
expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + + def test_format_with_arguments(self): + """ + Handler should support format string arguments + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 0536583a514c..3d1c11ab0782 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -109,7 +109,6 @@ def test_format_with_quotes(self): When logging a message containing quotes, escape chars should be added """ import logging - import json handler = self._make_one() message = '"test"' @@ -117,9 +116,60 @@ def test_format_with_quotes(self): record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) record.created = None handler.filter(record) - result = json.loads(handler.format(record)) - result["message"] = expected_result - self.assertEqual(result["message"], expected_result) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_line_break(self): + """ + When logging a message containing \n, it should be properly escaped + """ + import logging + + handler = self._make_one() + message = "test\ntest" + expected_result = "test\\ntest" + record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_custom_formatter(self): + """ + Handler should respect custom formatters attached + """ + import logging + + handler = self._make_one() + logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") + handler.setFormatter(logFormatter) + message = "test" + expected_result = "logname :: INFO :: test" + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_arguments(self): + """ + Handler should support format string arguments + """ + import logging + + handler = self._make_one() + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) def test_format_with_request(self): import logging diff --git a/tests/environment b/tests/environment index 30d6a80838a1..a0af8d102a3c 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 30d6a80838a1cae6fb3945f41f3e1d90e815c0c9 +Subproject commit a0af8d102a3c711cdff0dd12e01c8bfd357b7a83 From 
d3c07dc0169fdf63e92f268b6e1a9e9625078781 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 10 Jun 2021 23:14:03 +0200 Subject: [PATCH 491/855] chore(deps): update dependency google-cloud-bigquery to v2.20.0 (#317) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8960ef889316..effb2f98dc1b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.4.0 -google-cloud-bigquery==2.18.0 +google-cloud-bigquery==2.20.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.5.0 From 76b9b3c8a34c8e6721aa828ce4afda6747f8bdba Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Jun 2021 12:05:45 -0700 Subject: [PATCH 492/855] chore: release 2.5.0 (#320) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 1828b3e69758..3dd7ab48b8bc 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.5.0](https://www.github.com/googleapis/python-logging/compare/v2.4.0...v2.5.0) (2021-06-10) + + +### Features + +* support AuditLog and RequestLog protos ([#274](https://www.github.com/googleapis/python-logging/issues/274)) ([5d91be9](https://www.github.com/googleapis/python-logging/commit/5d91be9f121c364cbd53c6a9fffc4fb6ca6bd324)) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#300](https://www.github.com/googleapis/python-logging/issues/300)) ([68c5cec](https://www.github.com/googleapis/python-logging/commit/68c5ceced3288253af8e3c6013a35fa3954b37bc)) +* structured log handler formatting issues ([#319](https://www.github.com/googleapis/python-logging/issues/319)) ([db9da37](https://www.github.com/googleapis/python-logging/commit/db9da3700511b5a24c3c44c9f4377705937caf46)) + ## [2.4.0](https://www.github.com/googleapis/python-logging/compare/v2.3.1...v2.4.0) (2021-05-12) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index bc08741b8343..1cdf5e4e2431 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.4.0" +version = "2.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d64ab2085e267eaa0212fc34bbb9871524b31259 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Jun 2021 16:07:28 -0700 Subject: [PATCH 493/855] test: clean up extra topics from snippet tests (#324) * added code to remove extra topics in snippet tests * fixed lint issue --- .../samples/snippets/usage_guide.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index 
b28d10980ee1..c931ed167977 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -264,13 +264,19 @@ def _sink_pubsub_setup(client): ) # API call # [END sink_topic_permissions] - return topic + # create callback wrapper to delete topic when done + class TopicDeleter: + def delete(self): + client.delete_topic(request={"topic": topic_path}) + + return topic, TopicDeleter() @snippet def sink_pubsub(client, to_delete): """Sink log entries to pubsub.""" - topic = _sink_pubsub_setup(client) + topic, topic_deleter = _sink_pubsub_setup(client) + to_delete.append(topic_deleter) sink_name = "robots-pubsub-%d" % (_millis(),) filter_str = "logName:apache-access AND textPayload:robot" updated_filter = "textPayload:robot" @@ -282,6 +288,7 @@ def sink_pubsub(client, to_delete): sink.create() # API call assert sink.exists() # API call # [END sink_pubsub_create] + to_delete.append(sink) created_sink = sink # [START client_list_sinks] From a74d8eb0da5619ab7402e0a68d091aa1f90534af Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 17 Jun 2021 10:52:15 +0000 Subject: [PATCH 494/855] chore: new owl bot post processor docker image (#325) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/docs/conf.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index da616c91a3b6..ea06d395ea2b 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 + digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index b60a9ce4c620..6e52e94f3a58 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -80,9 +80,9 @@ master_doc = "index" # General information about the project. 
-project = u"google-cloud-logging" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-logging" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -281,7 +281,7 @@ ( master_doc, "google-cloud-logging.tex", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "manual", ) @@ -316,7 +316,7 @@ ( master_doc, "google-cloud-logging", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", [author], 1, ) @@ -335,7 +335,7 @@ ( master_doc, "google-cloud-logging", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "google-cloud-logging", "google-cloud-logging Library", From f7c407d978b7e2bd9efc21ea3cc675e9e0812106 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Jun 2021 01:36:13 +0000 Subject: [PATCH 495/855] docs: omit mention of Python 2.7 in 'CONTRIBUTING.rst' (#1127) (#327) Closes #1126 Source-Link: https://github.com/googleapis/synthtool/commit/b91f129527853d5b756146a0b5044481fb4e09a8 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/CONTRIBUTING.rst | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index ea06d395ea2b..cc49c6a3dfac 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce + digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 4604493b61f3..45a53f09878d 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -69,7 +69,6 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: - $ nox -s unit-2.7 $ nox -s unit-3.8 $ ... @@ -144,7 +143,6 @@ Running System Tests # Run all system tests $ nox -s system-3.8 - $ nox -s system-2.7 # Run a single system test $ nox -s system-3.8 -- -k @@ -152,9 +150,8 @@ Running System Tests .. note:: - System tests are only configured to run under Python 2.7 and - Python 3.8. For expediency, we do not run them in older versions - of Python 3. + System tests are only configured to run under Python 3.8. + For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local auth settings and change some configuration in your project to From 2fc0e0bec86c1fc7a540feda6414f4accae2a0f1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 20 Jun 2021 02:56:02 +0200 Subject: [PATCH 496/855] chore(deps): update dependency google-cloud-logging to v2.5.0 (#326) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Change | |---|---| | [google-cloud-logging](https://togithub.com/googleapis/python-logging) | `==2.4.0` -> `==2.5.0` | --- ### Release Notes
googleapis/python-logging

### [`v2.5.0`](https://togithub.com/googleapis/python-logging/blob/master/CHANGELOG.md#250-httpswwwgithubcomgoogleapispython-loggingcomparev240v250-2021-06-10)

[Compare Source](https://togithub.com/googleapis/python-logging/compare/v2.4.0...v2.5.0)

##### Features

- support AuditLog and RequestLog protos ([#274](https://www.github.com/googleapis/python-logging/issues/274)) ([5d91be9](https://www.github.com/googleapis/python-logging/commit/5d91be9f121c364cbd53c6a9fffc4fb6ca6bd324))

##### Bug Fixes

- **deps:** add packaging requirement ([#300](https://www.github.com/googleapis/python-logging/issues/300)) ([68c5cec](https://www.github.com/googleapis/python-logging/commit/68c5ceced3288253af8e3c6013a35fa3954b37bc))
- structured log handler formatting issues ([#319](https://www.github.com/googleapis/python-logging/issues/319)) ([db9da37](https://www.github.com/googleapis/python-logging/commit/db9da3700511b5a24c3c44c9f4377705937caf46))
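The AuditLog/RequestLog feature above means entries whose protoPayload carries one of those types can now be deserialized into the corresponding message classes. A minimal sketch of reading such entries, assuming google-cloud-logging>=2.5.0 and google-cloud-audit-log are installed; the filter string and max_results value are illustrative, not part of this PR:

    from google.cloud import logging

    client = logging.Client()
    audit_filter = (
        'protoPayload.@type="type.googleapis.com/google.cloud.audit.AuditLog"'
    )
    for entry in client.list_entries(filter_=audit_filter, max_results=5):
        # With 2.5.0, audit-log protoPayloads can be parsed into the
        # AuditLog message type from google-cloud-audit-log.
        print(entry.payload)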
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index effb2f98dc1b..7feb0d2cc313 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.4.0 +google-cloud-logging==2.5.0 google-cloud-bigquery==2.20.0 google-cloud-storage==1.38.0 google-cloud-pubsub==2.5.0 From 940555ecbfd5013b314406d645b14cd171b4eb61 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 20 Jun 2021 01:10:03 +0000 Subject: [PATCH 497/855] chore: update precommit hook pre-commit/pre-commit-hooks to v4 (#1083) (#329) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` | --- ### Release Notes
pre-commit/pre-commit-hooks

### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1)

[Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1)

##### Fixes

- `check-shebang-scripts-are-executable` fix entry point.
  - [#602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@Person-93](https://togithub.com/Person-93).
  - [#603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@scop](https://togithub.com/scop).

### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0)

[Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0)

##### Features

- `check-json`: report duplicate keys.
  - [#558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@AdityaKhursale](https://togithub.com/AdityaKhursale).
  - [#554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@adamchainz](https://togithub.com/adamchainz).
- `no-commit-to-branch`: add `main` to default blocked branches.
  - [#565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@ndevenish](https://togithub.com/ndevenish).
- `check-case-conflict`: check conflicts in directory names as well.
  - [#575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@slsyy](https://togithub.com/slsyy).
  - [#70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@andyjack](https://togithub.com/andyjack).
- `check-vcs-permalinks`: forbid other branch names.
  - [#582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@jack1142](https://togithub.com/jack1142).
  - [#581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@jack1142](https://togithub.com/jack1142).
- `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable.
  - [#545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@scop](https://togithub.com/scop).

##### Fixes

- `check-executables-have-shebangs`: Short circuit shebang lookup on windows.
  - [#544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@scop](https://togithub.com/scop).
- `requirements-txt-fixer`: Fix comments which have indentation
  - [#549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@greshilov](https://togithub.com/greshilov).
  - [#548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@greshilov](https://togithub.com/greshilov).
- `pretty-format-json`: write to stdout using UTF-8 encoding.
  - [#571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@jack1142](https://togithub.com/jack1142).
  - [#570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@jack1142](https://togithub.com/jack1142).
- Use more inclusive language.
  - [#599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@asottile](https://togithub.com/asottile).

##### Breaking changes

- Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`.
  - [#597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@asottile](https://togithub.com/asottile).
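Of the v4.0.0 features above, the `check-json` duplicate-key report is easy to picture: stock `json.loads` silently keeps the last value for a repeated key, so duplicate detection has to hook into pair parsing. A rough sketch of the idea, not the hook's actual implementation:

    import json

    def reject_duplicate_keys(pairs):
        seen = {}
        for key, value in pairs:
            if key in seen:
                raise ValueError(f"duplicate key: {key!r}")
            seen[key] = value
        return seen

    # Raises ValueError here; plain json.loads would return {"a": 2}.
    json.loads('{"a": 1, "a": 2}', object_pairs_hook=reject_duplicate_keys)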
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Link: https://github.com/googleapis/synthtool/commit/333fd90856f1454380514bc59fc0936cdaf1c202 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index cc49c6a3dfac..9602d540595e 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd + digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 4f00c7cffcfd..62eb5a77d9a3 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -16,7 +16,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer From ba742233c304cb12517d041d8f6ccf97297c0e03 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 22 Jun 2021 20:08:36 +0000 Subject: [PATCH 498/855] chore: add kokoro 3.9 config templates (#333) Source-Link: https://github.com/googleapis/synthtool/commit/b0eb8a8b30b46a3c98d23c23107acb748c6601a1 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.9/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.9/continuous.cfg | 6 +++ .../samples/python3.9/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.9/periodic.cfg | 6 +++ .../.kokoro/samples/python3.9/presubmit.cfg | 6 +++ 6 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 9602d540595e..0954585f2833 100644 --- 
a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 + digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 000000000000..d4c521347140 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: 
//devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file From 9e4eda5bc243a0d4c271bb3aca8237694e699f7b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 23 Jun 2021 20:24:49 +0000 Subject: [PATCH 499/855] feat: add always_use_jwt_access (#334) ... chore: update gapic-generator-ruby to the latest commit chore: release gapic-generator-typescript 1.5.0 Committer: @miraleung PiperOrigin-RevId: 380641501 Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/27e4c88b4048e5f56508d4e1aa417d60a3380892 --- packages/google-cloud-logging/.coveragerc | 1 - .../config_service_v2/transports/base.py | 40 +++---- .../config_service_v2/transports/grpc.py | 7 +- .../transports/grpc_asyncio.py | 7 +- .../logging_service_v2/transports/base.py | 40 +++---- .../logging_service_v2/transports/grpc.py | 7 +- .../transports/grpc_asyncio.py | 7 +- .../metrics_service_v2/transports/base.py | 40 +++---- .../metrics_service_v2/transports/grpc.py | 7 +- .../transports/grpc_asyncio.py | 7 +- packages/google-cloud-logging/setup.py | 2 +- .../testing/constraints-3.6.txt | 2 +- .../logging_v2/test_config_service_v2.py | 109 +++-------------- .../logging_v2/test_logging_service_v2.py | 110 +++--------------- .../logging_v2/test_metrics_service_v2.py | 110 +++--------------- 15 files changed, 113 insertions(+), 383 deletions(-) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index cfcd5ac606f1..b38d22e21fd1 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/logging/__init__.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index d52c97635c4c..41a63f75da6a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config from google.protobuf import empty_pb2 # type: ignore @@ -44,8 +45,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -68,6 +67,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -91,6 +91,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -119,13 +121,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. # TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -146,27 +155,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 327cc79c3e77..70018fa3af12 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -150,6 +150,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -205,14 +206,14 @@ def create_channel( and ``credentials_file`` are passed. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3d7d271bf469..3274060938ec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -79,14 +79,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -196,6 +196,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index fdcbead00c60..de6e25ccb018 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging from google.protobuf import empty_pb2 # type: ignore @@ -44,8 +45,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -69,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -92,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -120,13 +122,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. # TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -147,27 +156,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 5e5c1ad0c9c1..a79cf885d29e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -150,6 +150,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -205,14 +206,14 @@ def create_channel( and ``credentials_file`` are passed. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 1f33ad78a14f..e5ffe115527b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -79,14 +79,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -196,6 +196,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 814f62590b2a..1cd695d32e37 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_metrics from google.protobuf import empty_pb2 # type: ignore @@ -44,8 +45,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -69,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -92,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -120,13 +122,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -147,27 +156,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 1c9b3dde9924..85fa5bfce89b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -150,6 +150,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -205,14 +206,14 @@ def create_channel( and ``credentials_file`` are passed. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 62a0bf0f855a..46000ae7742c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -79,14 +79,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -196,6 +196,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 1cdf5e4e2431..ff2e3b04f8b4 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-api-core[grpc] >= 1.26.0, <2.0.0dev", "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index 61bbd6ec66fc..7e089b8b45a4 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.2 +google-api-core==1.26.0 google-cloud-core==1.4.1 proto-plus==1.11.0 packaging==14.3 diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 8be1ee06fa71..1633a2da34cc 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -37,9 +37,6 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports -from google.cloud.logging_v2.services.config_service_v2.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.logging_v2.services.config_service_v2.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -50,8 +47,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -62,16 +60,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -134,6 +122,18 @@ def test_config_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] +) +def test_config_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize( "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) @@ -5570,7 +5570,6 @@ def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -5604,84 +5603,6 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_config_service_v2_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_config_service_v2_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 5de01cf2163c..006e638ce2c1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -38,9 +38,6 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -56,8 +53,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -68,16 +66,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -141,6 +129,18 @@ def test_logging_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] +) +def test_logging_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize( "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) @@ -2108,7 +2108,6 @@ def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2143,85 +2142,6 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LoggingServiceV2GrpcTransport, grpc_helpers), - (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_logging_service_v2_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LoggingServiceV2GrpcTransport, grpc_helpers), - (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_logging_service_v2_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a8a420a28064..53649345c73a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -41,9 +41,6 @@ from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -54,8 +51,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -66,16 +64,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -139,6 +127,18 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] +) +def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize( "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) @@ -2031,7 +2031,6 @@ def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2066,85 +2065,6 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetricsServiceV2GrpcTransport, grpc_helpers), - (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_metrics_service_v2_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetricsServiceV2GrpcTransport, grpc_helpers), - (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_metrics_service_v2_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ From 7216ebc11e503166a5b6ab4363a0fac6afe5467c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Jun 2021 11:40:09 +0000 Subject: [PATCH 500/855] chore(python): simplify nox steps in CONTRIBUTING.rst (#340) Source-Link: https://github.com/googleapis/synthtool/commit/26558bae8976a985d73c2d98c31d8612273f907d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/CONTRIBUTING.rst | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 0954585f2833..e2b39f946040 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 + digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 45a53f09878d..ced2c87c6dac 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -68,14 +68,12 @@ Using ``nox`` We use `nox `__ to instrument our tests. 
- To test your changes, run unit tests with ``nox``:: + $ nox -s unit - $ nox -s unit-3.8 - $ ... +- To run a single unit test:: -- Args to pytest can be passed through the nox command separated by a `--`. For - example, to run a single test:: + $ nox -s unit-3.9 -- -k - $ nox -s unit-3.8 -- -k .. note:: @@ -142,7 +140,7 @@ Running System Tests - To run system tests, you can execute:: # Run all system tests - $ nox -s system-3.8 + $ nox -s system # Run a single system test $ nox -s system-3.8 -- -k @@ -215,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py -We also explicitly decided to support Python 3 beginning with version -3.6. Reasons for this include: +We also explicitly decided to support Python 3 beginning with version 3.6. +Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ From 9e430d406b90080ec7169b879a1b4abad57ef663 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 26 Jun 2021 13:42:10 +0200 Subject: [PATCH 501/855] chore(deps): update dependency google-cloud-storage to v1.39.0 (#335) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==1.38.0` -> `==1.39.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.39.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.39.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.39.0/compatibility-slim/1.38.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.39.0/confidence-slim/1.38.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage

### [`v1.39.0`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#1390-httpswwwgithubcomgoogleapispython-storagecomparev1380v1390-2021-06-21)

[Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.38.0...v1.39.0)

##### Features

- media operation retries can be configured using the same interface as with non-media operation ([#447](https://www.github.com/googleapis/python-storage/issues/447)) ([0dbbb8a](https://www.github.com/googleapis/python-storage/commit/0dbbb8ac17a4b632707485ee6c7cc15e4670efaa))

##### Bug Fixes

- add ConnectionError to default retry ([#445](https://www.github.com/googleapis/python-storage/issues/445)) ([8344253](https://www.github.com/googleapis/python-storage/commit/8344253a1969b9d04b81f87a6d7bddd3ddb55006))
- apply idempotency policies for ACLs ([#458](https://www.github.com/googleapis/python-storage/issues/458)) ([2232f38](https://www.github.com/googleapis/python-storage/commit/2232f38933dbdfeb4f6585291794d332771ffdf2))
- replace python lifecycle action parsing ValueError with warning ([#437](https://www.github.com/googleapis/python-storage/issues/437)) ([2532d50](https://www.github.com/googleapis/python-storage/commit/2532d506b44fc1ef0fa0a996822d29e7459c465a))
- revise blob.compose query parameters `if_generation_match` ([#454](https://www.github.com/googleapis/python-storage/issues/454)) ([70d19e7](https://www.github.com/googleapis/python-storage/commit/70d19e72831dee112bb07f38b50beef4890c1155))

##### Documentation

- streamline 'timeout' / 'retry' docs in docstrings ([#461](https://www.github.com/googleapis/python-storage/issues/461)) ([78b2eba](https://www.github.com/googleapis/python-storage/commit/78b2eba81003b437cd24f2b8d269ea2455682507))
- streamline docstrings for conditional parmas ([#464](https://www.github.com/googleapis/python-storage/issues/464)) ([6999370](https://www.github.com/googleapis/python-storage/commit/69993702390322df07cc2e818003186a47524c2b))
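The media-retry feature above unifies how retries are passed: per the release note, downloads and uploads take the same retry objects as metadata operations. A sketch assuming google-cloud-storage>=1.39.0; the bucket and object names are illustrative:

    from google.cloud import storage
    from google.cloud.storage.retry import DEFAULT_RETRY

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("data.csv")
    # Media operations now accept the same google.api_core.retry.Retry
    # objects as non-media calls, here with a tightened overall deadline.
    data = blob.download_as_bytes(retry=DEFAULT_RETRY.with_deadline(60.0))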
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 7feb0d2cc313..d29af8c30574 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 google-cloud-bigquery==2.20.0 -google-cloud-storage==1.38.0 +google-cloud-storage==1.39.0 google-cloud-pubsub==2.5.0 From 1090f70c8de59cfe3cd05de20735d7481374baa2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Jul 2021 07:51:13 -0400 Subject: [PATCH 502/855] fix: disable always_use_jwt_access (#342) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.50.3 Committer: @busunkim96 PiperOrigin-RevId: 382142900 Source-Link: https://github.com/googleapis/googleapis/commit/513440fda515f3c799c22a30e3906dcda325004e Source-Link: https://github.com/googleapis/googleapis-gen/commit/7b1e2c31233f79a704ec21ca410bf661d6bc68d0 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/transports/base.py | 2 +- .../config_service_v2/transports/grpc.py | 5 ++- .../transports/grpc_asyncio.py | 5 ++- .../logging_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/grpc.py | 5 ++- .../transports/grpc_asyncio.py | 5 ++- .../metrics_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/grpc.py | 5 ++- .../transports/grpc_asyncio.py | 5 ++- .../logging_v2/test_config_service_v2.py | 41 +++++++++-------- .../logging_v2/test_logging_service_v2.py | 44 +++++++++---------- .../logging_v2/test_metrics_service_v2.py | 44 +++++++++---------- 12 files changed, 93 insertions(+), 72 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 41a63f75da6a..e191687e9458 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -102,7 +102,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 70018fa3af12..ccd766fa79a3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -98,6 +99,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -150,7 +153,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3274060938ec..3b8139e1e050 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -145,6 +146,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -196,7 +199,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index de6e25ccb018..fb5e3c8b00f1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -103,7 +103,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
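Taken together, the new constructor flag shown in the gRPC transport diffs above means callers now opt in to self-signed JWTs instead of getting them unconditionally. A minimal sketch using the names from this patch; the key file path is hypothetical.

```python
# Sketch of the opt-in flag on the gRPC transport; the default is now False,
# so service-account credentials are used as-is unless the caller asks for
# self-signed JWT access. The key file path is hypothetical.
from google.cloud.logging_v2.services.config_service_v2.transports.grpc import (
    ConfigServiceV2GrpcTransport,
)
from google.oauth2 import service_account

creds = service_account.Credentials.from_service_account_file("key.json")
transport = ConfigServiceV2GrpcTransport(
    credentials=creds,
    always_use_jwt_access=True,  # explicit opt-in; omit to keep plain OAuth
)
```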
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a79cf885d29e..fddf039883f7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -98,6 +99,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -150,7 +153,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index e5ffe115527b..3e158cd6f54a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -145,6 +146,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -196,7 +199,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 1cd695d32e37..f62eb00461bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -103,7 +103,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 85fa5bfce89b..6a7a2c6a61a7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -98,6 +99,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -150,7 +153,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 46000ae7742c..d2d9b6188061 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -145,6 +146,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -196,7 +199,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1633a2da34cc..0691b2699b3d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -131,7 +131,25 @@ def test_config_service_v2_client_service_account_always_use_jwt(client_class): ) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConfigServiceV2GrpcTransport, "grpc"), + (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_config_service_v2_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize( @@ -5625,12 +5643,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -5739,12 +5752,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -5791,12 +5799,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 006e638ce2c1..a373edc3b451 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -138,7 +138,25 @@ def test_logging_service_v2_client_service_account_always_use_jwt(client_class): 
) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LoggingServiceV2GrpcTransport, "grpc"), + (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_logging_service_v2_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize( @@ -2164,13 +2182,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2279,13 +2291,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2332,13 +2338,7 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 53649345c73a..ff91b20281dc 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -136,7 +136,25 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): ) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MetricsServiceV2GrpcTransport, "grpc"), + (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_metrics_service_v2_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize( @@ -2087,13 +2105,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2202,13 +2214,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2255,13 +2261,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ From 036d6f23a3233132f66a3b81330b85da6dc185f3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 15:26:36 +0000 Subject: [PATCH 503/855] build(python): exit with success status if no samples found (#352) Source-Link: https://github.com/googleapis/synthtool/commit/53ea3896a52f87c758e79b5a19fa338c83925a98 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.kokoro/test-samples-impl.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index e2b39f946040..a5d3697f2167 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 + digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index cf5de74c17a5..311a8d54b9f1 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -20,9 +20,9 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. 
`./samples` not found" +# Exit early if samples don't exist +if ! find samples -name 'requirements.txt' | grep -q .; then + echo "No tests run. './samples/**/requirements.txt' not found" exit 0 fi From 56c499d0db803c1a59810139148b168fc5eeb4c5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 17:58:38 +0000 Subject: [PATCH 504/855] build(python): remove python 3.7 from kokoro Dockerfile (#353) Source-Link: https://github.com/googleapis/synthtool/commit/e44dc0c742b1230887a73552357e0c18dcc30b92 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/docker/docs/Dockerfile | 35 +-------------- .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 ------------------- 3 files changed, 3 insertions(+), 79 deletions(-) delete mode 100755 packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index a5d3697f2167..cb06536dab0b 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c + digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index 412b0b56a921..4e1b1fb8b5a5 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -40,6 +40,7 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ + python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -59,40 +60,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb - -COPY fetch_gpg_keys.sh /tmp -# Install the desired versions of Python. -RUN set -ex \ - && export GNUPGHOME="$(mktemp -d)" \ - && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ - && /tmp/fetch_gpg_keys.sh \ - && for PYTHON_VERSION in 3.7.8 3.8.5; do \ - wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ - && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ - && mkdir -p /usr/src/python-${PYTHON_VERSION} \ - && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ - && rm python-${PYTHON_VERSION}.tar.xz \ - && cd /usr/src/python-${PYTHON_VERSION} \ - && ./configure \ - --enable-shared \ - # This works only on Python 2.7 and throws a warning on every other - # version, but seems otherwise harmless. 
- --enable-unicode=ucs4 \ - --with-system-ffi \ - --without-ensurepip \ - && make -j$(nproc) \ - && make install \ - && ldconfig \ - ; done \ - && rm -rf "${GNUPGHOME}" \ - && rm -rf /usr/src/python* \ - && rm -rf ~/.cache/ - RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.7 /tmp/get-pip.py \ && python3.8 /tmp/get-pip.py \ && rm /tmp/get-pip.py -CMD ["python3.7"] +CMD ["python3.8"] diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh deleted file mode 100755 index d653dd868e4b..000000000000 --- a/packages/google-cloud-logging/.kokoro/docker/docs/fetch_gpg_keys.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A script to fetch gpg keys with retry. -# Avoid jinja parsing the file. -# - -function retry { - if [[ "${#}" -le 1 ]]; then - echo "Usage: ${0} retry_count commands.." - exit 1 - fi - local retries=${1} - local command="${@:2}" - until [[ "${retries}" -le 0 ]]; do - $command && return 0 - if [[ $? -ne 0 ]]; then - echo "command failed, retrying" - ((retries--)) - fi - done - return 1 -} - -# 3.6.9, 3.7.5 (Ned Deily) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D - -# 3.8.0 (Łukasz Langa) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - E3FF2839C048B25C084DEBE9B26995E310250568 - -# From 932c3deedd999008da4f4c5bb4270af528e205e2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 16 Jul 2021 10:39:41 -0400 Subject: [PATCH 505/855] chore: pin 'google-{api,cloud}-core' to allow 2.x versions (#354) --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index ff2e3b04f8b4..5667a4d13b59 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,10 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.26.0, <2.0.0dev", + "google-api-core[grpc] >= 1.26.0, <3.0.0dev", "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", - "google-cloud-core >= 1.4.1, < 2.0dev", + "google-cloud-core >= 1.4.1, < 3.0dev", "proto-plus >= 1.11.0", "packaging >= 14.3", ] From cf311199c3f6cf8443739f94f4519eb3d7932b3c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 19 Jul 2021 12:50:22 +0200 Subject: [PATCH 506/855] chore(deps): update dependency google-cloud-bigquery to v2.21.0 (#349) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | 
|---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.20.0` -> `==2.21.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.21.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.21.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.21.0/compatibility-slim/2.20.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.21.0/confidence-slim/2.20.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery ### [`v2.21.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2210-httpswwwgithubcomgoogleapispython-bigquerycomparev2200v2210-2021-07-12) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.20.0...v2.21.0) ##### Features - Add max_results parameter to some of the `QueryJob` methods. ([#​698](https://www.github.com/googleapis/python-bigquery/issues/698)) ([2a9618f](https://www.github.com/googleapis/python-bigquery/commit/2a9618f4daaa4a014161e1a2f7376844eec9e8da)) - Add support for decimal target types. ([#​735](https://www.github.com/googleapis/python-bigquery/issues/735)) ([7d2d3e9](https://www.github.com/googleapis/python-bigquery/commit/7d2d3e906a9eb161911a198fb925ad79de5df934)) - Add support for table snapshots. ([#​740](https://www.github.com/googleapis/python-bigquery/issues/740)) ([ba86b2a](https://www.github.com/googleapis/python-bigquery/commit/ba86b2a6300ae5a9f3c803beeb42bda4c522e34c)) - Enable unsetting policy tags on schema fields. ([#​703](https://www.github.com/googleapis/python-bigquery/issues/703)) ([18bb443](https://www.github.com/googleapis/python-bigquery/commit/18bb443c7acd0a75dcb57d9aebe38b2d734ff8c7)) - Make it easier to disable best-effort deduplication with streaming inserts. ([#​734](https://www.github.com/googleapis/python-bigquery/issues/734)) ([1246da8](https://www.github.com/googleapis/python-bigquery/commit/1246da86b78b03ca1aa2c45ec71649e294cfb2f1)) - Support passing struct data to the DB API. ([#​718](https://www.github.com/googleapis/python-bigquery/issues/718)) ([38b3ef9](https://www.github.com/googleapis/python-bigquery/commit/38b3ef96c3dedc139b84f0ff06885141ae7ce78c)) ##### Bug Fixes - Inserting non-finite floats with `insert_rows()`. ([#​728](https://www.github.com/googleapis/python-bigquery/issues/728)) ([d047419](https://www.github.com/googleapis/python-bigquery/commit/d047419879e807e123296da2eee89a5253050166)) - Use `pandas` function to check for `NaN`. ([#​750](https://www.github.com/googleapis/python-bigquery/issues/750)) ([67bc5fb](https://www.github.com/googleapis/python-bigquery/commit/67bc5fbd306be7cdffd216f3791d4024acfa95b3)) ##### Documentation - Add docs for all enums in module. ([#​745](https://www.github.com/googleapis/python-bigquery/issues/745)) ([145944f](https://www.github.com/googleapis/python-bigquery/commit/145944f24fedc4d739687399a8309f9d51d43dfd)) - Omit mention of Python 2.7 in `CONTRIBUTING.rst`. ([#​706](https://www.github.com/googleapis/python-bigquery/issues/706)) ([27d6839](https://www.github.com/googleapis/python-bigquery/commit/27d6839ee8a40909e4199cfa0da8b6b64705b2e9))
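As a hedged illustration of the first feature above (the SQL and the public dataset reference are examples only, not from this changelog), `max_results` caps the rows materialized from a finished query job:

```python
# Sketch against google-cloud-bigquery 2.21+: limit rows pulled from a
# query job. The query text and dataset are illustrative only.
from google.cloud import bigquery

client = bigquery.Client()
job = client.query(
    "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 100"
)
for row in job.result(max_results=10):  # fetch at most 10 rows client-side
    print(row["name"])
```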
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index d29af8c30574..3407e0ce026a 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 -google-cloud-bigquery==2.20.0 +google-cloud-bigquery==2.21.0 google-cloud-storage==1.39.0 google-cloud-pubsub==2.5.0 From 6fdf232d8307b65146045c3c5ffa259638f88154 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Jul 2021 04:36:26 +0200 Subject: [PATCH 507/855] chore(deps): update dependency google-cloud-bigquery to v2.22.0 (#355) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 3407e0ce026a..65d5f74a7ca1 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 -google-cloud-bigquery==2.21.0 +google-cloud-bigquery==2.22.0 google-cloud-storage==1.39.0 google-cloud-pubsub==2.5.0 From 5b9d6f047f461a1edce5927eafc6f7e029ccc492 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 20 Jul 2021 03:46:33 -0600 Subject: [PATCH 508/855] fix(deps): pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions (#356) Expand pins on library dependencies in preparation for these dependencies taking a new major version. See https://github.com/googleapis/google-cloud-python/issues/10566. 
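The expanded pins described above follow PEP 440; a small self-check of what the loosened `google-api-core` specifier from this series admits, using the `packaging` library that is already a declared dependency:

```python
# Sanity check of the loosened pin ">= 1.26.0, <3.0.0dev" (PEP 440 semantics).
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=1.26.0,<3.0.0dev")
assert Version("1.26.0") in spec
assert Version("2.3.1") in spec       # 2.x releases are now acceptable
assert Version("3.0.0") not in spec   # 3.0.0dev sorts below 3.0.0, so 3.0.0 is excluded
```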
--- packages/google-cloud-logging/setup.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5667a4d13b59..5c8698299738 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,10 +29,16 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.26.0, <3.0.0dev", "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", - "google-cloud-core >= 1.4.1, < 3.0dev", + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-cloud-core >= 1.4.1, <3.0.0dev", "proto-plus >= 1.11.0", "packaging >= 14.3", ] From ab6b4b1ac9337ab52076d2eaed49bcc94791878d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jul 2021 13:50:27 +0000 Subject: [PATCH 509/855] feat: add Samples section to CONTRIBUTING.rst (#357) Source-Link: https://github.com/googleapis/synthtool/commit/52e4e46eff2a0b70e3ff5506a02929d089d077d4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 --- .../.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-logging/CONTRIBUTING.rst | 24 +++++++++++++++++++ .../samples/snippets/noxfile.py | 5 ++-- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index cb06536dab0b..d57f74204625 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d + digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index ced2c87c6dac..f5505c222c02 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -177,6 +177,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 5ff9e1db5808..6a8ccdae22c9 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -159,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) From d402bf806413a2e5d0fa6a9714cf4581865bddef Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 23 Jul 2021 15:33:25 +0000 Subject: [PATCH 510/855] chore: fix kokoro config for samples (#359) Source-Link: https://github.com/googleapis/synthtool/commit/dd05f9d12f134871c9e45282349c9856fbebecdd Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.6/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.7/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.8/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.9/periodic-head.cfg | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index d57f74204625..9ee60f7e4850 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 + digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg index f9cfcd33e058..7e2973e3b659 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg index f9cfcd33e058..7e2973e3b659 100644 --- 
a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg index f9cfcd33e058..7e2973e3b659 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" } diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg index f9cfcd33e058..7e2973e3b659 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" } From 8198793cea96c47e67f08483ae055cb8ccbeb89c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Jul 2021 10:16:22 +0000 Subject: [PATCH 511/855] fix: enable self signed jwt for grpc (#360) PiperOrigin-RevId: 386504689 Source-Link: https://github.com/googleapis/googleapis/commit/762094a99ac6e03a17516b13dfbef37927267a70 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6bfc480e1a161d5de121c2bcc3745885d33b265a --- .../services/config_service_v2/client.py | 4 +++ .../services/logging_service_v2/client.py | 4 +++ .../services/metrics_service_v2/client.py | 4 +++ .../logging_v2/test_config_service_v2.py | 31 +++++++++++-------- .../logging_v2/test_logging_service_v2.py | 31 +++++++++++-------- .../logging_v2/test_metrics_service_v2.py | 31 +++++++++++-------- 6 files changed, 66 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index d2b32322777e..2ef4f49b6f7e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -397,6 +397,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_buckets( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index dd94b6721520..c5dba2d04471 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -351,6 +351,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def delete_log( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 6dcbcdfb3bac..9749b5eff239 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -344,6 +344,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_log_metrics( diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 0691b2699b3d..12a5cf896031 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -122,18 +122,6 @@ def test_config_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] -) -def test_config_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -141,7 +129,7 @@ def test_config_service_v2_client_service_account_always_use_jwt(client_class): (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_config_service_v2_client_service_account_always_use_jwt_true( +def test_config_service_v2_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -151,6 +139,13 @@ def test_config_service_v2_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] @@ -231,6 +226,7 @@ def test_config_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -247,6 +243,7 @@ def 
test_config_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -263,6 +260,7 @@ def test_config_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -291,6 +289,7 @@ def test_config_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -367,6 +366,7 @@ def test_config_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -400,6 +400,7 @@ def test_config_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -421,6 +422,7 @@ def test_config_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -451,6 +453,7 @@ def test_config_service_v2_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -481,6 +484,7 @@ def test_config_service_v2_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -500,6 +504,7 @@ def test_config_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index a373edc3b451..1bf1ac26fea1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -129,18 +129,6 @@ def test_logging_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] -) -def test_logging_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -148,7 +136,7 @@ def test_logging_service_v2_client_service_account_always_use_jwt(client_class): (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def 
test_logging_service_v2_client_service_account_always_use_jwt_true( +def test_logging_service_v2_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -158,6 +146,13 @@ def test_logging_service_v2_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] @@ -238,6 +233,7 @@ def test_logging_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -254,6 +250,7 @@ def test_logging_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -270,6 +267,7 @@ def test_logging_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -298,6 +296,7 @@ def test_logging_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -374,6 +373,7 @@ def test_logging_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -407,6 +407,7 @@ def test_logging_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -428,6 +429,7 @@ def test_logging_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -458,6 +460,7 @@ def test_logging_service_v2_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -488,6 +491,7 @@ def test_logging_service_v2_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -507,6 +511,7 @@ def test_logging_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index ff91b20281dc..7455f075e277 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -127,18 +127,6 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): assert client.transport._host == "logging.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] -) -def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -146,7 +134,7 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_metrics_service_v2_client_service_account_always_use_jwt_true( +def test_metrics_service_v2_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -156,6 +144,13 @@ def test_metrics_service_v2_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] @@ -236,6 +231,7 @@ def test_metrics_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -252,6 +248,7 @@ def test_metrics_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -268,6 +265,7 @@ def test_metrics_service_v2_client_client_options( 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -296,6 +294,7 @@ def test_metrics_service_v2_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -372,6 +371,7 @@ def test_metrics_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -405,6 +405,7 @@ def test_metrics_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -426,6 +427,7 @@ def test_metrics_service_v2_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -456,6 +458,7 @@ def test_metrics_service_v2_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -486,6 +489,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -505,6 +509,7 @@ def test_metrics_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) From 49fa62deab18b15ba7ae82e73c81ae2ec6c7e180 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 26 Jul 2021 14:06:00 -0700 Subject: [PATCH 512/855] chore: use conventional commits for rennovate bot (#350) --- packages/google-cloud-logging/owlbot.py | 11 +++++++++++ packages/google-cloud-logging/renovate.json | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index f012b1191d46..b7cbcae4fb70 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -82,6 +82,17 @@ 'exclude =\n # Exclude environment test code.\n tests/environment/**\n' ) +# use conventional commits for renovate bot +s.replace( + "renovate.json", + """} +}""", + """}, + "semanticCommits": "enabled" +}""" +) + + # -------------------------------------------------------------------------- # Samples templates # -------------------------------------------------------------------------- diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index c04895563e69..9d2f2512aa5b 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -5,5 +5,6 @@ "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] - } + }, + "semanticCommits": "enabled" } From 5d30275c5eaf7afb7694d3cda04c126b4ea0fde8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 26 Jul 2021 23:07:25 +0200 
Subject: [PATCH 513/855] chore(deps): update dependency google-cloud-pubsub to v2.6.1 (#332) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 65d5f74a7ca1..268b82d895e5 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 google-cloud-bigquery==2.22.0 google-cloud-storage==1.39.0 -google-cloud-pubsub==2.5.0 +google-cloud-pubsub==2.6.1 From a5e09acafcb5379c2ebe360ac58dfd3fc030bc1a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 26 Jul 2021 23:09:04 +0200 Subject: [PATCH 514/855] chore(deps): update dependency google-cloud-bigquery to v2.22.1 (#358) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 268b82d895e5..fe19996d20fc 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 -google-cloud-bigquery==2.22.0 +google-cloud-bigquery==2.22.1 google-cloud-storage==1.39.0 google-cloud-pubsub==2.6.1 From 12a6d6b310dac371135817c09fc8938c61dd62bf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 27 Jul 2021 00:14:02 +0200 Subject: [PATCH 515/855] chore(deps): update dependency backoff to v1.11.1 (#348) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 766a8035d690..0db5cc446cf1 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -backoff==1.10.0 +backoff==1.11.1 pytest==6.2.4 From 54aba8f6446982a8f9067af528cf9fb100890cce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 27 Jul 2021 15:42:17 +0200 Subject: [PATCH 516/855] chore(deps): update dependency google-cloud-storage to v1.41.1 (#344) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index fe19996d20fc..ea74f1c4d7f2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 google-cloud-bigquery==2.22.1 -google-cloud-storage==1.39.0 +google-cloud-storage==1.41.1 google-cloud-pubsub==2.6.1 From 0145068b83464fc8bbeb9c9aeb36d73f54bd456a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Jul 2021 19:51:42 +0200 Subject: [PATCH 517/855] chore(deps): update dependency google-cloud-pubsub to v2.7.0 (#364) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt 
b/packages/google-cloud-logging/samples/snippets/requirements.txt index ea74f1c4d7f2..9ba16a0c3c61 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 google-cloud-bigquery==2.22.1 google-cloud-storage==1.41.1 -google-cloud-pubsub==2.6.1 +google-cloud-pubsub==2.7.0 From 4c116b448d8717111270e3125a33f081eacec682 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Jul 2021 19:51:59 +0200 Subject: [PATCH 518/855] chore(deps): update dependency google-cloud-bigquery to v2.23.1 (#362) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 9ba16a0c3c61..1f7eb19dda73 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.5.0 -google-cloud-bigquery==2.22.1 +google-cloud-bigquery==2.23.1 google-cloud-storage==1.41.1 google-cloud-pubsub==2.7.0 From 3a21ae14bc2f0562780091f975a331fe5790483b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 Jul 2021 19:58:53 +0000 Subject: [PATCH 519/855] chore: release 2.6.0 (#337) :robot: I have created a release \*beep\* \*boop\* --- ## [2.6.0](https://www.github.com/googleapis/python-logging/compare/v2.5.0...v2.6.0) (2021-07-28) ### Features * add always_use_jwt_access ([#334](https://www.github.com/googleapis/python-logging/issues/334)) ([ae67d10](https://www.github.com/googleapis/python-logging/commit/ae67d10a661a3561b366bb05f5cf6d34520164b4)) ### Bug Fixes * **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#356](https://www.github.com/googleapis/python-logging/issues/356)) ([a970dd2](https://www.github.com/googleapis/python-logging/commit/a970dd293d4fddc983946cb1c362f487a82d9609)) * disable always_use_jwt_access ([#342](https://www.github.com/googleapis/python-logging/issues/342)) ([a95e401](https://www.github.com/googleapis/python-logging/commit/a95e40188c9483310fb1dce9242c7c66721a6b7f)) * enable self signed jwt for grpc ([#360](https://www.github.com/googleapis/python-logging/issues/360)) ([707fad1](https://www.github.com/googleapis/python-logging/commit/707fad1a714d951727336b03f4444f53199737e3)) ### Documentation * omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-logging/issues/1127)) ([#327](https://www.github.com/googleapis/python-logging/issues/327)) ([faa6fb9](https://www.github.com/googleapis/python-logging/commit/faa6fb9a86c82b99b529e979160bfdd7a505793b)), closes [#1126](https://www.github.com/googleapis/python-logging/issues/1126) * add Samples section to CONTRIBUTING.rst ([#357](https://www.github.com/googleapis/python-logging/issues/357)) ([8771716](https://www.github.com/googleapis/python-logging/commit/8771716cae7cf67d710b3741b8b718fc4a8aa2b6)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
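The always_use_jwt_access items in these release notes all come down to a single google-auth call on service-account credentials. Below is a minimal sketch of the pattern the generated transports follow, assuming a google-auth release recent enough to provide with_always_use_jwt_access (the hasattr guard covers older versions); the helper name is hypothetical, not generated code:

    # Hedged sketch: opting service-account credentials into self-signed
    # JWTs. `maybe_use_self_signed_jwt` is a hypothetical helper, not part
    # of the generated transport code; only the google-auth call is real.
    from google.oauth2 import service_account

    def maybe_use_self_signed_jwt(credentials, always_use_jwt_access=True):
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            # Sign a JWT locally instead of exchanging credentials for an
            # OAuth2 access token, saving a round trip to the token server.
            credentials = credentials.with_always_use_jwt_access(True)
        return credentials

The tests earlier in this series assert exactly this behavior: with_always_use_jwt_access(True) is called when always_use_jwt_access=True is passed to a transport, and is not called when it is False.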
--- packages/google-cloud-logging/CHANGELOG.md | 21 +++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 3dd7ab48b8bc..450c1486ce5b 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.6.0](https://www.github.com/googleapis/python-logging/compare/v2.5.0...v2.6.0) (2021-07-28) + + +### Features + +* add always_use_jwt_access ([#334](https://www.github.com/googleapis/python-logging/issues/334)) ([ae67d10](https://www.github.com/googleapis/python-logging/commit/ae67d10a661a3561b366bb05f5cf6d34520164b4)) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#356](https://www.github.com/googleapis/python-logging/issues/356)) ([a970dd2](https://www.github.com/googleapis/python-logging/commit/a970dd293d4fddc983946cb1c362f487a82d9609)) +* disable always_use_jwt_access ([#342](https://www.github.com/googleapis/python-logging/issues/342)) ([a95e401](https://www.github.com/googleapis/python-logging/commit/a95e40188c9483310fb1dce9242c7c66721a6b7f)) +* enable self signed jwt for grpc ([#360](https://www.github.com/googleapis/python-logging/issues/360)) ([707fad1](https://www.github.com/googleapis/python-logging/commit/707fad1a714d951727336b03f4444f53199737e3)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-logging/issues/1127)) ([#327](https://www.github.com/googleapis/python-logging/issues/327)) ([faa6fb9](https://www.github.com/googleapis/python-logging/commit/faa6fb9a86c82b99b529e979160bfdd7a505793b)), closes [#1126](https://www.github.com/googleapis/python-logging/issues/1126) +* add Samples section to CONTRIBUTING.rst ([#357](https://www.github.com/googleapis/python-logging/issues/357)) ([8771716](https://www.github.com/googleapis/python-logging/commit/8771716cae7cf67d710b3741b8b718fc4a8aa2b6)) + + ## [2.5.0](https://www.github.com/googleapis/python-logging/compare/v2.4.0...v2.5.0) (2021-06-10) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5c8698299738..6a9ed8a59d67 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.5.0" +version = "2.6.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 61a9632268345bf12ec7e16eae0869b18de698d1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Jul 2021 13:03:19 +0200 Subject: [PATCH 520/855] chore(deps): update dependency google-cloud-logging to v2.6.0 (#365) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 1f7eb19dda73..69f81191f332 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.5.0 +google-cloud-logging==2.6.0 google-cloud-bigquery==2.23.1 google-cloud-storage==1.41.1 google-cloud-pubsub==2.7.0 From 
21eeb37d82fbb0278983b9dec7d95fe79069a584 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Jul 2021 18:13:52 +0200 Subject: [PATCH 521/855] chore(deps): update dependency google-cloud-bigquery to v2.23.2 (#366) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 69f81191f332..a43730bea9f2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.23.1 +google-cloud-bigquery==2.23.2 google-cloud-storage==1.41.1 google-cloud-pubsub==2.7.0 From f50a9532f423c89e79378e34d9e046bda5ea04f2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 2 Aug 2021 19:20:31 -0600 Subject: [PATCH 522/855] chore: require CODEOWNER review and up to date branches (#367) These two lines bring the rules on this repo in line with the defaults: https://github.com/googleapis/repo-automation-bots/blob/63c858e539e1f4d9bb8ea66e12f9c0a0de5fef55/packages/sync-repo-settings/src/required-checks.json#L40-L50 --- packages/google-cloud-logging/.github/sync-repo-settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml index af59935321a9..0ddb512dbab7 100644 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yaml @@ -4,6 +4,8 @@ branchProtectionRules: # Identifies the protection rule pattern. Name of the branch to be protected. 
# Defaults to `master` - pattern: master + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true requiredStatusCheckContexts: - 'Kokoro' - 'cla/google' From 7cfcafbab30cfb05c46260faec582d322a70d33f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 6 Aug 2021 17:03:20 -0700 Subject: [PATCH 523/855] chore(tests): updated env tests commit (#369) * updated env tests commit * updated commit --- .../tests/environment/.github/CODEOWNERS | 7 + .../tests/environment/.gitignore | 4 +- .../tests/environment/CODE_OF_CONDUCT.md | 122 ++----- .../tests/environment/README.md | 4 +- .../environment/deployable/java/.dockerignore | 1 + .../environment/deployable/java/.gitignore | 28 ++ .../deployable/java/.mvn/jvm.config | 1 + .../.mvn/wrapper/MavenWrapperDownloader.java | 117 +++++++ .../java/.mvn/wrapper/maven-wrapper.jar | Bin 0 -> 50710 bytes .../.mvn/wrapper/maven-wrapper.properties | 2 + .../environment/deployable/java/Dockerfile | 47 +++ .../tests/environment/deployable/java/mvnw | 310 ++++++++++++++++++ .../environment/deployable/java/mvnw.cmd | 182 ++++++++++ .../tests/environment/deployable/java/pom.xml | 119 +++++++ .../deployable/DeployableApplication.java | 173 ++++++++++ .../java/envtest/deployable/Snippets.java | 48 +++ .../web/DeployableHttpController.java | 61 ++++ .../src/main/resources/application.properties | 1 + .../environment/deployable/nodejs/.gitignore | 1 + .../environment/deployable/nodejs/Dockerfile | 2 +- .../environment/deployable/nodejs/app.js | 4 +- .../environment/deployable/nodejs/tests.js | 58 +++- .../environment/deployable/python/.gitignore | 1 + .../environment/deployable/python/Dockerfile | 2 +- .../envctl/env_scripts/go/kubernetes.sh | 3 +- .../envctl/env_scripts/java/kubernetes.sh | 118 +++++++ .../envctl/env_scripts/java/local.sh | 58 ++++ .../envctl/env_scripts/nodejs/kubernetes.sh | 3 +- .../envctl/env_scripts/python/kubernetes.sh | 3 +- .../tests/environment/envctl/envctl | 18 +- .../tests/environment/noxfile.py | 2 +- .../tests/environment/renovate.json | 6 + .../tests/environment/tests/common/common.py | 16 + .../tests/environment/tests/common/stdout.py | 67 ++++ .../tests/environment/tests/java/__init__.py | 13 + .../environment/tests/java/test_kubernetes.py | 29 ++ .../tests/nodejs/test_appengine_standard.py | 32 +- .../environment/tests/nodejs/test_cloudrun.py | 31 +- .../tests/nodejs/test_functions.py | 34 +- .../tests/nodejs/test_kubernetes.py | 31 +- tests/environment | 2 +- 41 files changed, 1654 insertions(+), 107 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/.github/CODEOWNERS create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.dockerignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.gitignore create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.mvn/jvm.config create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/MavenWrapperDownloader.java create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.jar create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.properties create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile create mode 100755 packages/google-cloud-logging/tests/environment/deployable/java/mvnw create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/mvnw.cmd create mode 100644 
packages/google-cloud-logging/tests/environment/deployable/java/pom.xml create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/src/main/resources/application.properties create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/local.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/common/stdout.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/__init__.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py diff --git a/packages/google-cloud-logging/tests/environment/.github/CODEOWNERS b/packages/google-cloud-logging/tests/environment/.github/CODEOWNERS new file mode 100644 index 000000000000..f48477fbf40e --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/.github/CODEOWNERS @@ -0,0 +1,7 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +* @googleapis/api-logging diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore index 60a42c15cd62..407f5b71e3fe 100644 --- a/packages/google-cloud-logging/tests/environment/.gitignore +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -1,3 +1,5 @@ +.idea + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -141,4 +143,4 @@ cython_debug/ deployable/go/google-cloud-go deployable/go/logging deployable/go/lib.tar -deployable/go/vendor \ No newline at end of file +deployable/go/vendor diff --git a/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md b/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md index dc079b4d66eb..46b2a08ea6d1 100644 --- a/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md +++ b/packages/google-cloud-logging/tests/environment/CODE_OF_CONDUCT.md @@ -1,93 +1,43 @@ -# Code of Conduct +# Contributor Code of Conduct -## Our Pledge +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, gender identity and expression, level of -experience, education, socio-economic status, nationality, personal appearance, -race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, or to ban temporarily or permanently any -contributor for other behaviors that they deem inappropriate, threatening, -offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. Representation of a project may be -further defined and clarified by project maintainers. - -This Code of Conduct also applies outside the project spaces when the Project -Steward has a reasonable belief that an individual's behavior may have a -negative impact on the project or its community. - -## Conflict Resolution - -We do not believe that all conflict is bad; healthy debate and disagreement -often yield positive results. However, it is never okay to be disrespectful or -to engage in behavior that violates the project’s code of conduct. - -If you see someone violating the code of conduct, you are encouraged to address -the behavior directly with those involved. Many issues can be resolved quickly -and easily, and this gives people more control over the outcome of their -dispute. If you are unable to resolve the matter for any reason, or if the -behavior is threatening or harassing, report it. We are dedicated to providing -an environment where participants feel welcome and safe. - -Reports should be directed to *[PROJECT STEWARD NAME(s) AND EMAIL(s)]*, the -Project Steward(s) for *[PROJECT NAME]*. 
It is the Project Steward’s duty to -receive and address reported violations of the code of conduct. They will then -work with a committee consisting of representatives from the Open Source -Programs Office and the Google Open Source Strategy team. If for any reason you -are uncomfortable reaching out to the Project Steward, please email -opensource@google.com. - -We will investigate every complaint, but you may not receive a direct response. -We will use our discretion in determining when and how to follow up on reported -incidents, which may range from not taking action to permanent expulsion from -the project and project-sponsored spaces. We will notify the accused of the -report and provide them an opportunity to discuss it before any action is taken. -The identity of the reporter will be omitted from the details of the report -supplied to the accused. In potentially harmful situations, such as ongoing -harassment or threats to anyone's safety, we may take action without notice. - -## Attribution - -This Code of Conduct is adapted from the Contributor Covenant, version 1.4, -available at -https://www.contributor-covenant.org/version/1/4/code-of-conduct.html +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. 
+ +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md index 6d97b83c6303..41600ff2f668 100644 --- a/packages/google-cloud-logging/tests/environment/README.md +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -70,4 +70,6 @@ Test files in `tests/` can inherit from any file in `tests/common` log | Test Name | Optional Input | Description | | -------------- | ---------------- | -------------------------------- | -| `simplelog` | `log_name`, `log_text` | Logs a simple text payload | +| `simplelog` | `log_name`, `log_text` | Logs a simple text payload | +| `requestlog` | `log_name`, `log_text` | Logs an http request object | +| `stdoutlog` | `log_name`, `log_text` | Logs to standard out | diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.dockerignore b/packages/google-cloud-logging/tests/environment/deployable/java/.dockerignore new file mode 100644 index 000000000000..1ee84da9887f --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.dockerignore @@ -0,0 +1 @@ +*.sw* diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore new file mode 100644 index 000000000000..976ef6f11484 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore @@ -0,0 +1,28 @@ +java-logging +_library +lib.tar +*.sw* + +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/jvm.config b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/jvm.config new file mode 100644 index 000000000000..094e02c902c7 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/jvm.config @@ -0,0 +1 @@ +-Dmaven.wagon.httpconnectionManager.ttlSeconds=120 -Dmaven.wagon.http.retryHandler.requestSentEnabled=true diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/MavenWrapperDownloader.java b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/MavenWrapperDownloader.java new file mode 100644 index 000000000000..c32394f140a7 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/MavenWrapperDownloader.java @@ -0,0 +1,117 @@ +/* + * Copyright 2007-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import java.net.*; +import java.io.*; +import java.nio.channels.*; +import java.util.Properties; + +public class MavenWrapperDownloader { + + private static final String WRAPPER_VERSION = "0.5.5"; + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" + + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to + * use instead of the default one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = + ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. + */ + private static final String MAVEN_WRAPPER_JAR_PATH = + ".mvn/wrapper/maven-wrapper.jar"; + + /** + * Name of the property which should be used to override the default download url for the wrapper. + */ + private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; + + public static void main(String args[]) { + System.out.println("- Downloader started"); + File baseDirectory = new File(args[0]); + System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); + + // If the maven-wrapper.properties exists, read it and check if it contains a custom + // wrapperUrl parameter. + File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); + String url = DEFAULT_DOWNLOAD_URL; + if(mavenWrapperPropertyFile.exists()) { + FileInputStream mavenWrapperPropertyFileInputStream = null; + try { + mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); + Properties mavenWrapperProperties = new Properties(); + mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); + url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); + } catch (IOException e) { + System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); + } finally { + try { + if(mavenWrapperPropertyFileInputStream != null) { + mavenWrapperPropertyFileInputStream.close(); + } + } catch (IOException e) { + // Ignore ... 
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+            String username = System.getenv("MVNW_USERNAME");
+            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+            Authenticator.setDefault(new Authenticator() {
+                @Override
+                protected PasswordAuthentication getPasswordAuthentication() {
+                    return new PasswordAuthentication(username, password);
+                }
+            });
+        }
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.jar b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 0000000000000000000000000000000000000000..0d5e649888a4843c1520054d9672f80c62ebbb48
GIT binary patch
literal 50710
[50710 bytes of base85-encoded binary data omitted: the prebuilt maven-wrapper.jar payload is not representable as text]
zxh-ZIvsO>?(~LG4H!x!7=%twG-trEw@~T12jSWdUhD-WzFHG#RLwk~_8^Tyj43Z!` zgH}E!E!7Ru13m%*)URJ=`=hk$KEuwYxkNU^j`@&LXYSVF+JA;Xf;{v|YM#ngD$$J* zyP|~0=Htq(IBGU-F-#K`lrFXunVUEqTAl=kVp9G*jg@Ny+kCkXEy$NWguW9Q1AuM; z2p!@iUj)Js%Sr&6oEsQYY^njhC0$IzL!I?GZ+OCRUd3O2U=5>ml^_d!R3AVN6^amD zU6)DXP1Zj$@ud-1E2L(ebi{+Y>|ACv?b?Y9s5aKnUw9cEAO^+OvePih-?$xC>J!fz zVACH(ElWFliv?cC4|P}X4An~j;&!Z@?eP?NuYi%L+i!l3o&Ofr|; z)tY=*7~}O(2m1R4_1DvZ2#Z4RjpDmlwOoxaA$W7ivDY?wZjPs6w0NRb{2c}SOnY+! zH+i2&Q^s|h;>+R-%A^rh+4(J6VP7m6MvieVeGMb^!VWOS&q>>w8ev#FuJ;=x(C+LU z%xy7P;)j-FszyuW@0fo#p&Eu~;0?I&#ga`6xaqCm>$IA`p5J>)n%)LkncfAHZ{z8cLT!f? z7+w>pxMXWfwbk?`EL5zwbQ#dMU5E#fpO}luPRNyVUBvgWT(01H-PDQ8{2Hh<9!T zUsa*7eD#3U^poU!)1b#rv13vnn4Vy!(Gj7gkQmPDiz-t#Ts9VgQ!$R)pSdp$ThJrZ zy2-|~NOqVO5L*c&_R0!%K#P5h;5Mco3E$)OxiJgL6WufKl@&|lGhKtx&#y`h9S#p* z^Tbo>GA#^<=>hsPJp&WE4&>dcl^njftX!&Eo=L(^Etw5+z!Y!5aL!foh9mT)0ReyC zbJ(V$*ZcT)y}vJH85jieZ(#qWTcr5k_5Q=eZ}+}Q9#O7&!@Zy06ttL}UY%QEH3Stw> zQf&xDZC_&;N!AS@bzD#%c<|vW943zxN5W2sY6AC-P-R)bD^YMMS~Zd2ij*zJ-bJqy zIcAuom)kUQkZ-b#Qa*-=vc?3zS3GMq;Uz1*y0+clRJO}lM6Z@_a)Oi8bfrV=dI zG~}ijJz9lVr=Z~rH8cl8*y%Kzj_4}BD+YM>Y#{)KzY1CIe#C1$fu?WHuE9GVY z(oY&lK|24V!BWrB2=FKP`-O3SDy;wK!e&+s_Ij`NY|VbDhVmyhCBIVhTb<~gZ1t?I zjcosuw=WZKvX9)J6ltO^o`=DX}t=rE^t*tB>tZl78`t8k(?0#iCkjK(J$pArE z*_!;RQg{FI!`dK*se3a1M+rS^Jp)stUlv5UR}2j731~FkLH$wi-*%MTUlsq!rjLFf zrFXdj#-^`(gg`5oE*u!xT{^WN0tCOy!t|$F{7@rgWo3VtC%{@p&kO(xm;7&bfZr^7 z4}g6~I2#pYiB*s~mLJ+dParri=&ksl03t@ldJY!$A|QSR3oAWC5G5Y-?>otd`Ui1! z;9x=etwG(T_>=xJPF{-;WryUFd3L|}JA^slXOKb5+`Ps+tX^UVKL{!-80RM5`O$Wk9< z2{LIb13e27Gtk>$rtk1yTIz=lxt|>tWQ_j^5FEhwPqF^G758%`-es5lAwclQBEQi5 zaJ>JNYxZI7@26$^d74lJv0MI6Oa0LUpe@Y99E=YE?x#Yz%kK6=fZ);~=g_|c_&L|x zZ@T}-N_>}0<-fwM@(bN}sZ}0U^M2}wJMQuy0t65EJ5_(5SmhzueF}AumH#6^@B{U~ zsrL`CfATr;5cWRt_s?y_(D@tKd)wCk!Pfo|>^^Dr9hdkI0fJBI{&TPgd*p{8_i0-1 zE(LxF5Ij)-pM%^#&v=M%pJejquDUe&=Lo+$X8wZw^&#wiWK JS$+5G{{hr`vzY(@ literal 0 HcmV?d00001 diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.properties b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 000000000000..15fbe3d284ea --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,2 @@ +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile new file mode 100644 index 000000000000..2a0b9c2340be --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile @@ -0,0 +1,47 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# Compile the local java-logging library.
+FROM docker.io/maven AS lib-env
+WORKDIR /app
+COPY _library ./java-logging
+WORKDIR /app/java-logging
+RUN mvn verify --fail-never
+RUN mvn -Dmaven.test.skip=true package
+
+# Compile the deployable code.
+FROM docker.io/maven AS build-env
+WORKDIR /app
+COPY pom.xml /app/pom.xml
+# copy over the compiled library
+COPY --from=lib-env /app/java-logging/target/*.jar /app/java-logging.jar
+# install java-logging into the local Maven repository
+RUN mvn install:install-file \
+  -Dfile=/app/java-logging.jar \
+  -DgroupId=com.google.cloud.local \
+  -DartifactId=google-cloud-logging \
+  -Dversion=0.0.1 \
+  -Dpackaging=jar \
+  -DgeneratePom=true
+# download dependencies as specified in pom.xml
+RUN mvn verify --fail-never
+COPY src /app/src
+RUN mvn -Dmaven.test.skip=true package
+
+# Build the runtime image.
+FROM openjdk:8-jre-slim
+# Copy the compiled files over.
+COPY --from=build-env /app/target/ /app/
+ENV PORT=8080
+# Start the deployable app.
+CMD ["java", "-jar", "/app/deployable-1.0.0.jar"]
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/mvnw b/packages/google-cloud-logging/tests/environment/deployable/java/mvnw
new file mode 100755
index 000000000000..d2f0ea38081d
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/mvnw
@@ -0,0 +1,310 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven2 Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+#   JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+#   M2_HOME - location of maven2's installed home dir
+#   MAVEN_OPTS - parameters passed to the Java VM when running Maven
+#     e.g. to debug Maven itself, use
+#       set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+#   MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + if [ -n "$MVNW_REPOURL" ]; then + jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar" + else + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar" + fi + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + if $cygwin; then + wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + fi + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget "$jarUrl" -O "$wrapperJarPath" + else + wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl -o "$wrapperJarPath" "$jarUrl" -f + else + curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f + fi + + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaClass=`cygpath --path --windows "$javaClass"` + fi + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." 
+ fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. +MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/mvnw.cmd b/packages/google-cloud-logging/tests/environment/deployable/java/mvnw.cmd new file mode 100644 index 000000000000..b26ab24f039e --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/mvnw.cmd @@ -0,0 +1,182 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar" + +FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. 
+if exist %WRAPPER_JAR% (
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Found %WRAPPER_JAR%
+    )
+) else (
+    if not "%MVNW_REPOURL%" == "" (
+        SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.5/maven-wrapper-0.5.5.jar"
+    )
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Couldn't find %WRAPPER_JAR%, downloading it ...
+        echo Downloading from: %DOWNLOAD_URL%
+    )
+
+    powershell -Command "&{"^
+        "$webclient = new-object System.Net.WebClient;"^
+        "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
+        "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
+        "}"^
+        "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
+        "}"
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Finished downloading %WRAPPER_JAR%
+    )
+)
+@REM End of extension
+
+@REM Provide a "standardized" way to retrieve the CLI args that will
+@REM work with both Windows and non-Windows executions.
+set MAVEN_CMD_LINE_ARGS=%*
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml b/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml
new file mode 100644
index 000000000000..e4c74e012dd0
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml
@@ -0,0 +1,119 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>deployable</artifactId>
+  <packaging>jar</packaging>
+  <name>Deployable</name>
+  <description>Java deployable for environment tests</description>
+  <version>1.0.0</version>
+
+  <parent>
+    <groupId>org.springframework.boot</groupId>
+    <artifactId>spring-boot-starter-parent</artifactId>
+    <version>2.1.4.RELEASE</version>
+  </parent>
+
+  <properties>
+    <java.version>1.8</java.version>
+    <checkstyle.config.location>./checkstyle.xml</checkstyle.config.location>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>3.1.2</version>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.springframework.cloud</groupId>
+        <artifactId>spring-cloud-gcp-dependencies</artifactId>
+        <version>1.2.7.RELEASE</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.cloud.local</groupId>
+      <artifactId>google-cloud-logging</artifactId>
+      <version>0.0.1</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-jetty</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-webmvc</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-thymeleaf</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.cloud</groupId>
+      <artifactId>spring-cloud-gcp-starter-logging</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-tcnative-boringssl-static</artifactId>
+      <version>2.0.34.Final</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>google-cloud-pubsub</artifactId>
+      <version>1.111.4</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java
new file mode 100644
index 000000000000..dd591225e235
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java
@@ -0,0 +1,173 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package envtest.deployable;
+
+import org.slf4j.LoggerFactory;
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.Logger;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.auth.oauth2.ServiceAccountCredentials;
+import com.google.cloud.pubsub.v1.AckReplyConsumer;
+import com.google.cloud.pubsub.v1.MessageReceiver;
+import com.google.cloud.pubsub.v1.Subscriber;
+import com.google.cloud.pubsub.v1.SubscriptionAdminClient;
+import com.google.pubsub.v1.ProjectSubscriptionName;
+import com.google.pubsub.v1.PubsubMessage;
+import com.google.pubsub.v1.PushConfig;
+import com.google.pubsub.v1.TopicName;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.Map;
+
+/**
+ * This class serves as the entry point for the Spring Boot app.
+ * Here, we check that the required environment variables are set and
+ * start either the Pub/Sub subscriber or the HTTP server.
+ */
+@SpringBootApplication
+public class DeployableApplication {
+
+    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(DeployableApplication.class);
+
+    private static String getProjectId() throws RuntimeException {
+        try {
+            // try reading from service account
+            GoogleCredentials credentials = GoogleCredentials.getApplicationDefault();
+            if (credentials instanceof ServiceAccountCredentials) {
+                String id = ((ServiceAccountCredentials) credentials).getProjectId();
+                if (id != null) {
+                    return id;
+                }
+            }
+            // try grabbing from metadata server
+            URL url = new URL("http://metadata.google.internal/computeMetadata/v1/project/project-id");
+            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+            conn.setRequestProperty("Metadata-Flavor", "Google");
+            BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
+            String line = reader.readLine();
+            reader.close();
+            return line;
+        } catch (IOException e) {
+            System.out.println(e);
+        }
+        throw new RuntimeException("could not find project ID");
+    }
+
+    private static void startPubsubSubscription() throws IOException, RuntimeException {
+        // create variables
+        String projectId = getProjectId();
+        String topicId = System.getenv().getOrDefault("PUBSUB_TOPIC", "logging-test");
+        String subscriptionId = topicId + "-subscriber";
+        TopicName topicName = TopicName.of(projectId, topicId);
+        ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectId, subscriptionId);
+        // create subscription
+        SubscriptionAdminClient subscriptionClient = SubscriptionAdminClient.create();
+        subscriptionClient.createSubscription(
+            subscriptionName,
+            topicName,
+            PushConfig.newBuilder().build(),
+            20);
+        // define callback: the message body names the snippet, the attributes are its args
+        MessageReceiver receiver = (PubsubMessage message, AckReplyConsumer consumer) -> {
+            consumer.ack();
+            String fnName = message.getData().toStringUtf8();
+            Map<String, String> args = message.getAttributes();
+            triggerSnippet(fnName, args);
+        };
+        // start subscriber
+        Subscriber subscriber = null;
+        try {
+            subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();
+            subscriber.startAsync().awaitRunning();
+            System.out.printf("Listening for messages on %s:\n", subscriptionName.toString());
+            subscriber.awaitTerminated();
+        } finally {
+            if (subscriber != null) {
+                subscriber.stopAsync().awaitTerminated();
+            }
+        }
+    }
+
+    public static void triggerSnippet(String fnName, Map<String, String> args) {
+        try {
+            Snippets obj = new Snippets();
+            Class<?> c = obj.getClass();
+            Method found = c.getDeclaredMethod(fnName, new Class[] {Map.class});
+            found.invoke(obj, args);
+        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
+            System.out.println(e.toString());
+        }
+    }
+
+    public static void main(String[] args) throws IOException, RuntimeException {
+        Logger root = (Logger) LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
+        root.setLevel(Level.INFO);
+
+        // ****************** GAE, GKE, GCE ******************
+        // Enable the app subscriber for all environments except GCR
+        Boolean enableSubscriber = Boolean.parseBoolean(System.getenv().getOrDefault("ENABLE_SUBSCRIBER", "false"));
+        System.out.format("ENV: ENABLE_SUBSCRIBER=%b\n", enableSubscriber);
+        if (enableSubscriber) {
+            // start a pub/sub subscriber and listen for messages
+            startPubsubSubscription();
+            return;
+        }
+
+        // GCR, GAE Standard
+        Boolean runServer = Boolean.parseBoolean(System.getenv().getOrDefault("RUNSERVER", "false"));
+        System.out.format("ENV: RUNSERVER=%b\n", runServer);
+        if (runServer) {
+            // hand off execution to DeployableHttpController
+            SpringApplication.run(DeployableApplication.class, args);
+            return;
+        }
+    }
+}
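The subscriber above defines the trigger protocol shared by every deployable in this suite: the Pub/Sub message body names a method on Snippets, and the message attributes become its argument map. For orientation, a minimal sketch of the publishing side using the Python google-cloud-pubsub client; the project ID and attribute values here are illustrative assumptions, and the topic name mirrors the PUBSUB_TOPIC fallback above:

    # Publish a trigger message: data names the snippet, attributes are its args.
    from google.cloud import pubsub_v1

    publisher = pubsub_v1.PublisherClient()
    topic_path = publisher.topic_path("my-project", "logging-test")  # assumed project
    future = publisher.publish(
        topic_path,
        data=b"simplelog",       # resolved by triggerSnippet via reflection
        log_name="test",         # read with args.getOrDefault("log_name", ...)
        log_text="hello world",  # read with args.getOrDefault("log_text", ...)
    )
    print(future.result())  # message ID once the publish has been accepted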
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java
new file mode 100644
index 000000000000..bd6c48a8e7b2
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java
@@ -0,0 +1,48 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package envtest.deployable;
+
+import java.util.Map;
+import java.util.Collections;
+
+import com.google.cloud.MonitoredResource;
+import com.google.cloud.logging.LogEntry;
+import com.google.cloud.logging.Logging;
+import com.google.cloud.logging.LoggingOptions;
+import com.google.cloud.logging.Payload.StringPayload;
+import com.google.cloud.logging.Severity;
+
+public class Snippets {
+
+    public void simplelog(Map<String, String> args) {
+        System.out.println("Called Simplelog!");
+        // pull out arguments
+        String logText = args.getOrDefault("log_text", "simplelog");
+        String logName = args.getOrDefault("log_name", "test");
+        String severityString = args.getOrDefault("severity", "DEFAULT");
+
+        // Instantiates a client
+        Logging logging = LoggingOptions.getDefaultInstance().getService();
+        LogEntry entry =
+            LogEntry.newBuilder(StringPayload.of(logText))
+                .setSeverity(Severity.valueOf(severityString))
+                .setLogName(logName)
+                .setResource(MonitoredResource.newBuilder("global").build())
+                .build();
+
+        // Writes the log entry asynchronously
+        logging.write(Collections.singleton(entry));
+    }
+}
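Snippets.simplelog is the Java twin of the snippet functions the other deployables expose (the Python image ships a snippets.py with the same role). As a point of comparison, a rough sketch of an equivalent call with the Python client library; the function body is an assumption for illustration, not code from this patch:

    # Hypothetical Python counterpart of Snippets.simplelog.
    import google.cloud.logging

    def simplelog(log_name="test", log_text="simplelog", severity="DEFAULT"):
        client = google.cloud.logging.Client()
        logger = client.logger(log_name)
        # log_text writes a textPayload entry, like StringPayload in the Java version
        logger.log_text(log_text, severity=severity)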
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java
new file mode 100644
index 000000000000..bf317584f56d
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java
@@ -0,0 +1,61 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package envtest.deployable.web;
+
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Defines a controller to handle HTTP requests.
+ */
+@RestController
+public final class DeployableHttpController {
+
+    @GetMapping("/")
+    public String helloWorld() {
+        String message = "It's running!";
+        return message;
+    }
+}
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/resources/application.properties b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/resources/application.properties
new file mode 100644
index 000000000000..e00492cfd83e
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/resources/application.properties
@@ -0,0 +1 @@
+server.port = ${PORT}
diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore
index 2d56765aa1d6..0ffb52ecff51 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore
+++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/.gitignore
@@ -1,4 +1,5 @@
 node_modules
 nodejs-logging
+_library
 *.tar
 package-lock.json
diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile
index f9fcc01026df..ecafc659f343 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile
+++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile
@@ -24,7 +24,7 @@
 COPY package*.json ./
 COPY app.js ./
 COPY tests.js ./
 # Assumption: local file is already built
-COPY nodejs-logging ./nodejs-logging
+COPY _library ./nodejs-logging
 # Install test app's dependencies.
 RUN npm install --production
diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js
index 459607cd4dac..7ba97ba79dc4 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js
+++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js
@@ -108,12 +108,12 @@ async function listenForMessages(pubSubClient, subscriptionName) {
     message.ack();
   };
 
-  // Listen for new messages until timeout is hit or test is done.
+  // Listen for new messages until the timeout is hit or the test destroys the resource.
   subscription.on('message', messageHandler);
 
   setTimeout(() => {
     subscription.removeListener('message', messageHandler);
-  }, 600000); // max 10 minutes timeout
+  }, 3600000); // max 1 hour timeout
 }
 
 function triggerTest(message) {
diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js
index 71bdb17877d9..41731416ccb2 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js
+++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js
@@ -14,6 +14,14 @@
 const {Logging} = require('@google-cloud/logging');
 const logging = new Logging();
 
+const defaultRequest = {
+  method: 'POST',
+  httpVersion: 'HTTP/1.1',
+  url: 'https://google.com',
+  headers: {'x-cloud-trace-context': '1/1;o=1'},
+  rawHeaders: ['X-Cloud-Trace-Context'],
+  statusCode: 200,
+};
 
 /**
  * The following are test functions that can be triggered in each service.
@@ -27,5 +35,53 @@ var simplelog = function(logname = "my-log", logtext = "hello world" ) {
   log.write(text_entry).then(r => console.log(r));
 }
 
-module.exports={ 'simplelog': simplelog }
+/**
+ * envctl nodejs trigger requestlog log_name=foo,log_text=bar
+ */
+var requestlog = function(logname = 'my-log', logtext = 'hello world', request) {
+  if (!request) request = defaultRequest;
+  const log = logging.log(logname);
+  const entry = log.entry({httpRequest: request}, logtext);
+  log.write(entry).then(r => console.log(r));
+}
+
+/**
+ * envctl nodejs trigger stdoutlog log_name=foo,log_text=bar
+ */
+var stdoutlog = function(logname = 'my-log', logtext = 'hello world', request) {
+  if (!request) request = defaultRequest;
+  logging.setProjectId().then( res => {
+    logging.setDetectedResource().then( res => {
+      const log = logging.logSync(logname);
+      const meta = {
+        // Fields all agents lift:
+        severity: 'WARNING',
+        httpRequest: request,
+        labels: {foo: 'bar'},
+        // Fields not lifted by all agents, e.g. GCF:
+        insertId: '42',
+        timestamp: new Date(2021, 1, 1, 1, 1, 1, 1),
+        resource: {
+          type: 'global',
+          labels: {
+            region: 'my-backyard',
+            zone: 'twilight',
+          }
+        },
+        // Note: explicit trace declarations override httpRequest header context
+        trace: 'projects/my-projectid/traces/0679686673a',
+        spanId: '000000000000004a',
+        traceSampled: false,
+      };
+      const entry = log.entry(meta, logtext);
+      log.write(entry);
+    });
+  });
+}
+
+module.exports={
+  'simplelog': simplelog,
+  'stdoutlog': stdoutlog,
+  'requestlog': requestlog,
+}
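Unlike requestlog, which calls the Logging API, stdoutlog leans on the environment's logging agent: logSync serializes the entry as a single JSON line on stdout, and the agent lifts the well-known keys back into LogEntry fields. A sketch of the same mechanism in Python — the field values are illustrative, but the logging.googleapis.com/* keys are the ones agents recognize:

    # Emit one agent-parseable structured log line on stdout.
    import json

    record = {
        "message": "hello world",
        "severity": "WARNING",
        "logging.googleapis.com/labels": {"foo": "bar"},
        "logging.googleapis.com/trace": "projects/my-projectid/traces/0679686673a",
        "logging.googleapis.com/spanId": "000000000000004a",
    }
    print(json.dumps(record))  # the agent turns this line into a structured LogEntry

Whether fields like insertId or a custom resource survive depends on the agent, which is exactly what test_stdout_log later in this patch probes per environment.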
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore
index e0333f99bd3a..fd785511cedb 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore
+++ b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore
@@ -1,2 +1,3 @@
 python-logging
+_library
 *.tar
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile
index 621f30743dd8..cecb080369a6 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile
+++ b/packages/google-cloud-logging/tests/environment/deployable/python/Dockerfile
@@ -30,7 +30,7 @@
 COPY snippets.py /app/
 # install logging from local directory.
 WORKDIR /app
 ENV PATH="/app:${PATH}"
-COPY python-logging /app/python-logging
+COPY _library /app/python-logging
 RUN pip install -e /app/python-logging
 # Start script
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh
index eb1b8b8701b6..801ee3481b50 100644
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/kubernetes.sh
@@ -59,7 +59,8 @@ attach_or_create_gke_cluster(){
     echo "cluster not found. creating..."
     gcloud container clusters create $SERVICE_NAME \
       --zone $ZONE \
-      --scopes=gke-default,pubsub
+      --scopes=gke-default,pubsub \
+      --no-enable-ip-alias
   fi
   set -e
 }
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh
new file mode 100755
index 000000000000..a5a012bc5cd5
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+SERVICE_NAME="log-java-gke-$(echo $ENVCTL_ID | head -c 8)"
+ZONE=us-central1-a
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete container images
+  export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
+  gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null
+  # delete cluster
+  gcloud container clusters delete --zone $ZONE $SERVICE_NAME -q
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud pubsub subscriptions describe $SERVICE_NAME-subscriber 2> /dev/null
+  if [[ $? != 0 ]]; then
+    echo "FALSE"
+    exit 1
+  fi
+  gcloud container clusters describe --zone $ZONE $SERVICE_NAME > /dev/null 2> /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+attach_or_create_gke_cluster(){
+  set +e
+  gcloud container clusters get-credentials $SERVICE_NAME
+  if [[ $? -ne 0 ]]; then
+    echo "cluster not found. creating..."
+    gcloud container clusters create $SERVICE_NAME \
+      --zone $ZONE \
+      --scopes=gke-default,pubsub \
+      --no-enable-ip-alias
+  fi
+  set -e
+}
+
+deploy() {
+  attach_or_create_gke_cluster
+  build_container
+  cat <<EOF > $TMP_DIR/gke.yaml
+  apiVersion: apps/v1
+  kind: Deployment
+  metadata:
+    name: $SERVICE_NAME
+  spec:
+    selector:
+      matchLabels:
+        app: $SERVICE_NAME
+    template:
+      metadata:
+        labels:
+          app: $SERVICE_NAME
+      spec:
+        containers:
+        - name: $SERVICE_NAME
+          image: $GCR_PATH
+          imagePullPolicy: Always
+          env:
+          - name: PUBSUB_TOPIC
+            value: $SERVICE_NAME
+          - name: ENABLE_SUBSCRIBER
+            value: "true"
+          - name: RUNSERVER
+            value: "false"
+EOF
+  # clean cluster
+  set +e
+  kubectl delete deployments --all 2>/dev/null
+  kubectl delete -f $TMP_DIR 2>/dev/null
+  set -e
+  # deploy test container
+  kubectl apply -f $TMP_DIR
+  sleep 60
+  # wait for pod to spin up
+  kubectl wait --for=condition=ready pod -l app=$SERVICE_NAME
+  # wait for the pub/sub subscriber to start
+  NUM_SUBSCRIBERS=0
+  TRIES=0
+  while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do
+    sleep 30
+    NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l)
+    TRIES=$((TRIES + 1))
+  done
+}
+
+filter-string() {
+  echo "resource.type=\"k8s_container\" AND resource.labels.cluster_name=\"$SERVICE_NAME\""
+}
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/local.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/local.sh
new file mode 100755
index 000000000000..412998cf2bf4
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/local.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+SERVICE_NAME="log-java-local-$(echo $ENVCTL_ID | head -c 8)"
+SA_NAME=$SERVICE_NAME-invoker
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # stop container
+  docker stop $SERVICE_NAME 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  if [ "$( docker container inspect -f '{{.State.Status}}' $SERVICE_NAME )" == "running" ]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+deploy() {
+  build_container nopush
+  # for interactive mode, run `envctl java local deploy -it`
+  FLAGS=${@:-"-d"}
+  # for authentication, link in the local service account
+  docker run --rm --name $SERVICE_NAME -e RUNSERVER=false -e ENABLE_SUBSCRIBER=true -e PUBSUB_TOPIC=$SERVICE_NAME \
+    -v ~/service-account.json:/service-account.json -e GOOGLE_APPLICATION_CREDENTIALS=/service-account.json \
+    $FLAGS $GCR_PATH
+}
+
+filter-string() {
+  echo "resource.type=\"global\""
+}
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh
index 079b9162f5e4..72f9130134df 100755
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh
@@ -59,7 +59,8 @@ attach_or_create_gke_cluster(){
     echo "cluster not found. creating..."
     gcloud container clusters create $SERVICE_NAME \
       --zone $ZONE \
-      --scopes=gke-default,pubsub
+      --scopes=gke-default,pubsub \
+      --no-enable-ip-alias
   fi
   set -e
 }
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh
index 7c85eb197596..48a2619dc90b 100755
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/kubernetes.sh
@@ -58,7 +58,8 @@ attach_or_create_gke_cluster(){
     echo "cluster not found. creating..."
     gcloud container clusters create $SERVICE_NAME \
       --zone $ZONE \
-      --scopes=gke-default,pubsub
+      --scopes=gke-default,pubsub \
+      --no-enable-ip-alias
   fi
   set -e
 }
diff --git a/packages/google-cloud-logging/tests/environment/envctl/envctl b/packages/google-cloud-logging/tests/environment/envctl/envctl
index 961fa9ecdb65..1e0e2cdd3846 100755
--- a/packages/google-cloud-logging/tests/environment/envctl/envctl
+++ b/packages/google-cloud-logging/tests/environment/envctl/envctl
@@ -4,7 +4,9 @@ set -o pipefail # any step in pipe caused failure
 set -u # undefined variables cause exit
 
 # set an ID to use for future runs
-ENVCTL_ID="${ENVCTL_ID:-$(hostname)}"
+# strip non-alphanumeric characters from the hostname
+HOST=$(echo $(hostname) | sed 's/[^a-zA-Z0-9]//g')
+ENVCTL_ID="${ENVCTL_ID:-$HOST}"
 
 # find the associated project
 PROJECT_ID="${PROJECT_ID:-$(gcloud config get-value project)}"
@@ -34,19 +36,25 @@ trap finish EXIT
 
 # shared logic
 build_container() {
+  ARG=${1:-none}
   export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME
   # copy super-repo into deployable dir
   _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"}
   _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE
   # copy over local copy of library
  pushd $SUPERREPO_ROOT
-  tar -cvf $_deployable_dir/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ .
+  tar -cvf $_deployable_dir/lib.tar \
+    --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ \
+    --exclude target --exclude env-tests-logging --exclude test --exclude .git --exclude .github \
+    --exclude system-test --exclude environment-tests --exclude .kokoro .
  popd
-  mkdir -p $_deployable_dir/python-logging
-  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/python-logging
+  mkdir -p $_deployable_dir/_library
+  tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/_library
   # build container
   docker build -t $GCR_PATH $_deployable_dir
-  docker push $GCR_PATH
+  if [[ "$ARG" != "nopush" ]]; then
+    docker push $GCR_PATH
+  fi
 }
 
 logs() {
diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py
index 1dc3db103408..f47421828e5f 100644
--- a/packages/google-cloud-logging/tests/environment/noxfile.py
+++ b/packages/google-cloud-logging/tests/environment/noxfile.py
@@ -129,7 +129,7 @@ def blacken(session: nox.sessions.Session) -> None:
         "functions",
     ],
 )
-@nox.parametrize("language", ["python", "go", "nodejs"])
+@nox.parametrize("language", ["python", "go", "nodejs", "java"])
 def tests(session, language, platform):
     """Run the e2e environment test suite."""
     if os.environ.get("RUN_ENV_TESTS", "true") == "false":
diff --git a/packages/google-cloud-logging/tests/environment/renovate.json b/packages/google-cloud-logging/tests/environment/renovate.json
index f45d8f110c30..fd3f0830485e 100644
--- a/packages/google-cloud-logging/tests/environment/renovate.json
+++ b/packages/google-cloud-logging/tests/environment/renovate.json
@@ -1,4 +1,10 @@
 {
+  "prConcurrentLimit": 2,
+  "timezone": "America/Los_Angeles",
+  "schedule": [
+    "after 9am and before 3pm every tuesday"
+  ],
+  "semanticCommits" : "enabled",
   "extends": [
     "config:base"
   ]
diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py
index 31a157d99766..4d5c79bcccd6 100644
--- a/packages/google-cloud-logging/tests/environment/tests/common/common.py
+++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py
b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -79,6 +79,7 @@ def trigger_and_retrieve( self._trigger(snippet, log_text=log_text, **kwargs) sleep(2) filter_str = self._add_time_condition_to_filter(log_text) + print(filter_str) # give the command time to be received tries = 0 while tries < max_tries: @@ -172,6 +173,21 @@ def test_monitored_resource(self): self.assertTrue(found_resource.labels[label], f'resource.labels[{label}] is not set') + def test_request_log(self): + if self.language not in ["nodejs"]: + return True + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, "requestlog") + # Note: 2 logs are spawned, only one containing http_request prop. + log_entry = log_list[-1] + if log_entry.http_request is None: + log_entry = log_list[-2] + found_request = log_entry.http_request + if hasattr(self, 'request_props'): + for prop in self.request_props: + self.assertTrue(found_request[prop], + f'{prop} is not set') + def test_severity(self): if self.language != "python": # to do: enable test for other languages diff --git a/packages/google-cloud-logging/tests/environment/tests/common/stdout.py b/packages/google-cloud-logging/tests/environment/tests/common/stdout.py new file mode 100644 index 000000000000..51679d30b9b3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/common/stdout.py @@ -0,0 +1,67 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect +import re + +import google.cloud.logging + +from ..common.common import Common + +class CommonStdout: + def test_stdout_log(self): + if self.language not in ["nodejs"]: + # TODO: other languages to also support this test + return True + if self.environment in ["compute"]: + # No logging agent support in GCE + return True + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, "stdoutlog") + # Note: 2 logs are spawned, use the one containing http_request prop. + found = log_list[-1] + if found.http_request is None: + found = log_list[-2] + # Agents lift fields inconsistently among envs, so check if is expected. 
+ if hasattr(self, 'stdout_log_name'): + self.assertTrue(self.stdout_log_name in found.log_name) + if hasattr(self, 'stdout_severity'): + self.assertEqual(found.severity, self.stdout_severity) + if hasattr(self, 'stdout_insert_id'): + self.assertEqual(found.insert_id, self.stdout_insert_id) + if hasattr(self, 'stdout_timestamp'): + self.assertEqual(found.timestamp, self.stdout_timestamp) + if hasattr(self, 'stdout_trace'): + self.assertTrue(self.stdout_trace in found.trace) + if hasattr(self, 'stdout_span_id'): + self.assertEqual(found.span_id, self.stdout_span_id) + # TODO: uncomment this again once python-logging accepts trace_samples + # if hasattr(self, 'stdout_trace_sampled'): + # self.assertEqual(found.trace_sampled, self.stdout_trace_sampled) + if hasattr(self, 'stdout_labels'): + for prop in self.stdout_labels: + self.assertTrue(found.labels[prop], + f'{prop} is not set') + if hasattr(self, 'stdout_resource_type'): + self.assertEqual(found.resource.type, self.stdout_resource_type) + if hasattr(self, 'stdout_resource_labels'): + for prop in self.stdout_resource_labels: + self.assertTrue(found.resource.labels[prop], + f'{prop} is not set') + if hasattr(self, 'stdout_payload_props'): + for prop in self.stdout_payload_props: + self.assertTrue(found.payload[prop], + f'{prop} is not set') diff --git a/packages/google-cloud-logging/tests/environment/tests/java/__init__.py b/packages/google-cloud-logging/tests/environment/tests/java/__init__.py new file mode 100644 index 000000000000..d46dbae5ebd0 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py new file mode 100644 index 000000000000..b3968fa84607 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py @@ -0,0 +1,29 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestKubernetesEngine(Common, unittest.TestCase): + environment = "kubernetes" + language = "java" + + monitored_resource_name = "k8s_container" + monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py index 8e24fe96a57d..b010b514f8c7 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py @@ -18,12 +18,42 @@ import google.cloud.logging from ..common.common import Common +from ..common.stdout import CommonStdout -class TestAppEngineStandard(Common, unittest.TestCase): +class TestAppEngineStandard(Common, CommonStdout, unittest.TestCase): environment = "appengine_standard" language = "nodejs" monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] + + request_props = [ + "requestMethod", + "requestUrl", + "protocol", + ] + + stdout_payload_props = [ + "message", + "resource", + "timestamp", + "logName", + ] + stdout_severity = "WARNING" + stdout_request_props = request_props + stdout_labels = [ + "foo", + ] + # substring to test for + stdout_trace = "/traces/0679686673a" + stdout_span_id = "000000000000004a" + + # Not lifted properly + # stdout_trace_sampled = "true" + # stdout_insert_id + # stdout_resource_type + # stdout_timestamp + # stdout_log_name: its /logs/stdout + diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py index 98f2ce816820..21f8fcd2dadb 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py @@ -21,9 +21,10 @@ from google.cloud.logging_v2.resource import Resource from ..common.common import Common +from ..common.stdout import CommonStdout -class TestCloudRun(Common, unittest.TestCase): +class TestCloudRun(Common, CommonStdout, unittest.TestCase): environment = "cloudrun" language = "nodejs" @@ -37,3 +38,31 @@ class TestCloudRun(Common, unittest.TestCase): "location", "configuration_name", ] + + request_props = [ + "requestMethod", + "requestUrl", + "protocol", + ] + + stdout_payload_props = [ + "message", + "resource", + "timestamp", + "logName", + ] + stdout_severity = "WARNING" + stdout_request_props = request_props + stdout_labels = [ + "foo", + ] + # substring to test for + stdout_trace = "/traces/0679686673a" + stdout_span_id = "000000000000004a" + + # Not lifted properly + # stdout_trace_sampled = "true" + # stdout_insert_id + # stdout_resource_type + # stdout_timestamp + # stdout_log_name: its /logs/stdout diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py index 46f7382e585f..a55672471116 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py @@ -19,9 +19,10 @@ import google.cloud.logging from ..common.common import Common +from 
..common.stdout import CommonStdout -class TestCloudFunctions(Common, unittest.TestCase): +class TestCloudFunctions(Common, CommonStdout, unittest.TestCase): environment = "functions" language = "nodejs" @@ -32,3 +33,34 @@ class TestCloudFunctions(Common, unittest.TestCase): "function_name", "project_id", ] + + request_props = [ + "requestMethod", + "requestUrl", + "protocol", + ] + + stdout_payload_props = [ + "message", + "resource", + "timestamp", + "logName", + ] + stdout_severity = "WARNING" + stdout_request_props = request_props + stdout_labels = [ + "foo", + # Nicely inserted by the agent + "execution_id", + ] + # Randomly dropped by Functions agent: + # stdout_insert_id = '42' + # stdout_trace = /traces/0679686673a' + # stdout_span_id = '000000000000004a' + # stdout_trace_sampled = 'true' + # ============================= + # Not lifted and just left in JSONPayload: + # stdout_resource_type + # stdout_resource_labels + # stdout_log_name + # stdout_timestamp diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py index ced5d748e5f8..2cb0d3129ab4 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py @@ -19,12 +19,41 @@ import google.cloud.logging from ..common.common import Common +from ..common.stdout import CommonStdout -class TestKubernetesEngine(Common, unittest.TestCase): +class TestKubernetesEngine(Common, CommonStdout, unittest.TestCase): environment = "kubernetes" language = "nodejs" monitored_resource_name = "k8s_container" monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] + + request_props = [ + "requestMethod", + "requestUrl", + "protocol", + ] + + stdout_payload_props = [ + "message", + "resource", + "timestamp", + "logName", + ] + stdout_severity = "WARNING" + stdout_request_props = request_props + stdout_labels = [ + "foo", + ] + stdout_insert_id = "42" + # substring to test for + stdout_trace = "/traces/0679686673a" + stdout_span_id = "000000000000004a" + stdout_trace_sampled = "true" + + # Not lifted and just left in JSONPayload: + # stdout_resource_type + # stdout_timestamp + # stdout_log_name: in GKE it looks like /logs/stdout. 
weird diff --git a/tests/environment b/tests/environment index a0af8d102a3c..3b5b391d6afc 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit a0af8d102a3c711cdff0dd12e01c8bfd357b7a83 +Subproject commit 3b5b391d6afc746c59cdd3100ccc5e8d60793489 From e105105c14561128de3d8711bffaf56de9a510e4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 10 Aug 2021 19:40:03 +0200 Subject: [PATCH 524/855] chore(deps): update dependency google-cloud-bigquery to v2.23.3 (#370) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index a43730bea9f2..c507944bf78f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.23.2 +google-cloud-bigquery==2.23.3 google-cloud-storage==1.41.1 google-cloud-pubsub==2.7.0 From ec0dcb4cb4bf60eb78bbd899010fba6a5730f0a6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 11 Aug 2021 16:32:20 +0000 Subject: [PATCH 525/855] chore: fix INSTALL_LIBRARY_FROM_SOURCE in noxfile.py (#371) Source-Link: https://github.com/googleapis/synthtool/commit/6252f2cd074c38f37b44abe5e96d128733eb1b61 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:50e35228649c47b6ca82aa0be3ff9eb2afce51c82b66c4a03fe4afeb5ff6c0fc --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/samples/snippets/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 9ee60f7e4850..649877dc494c 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b + digest: sha256:50e35228649c47b6ca82aa0be3ff9eb2afce51c82b66c4a03fe4afeb5ff6c0fc diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 6a8ccdae22c9..125bb619cc49 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -96,7 +96,7 @@ def get_pytest_env_vars() -> Dict[str, str]: TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") # # Style Checks # From f9cb35fc0cad218a0afdd133b1a1d1316c4f8786 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 12 Aug 2021 16:42:11 +0000 Subject: [PATCH 526/855] chore(python): avoid `.nox` directories when building docs (#372) Source-Link: https://github.com/googleapis/synthtool/commit/7e1f6da50524b5d98eb67adbf6dd0805df54233d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d --- 
packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 649877dc494c..b771c37caef8 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:50e35228649c47b6ca82aa0be3ff9eb2afce51c82b66c4a03fe4afeb5ff6c0fc + digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 6e52e94f3a58..d6e9cca6e6b4 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -110,6 +110,7 @@ # directories to ignore when looking for source files. exclude_patterns = [ "_build", + "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", "samples/snippets/README.rst", From 2f57cf5b2604320caf91fd60c4aa9f264a2c90a9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 13 Aug 2021 20:34:50 +0200 Subject: [PATCH 527/855] chore(deps): update dependency google-cloud-bigquery to v2.24.0 (#374) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index c507944bf78f..4f8fc9e2edf4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.23.3 +google-cloud-bigquery==2.24.0 google-cloud-storage==1.41.1 google-cloud-pubsub==2.7.0 From f7ea64da09a263d89b0fb2d724f7ff8f8a33fd2c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 14 Aug 2021 01:50:26 +0200 Subject: [PATCH 528/855] chore(deps): update dependency google-cloud-storage to v1.42.0 (#373) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4f8fc9e2edf4..b16b37da73e5 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 google-cloud-bigquery==2.24.0 -google-cloud-storage==1.41.1 +google-cloud-storage==1.42.0 google-cloud-pubsub==2.7.0 From 43fc9e9b0549933ddd63d649b4f5b7718ed2f08c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 18 Aug 2021 11:48:10 -0400 Subject: [PATCH 529/855] chore: remove redundant code in owlbot.py (#376) --- packages/google-cloud-logging/owlbot.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index b7cbcae4fb70..4dff2bfb4712 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -92,13 +92,6 @@ }""" ) - -# -------------------------------------------------------------------------- -# Samples templates -# 
-------------------------------------------------------------------------- - -python.py_samples() - # -------------------------------------------------------------------------- # Samples templates # -------------------------------------------------------------------------- From 59bffcee6ac2354db74b9b2e9ad37e05973b12af Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 20 Aug 2021 18:42:23 +0200 Subject: [PATCH 530/855] chore(deps): update dependency google-cloud-bigquery to v2.24.1 (#377) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.24.0` -> `==2.24.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.24.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.24.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.24.1/compatibility-slim/2.24.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.24.1/confidence-slim/2.24.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery

### [`v2.24.1`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#2241-httpswwwgithubcomgoogleapispython-bigquerycomparev2240v2241-2021-08-13)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.24.0...v2.24.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b16b37da73e5..114bb6fe6e14 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.24.0 +google-cloud-bigquery==2.24.1 google-cloud-storage==1.42.0 -google-cloud-pubsub==2.7.0 +google-cloud-pubsub==2.7.1 From d54057a41023bc9871f2ff55439aecb6cc9d423e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 25 Aug 2021 15:24:46 +0200 Subject: [PATCH 531/855] chore(deps): update dependency google-cloud-bigquery to v2.25.0 (#379) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 114bb6fe6e14..0735df35f3ba 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.24.1 +google-cloud-bigquery==2.25.0 google-cloud-storage==1.42.0 google-cloud-pubsub==2.7.1 From c6c62fd202e74250d72379a1e0c7b1a9f5985d2f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 27 Aug 2021 22:40:01 +0200 Subject: [PATCH 532/855] chore(deps): update dependency google-cloud-bigquery to v2.25.1 (#381) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0735df35f3ba..46db124bd99f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.25.0 +google-cloud-bigquery==2.25.1 google-cloud-storage==1.42.0 google-cloud-pubsub==2.7.1 From 7bb1585a88a5015c14db2f63646e0e40b302dc01 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 30 Aug 2021 16:37:20 -0400 Subject: [PATCH 533/855] chore: migrate to main branch (#385) --- .../.github/sync-repo-settings.yaml | 8 +++---- .../google-cloud-logging/.kokoro/build.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- packages/google-cloud-logging/CHANGELOG.md | 2 +- .../google-cloud-logging/CONTRIBUTING.rst | 12 +++++----- packages/google-cloud-logging/owlbot.py | 23 +++++++++++++++++++ 6 files changed, 36 insertions(+), 13 deletions(-) diff --git 
a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml index 0ddb512dbab7..3e98ae70f964 100644 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yaml @@ -1,9 +1,9 @@ -# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings -# Rules for master branch protection +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection branchProtectionRules: # Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `master` -- pattern: master +# Defaults to `main` +- pattern: main requiresCodeOwnerReviews: true requiresStrictStatusChecks: true requiredStatusCheckContexts: diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index cb17176f08ef..4d739a338901 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox python3 -m nox --version # If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index 311a8d54b9f1..8a324c9c7bc6 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 450c1486ce5b..4586148505f3 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -150,7 +150,7 @@ ### ⚠ BREAKING CHANGES -* Use microgenerator for GAPIC layer. See [UPGRADING.md](https://github.com/googleapis/python-logging/blob/master/UPGRADING.md) for details. (#94) +* Use microgenerator for GAPIC layer. See [UPGRADING.md](https://github.com/googleapis/python-logging/blob/main/UPGRADING.md) for details. (#94) * removes support for webapp2 and other Python2 specific code ### Features diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index f5505c222c02..2a720ed444e9 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-logging # repository into your local repository. 
$ git remote add upstream git@github.com:googleapis/python-logging.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -110,12 +110,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-logging``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -209,7 +209,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-logging/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-logging/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-logging @@ -234,7 +234,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-logging/blob/main/noxfile.py We also explicitly decided to support Python 3 beginning with version 3.6. 
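One functional detail from earlier in this series (PATCH 525's samples/snippets/noxfile.py hunk) is easy to miss among the chores: `bool()` over an environment variable is truthy for any non-empty string, so exporting `INSTALL_LIBRARY_FROM_SOURCE=false` actually enabled the flag. The membership test in that hunk fixes this. A minimal standalone sketch of the difference (not part of any diff in this series):

```python
import os

os.environ["INSTALL_LIBRARY_FROM_SOURCE"] = "false"

# old check: any non-empty string is truthy, so "false" still enables the flag
old = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))

# new check: only the literal strings "True"/"true" enable it
new = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true")

print(old, new)  # True False
```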
diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 4dff2bfb4712..3bffd7c99dd8 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -99,3 +99,26 @@ python.py_samples() s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +# ---------------------------------------------------------------------------- +# Main Branch migration +# ---------------------------------------------------------------------------- + +s.replace( + "*.rst", + "master", + "main" +) + +s.replace( + "CONTRIBUTING.rst", + "kubernetes/community/blob/main", + "kubernetes/community/blob/master" +) + +s.replace( + ".kokoro/*", + "master", + "main" +) + From 681c4de3d8501b71a924c26e6a27ba0f21bdda7a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 31 Aug 2021 17:40:02 +0200 Subject: [PATCH 534/855] chore(deps): update dependency pytest to v6.2.5 (#383) Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 0db5cc446cf1..fbe6c1c5cfc8 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==6.2.4 +pytest==6.2.5 From f1d99ccc57782c1f93be9708e7c5750ec21df765 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 1 Sep 2021 16:52:42 +0200 Subject: [PATCH 535/855] chore(deps): update dependency google-cloud-bigquery to v2.25.2 (#386) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 46db124bd99f..eb9a1eaba899 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.25.1 +google-cloud-bigquery==2.25.2 google-cloud-storage==1.42.0 google-cloud-pubsub==2.7.1 From d6303ea5273bd937fd507a56b217130d0352c12b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 1 Sep 2021 14:26:06 -0700 Subject: [PATCH 536/855] chore(tests): added missing test dependency (#387) --- .../.kokoro/environment_tests.sh | 1 + .../environment/deployable/nodejs/Dockerfile | 9 +++++++-- .../deployable/python/requirements.txt | 1 + .../envctl/env_scripts/nodejs/cloudrun.sh | 20 +------------------ .../envctl/env_scripts/nodejs/compute.sh | 20 +------------------ .../envctl/env_scripts/nodejs/kubernetes.sh | 19 +----------------- .../tests/environment/envctl/envctl | 1 + tests/environment | 2 +- 8 files changed, 14 insertions(+), 59 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index f8f138ea173e..2b6fa5177e69 100755 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -72,6 +72,7 @@ echo $ENVCTL_ID # Run the specified environment test set +e + python3.6 -m nox --session "tests(language='python', platform='$ENVIRONMENT')" TEST_STATUS_CODE=$? 
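The quoted session name passed to nox in the environment_tests.sh hunk above must match a generated session ID exactly. Those IDs come from the stacked `@nox.parametrize` decorators in the environment noxfile, which an earlier commit in this series extended with `"java"`. A rough sketch of how the IDs are produced (decorator values abridged from the hunks shown earlier; the real platform list has more entries, and the interpreter pin is an assumption based on the `python3.6` invocation above):

```python
import os
import nox

@nox.session(python="3.6")  # interpreter assumed from environment_tests.sh
@nox.parametrize("platform", ["kubernetes", "cloudrun", "functions"])
@nox.parametrize("language", ["python", "go", "nodejs", "java"])
def tests(session, language, platform):
    """Run the e2e environment test suite."""
    if os.environ.get("RUN_ENV_TESTS", "true") == "false":
        session.skip("env tests disabled")
    # ... deploy via envctl and run pytest against the chosen platform ...
```

nox then exposes IDs such as `tests(language='python', platform='kubernetes')`, which is exactly the string environment_tests.sh builds by interpolating `$ENVIRONMENT` into the `--session` argument.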
diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile index ecafc659f343..4262efa9d636 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/Dockerfile @@ -23,10 +23,15 @@ WORKDIR /usr/src/app COPY package*.json ./ COPY app.js ./ COPY tests.js ./ -# Assumption: local file is already built + +# Compile library COPY _library ./nodejs-logging +WORKDIR nodejs-logging +RUN npm install +RUN npm run compile -# Install test app's dependencies. +# Install other dependencies +WORKDIR /usr/src/app RUN npm install --production # Environment variable denoting whether to run an app server diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index 37d80d55bba1..26a183914fed 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -1,3 +1,4 @@ flask==1.1.2 google-cloud-pubsub==2.3.0 click==7.1.2 +pytz==2021.1 diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh index c9be55c09158..50e5aa3f7a3a 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/cloudrun.sh @@ -68,26 +68,8 @@ verify() { set -e } -build_node_container() { - export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME - # copy super-repo into deployable dir - _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} - _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE - - # copy over local copy of library - pushd $SUPERREPO_ROOT - tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . - popd - mkdir -p $_deployable_dir/$LIBRARY_NAME - tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME - - # build container - docker build -t $GCR_PATH $_deployable_dir - docker push $GCR_PATH -} - deploy() { - build_node_container + build_container gcloud config set run/platform managed gcloud config set run/region us-west1 gcloud run deploy \ diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh index 90a49bde506f..0cb0a425c98b 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/compute.sh @@ -47,26 +47,8 @@ verify() { set -e } - -build_node_container() { - export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME - # copy super-repo into deployable dir - _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} - _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE - - # copy over local copy of library - pushd $SUPERREPO_ROOT - tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . 
- popd - mkdir -p $_deployable_dir/$LIBRARY_NAME - tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME - # build container - docker build -t $GCR_PATH $_deployable_dir - docker push $GCR_PATH -} - deploy() { - build_node_container + build_container gcloud config set compute/zone $ZONE gcloud compute instances create-with-container \ $SERVICE_NAME \ diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh index 72f9130134df..3baa0948943a 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/kubernetes.sh @@ -65,26 +65,9 @@ attach_or_create_gke_cluster(){ set -e } -build_node_container(){ - export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME - # copy super-repo into deployable dir - _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} - _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE - - # copy over local copy of library - pushd $SUPERREPO_ROOT - tar -cvf $_deployable_dir/lib.tar --exclude node_modules --exclude env-tests-logging --exclude test --exclude system-test --exclude .nox --exclude samples --exclude docs . - popd - mkdir -p $_deployable_dir/$LIBRARY_NAME - tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/$LIBRARY_NAME - # build container - docker build -t $GCR_PATH $_deployable_dir - docker push $GCR_PATH -} - deploy() { attach_or_create_gke_cluster - build_node_container + build_container cat < $TMP_DIR/gke.yaml apiVersion: apps/v1 kind: Deployment diff --git a/packages/google-cloud-logging/tests/environment/envctl/envctl b/packages/google-cloud-logging/tests/environment/envctl/envctl index 1e0e2cdd3846..745c1c06ebf8 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/envctl +++ b/packages/google-cloud-logging/tests/environment/envctl/envctl @@ -46,6 +46,7 @@ build_container() { tar -cvf $_deployable_dir/lib.tar \ --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ \ --exclude target --exclude env-tests-logging --exclude test --exclude .git --exclude .github \ + --exclude node_modules --exclude system-test \ --exclude system-test --exclude environment-tests --exclude .kokoro . 
popd mkdir -p $_deployable_dir/_library diff --git a/tests/environment b/tests/environment index 3b5b391d6afc..f30dafa8b62b 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 3b5b391d6afc746c59cdd3100ccc5e8d60793489 +Subproject commit f30dafa8b62bf197e39edd5aca57ce1cc62e9540 From 0ef5daecebf87654232695bf4c533265ade7e5ef Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 2 Sep 2021 02:29:45 +0200 Subject: [PATCH 537/855] chore(deps): update dependency google-cloud-bigquery to v2.26.0 (#388) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index eb9a1eaba899..aeb2ca5e6582 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.25.2 +google-cloud-bigquery==2.26.0 google-cloud-storage==1.42.0 google-cloud-pubsub==2.7.1 From f16444c459b2dca66baa254f6f525012fa1ad026 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 8 Sep 2021 16:55:05 +0200 Subject: [PATCH 538/855] chore(deps): update dependency google-cloud-pubsub to v2.8.0 (#391) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index aeb2ca5e6582..fae75498f014 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 google-cloud-bigquery==2.26.0 google-cloud-storage==1.42.0 -google-cloud-pubsub==2.7.1 +google-cloud-pubsub==2.8.0 From 3305948305a38450385ce565d414aed8a025a217 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 9 Sep 2021 16:43:27 +0200 Subject: [PATCH 539/855] chore(deps): update dependency google-cloud-storage to v1.42.1 (#393) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index fae75498f014..13294d95adf2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 google-cloud-bigquery==2.26.0 -google-cloud-storage==1.42.0 +google-cloud-storage==1.42.1 google-cloud-pubsub==2.8.0 From 3f7a71b0cc8dd51aa69197a86a79be8b2479bfe4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 14 Sep 2021 14:30:01 -0700 Subject: [PATCH 540/855] fix: exception log message format (#394) --- .../logging_v2/handlers/structured_log.py | 5 +++++ .../environment/deployable/python/snippets.py | 5 +++++ .../tests/environment/tests/common/python.py | 15 +++++++++++++++ .../tests/unit/handlers/test_structured_log.py | 18 ++++++++++++++++++ tests/environment | 2 +- 5 files changed, 44 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index f0b4c69ecd04..43e1250a3c2b 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -62,9 +62,14 @@ def format(self, record): # let other formatters alter the message super_payload = None if record.msg: + # format the message using default handler behaviors super_payload = super(StructuredLogHandler, self).format(record) # properly break any formatting in string to make it json safe record._formatted_msg = json.dumps(super_payload or "") + # remove exception info to avoid duplicating it + # https://github.com/googleapis/python-logging/issues/382 + record.exc_info = None + record.exc_text = None # convert to GCP structred logging format gcp_payload = self._gcp_formatter.format(record) return gcp_payload diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 2be50bea17b9..d1e2f758c5b5 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -132,6 +132,11 @@ def pylogging_flask( ): logging.info(log_text) +def pylogging_exception(log_text="pylogging_exception", exception_text="Test", **kwargs): + try: + raise Exception(exception_text) + except Exception: + logging.exception(log_text) def print_handlers(**kwargs): root_logger = logging.getLogger() diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 1dcbb9a0f7ca..2ac037298a3f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -262,3 +262,18 @@ def test_pylogging_extras_sparse(self): found_log.source_location.get(field, None), f"source_location[{field}] is unexpectedly not None", ) + + def test_pylogging_exception(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + exception_text = "test_exception" + log_list = self.trigger_and_retrieve(log_text, "pylogging_exception", + exception_text=exception_text) + found_log = log_list[-1] + + message = (found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload)) + + self.assertIn(log_text, message) + self.assertIn(f"Exception: {exception_text}", message) + self.assertIn("Traceback (most recent call last):", message) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 3d1c11ab0782..271a68189090 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -119,6 +119,24 @@ def test_format_with_quotes(self): result = handler.format(record) self.assertIn(expected_result, result) + def test_format_with_exception(self): + """ + When logging a message with an exception, the stack trace should not be appended + """ + import logging + import json + + handler = self._make_one() + exception_tuple = (Exception, Exception(), None) + message = "test" + record = logging.LogRecord( + None, logging.INFO, None, None, message, None, exception_tuple + ) + record.created = None + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual(result["message"], 
f"{message}\nException") + def test_format_with_line_break(self): """ When logging a message containing \n, it should be properly escaped diff --git a/tests/environment b/tests/environment index f30dafa8b62b..17b7a4690832 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit f30dafa8b62bf197e39edd5aca57ce1cc62e9540 +Subproject commit 17b7a46908320891605908d5baa5f32eb255380e From b34f28bd64c8a38fddcf54306723d0ad91f96a84 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 17 Sep 2021 19:43:39 +0200 Subject: [PATCH 541/855] chore(deps): update dependency google-cloud-storage to v1.42.2 (#398) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 13294d95adf2..dbc2f3fd6bcd 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 google-cloud-bigquery==2.26.0 -google-cloud-storage==1.42.1 +google-cloud-storage==1.42.2 google-cloud-pubsub==2.8.0 From 35ee75ac7b8f7b68ee7c4673b35a7d79cf80d669 Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Tue, 21 Sep 2021 12:36:22 -0700 Subject: [PATCH 542/855] chore: relocate owl bot post processor (#401) chore: relocate owl bot post processor --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.github/.OwlBot.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index b771c37caef8..2567653c000d 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a diff --git a/packages/google-cloud-logging/.github/.OwlBot.yaml b/packages/google-cloud-logging/.github/.OwlBot.yaml index 63a2aab5460e..58377caf628c 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. 
docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest deep-remove-regex: - /owl-bot-staging From 0d03c61cb42f7e7417d79b527cf1ce8e89754d9e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 21 Sep 2021 17:41:34 -0400 Subject: [PATCH 543/855] chore: drop six (#403) --- .../cloud/logging_v2/handlers/transports/background_thread.py | 3 +-- .../tests/unit/handlers/transports/test_background_thread.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 3d654dbd8e75..60828a117804 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -22,12 +22,11 @@ import atexit import datetime import logging +import queue import sys import threading import time -from six.moves import queue - from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 5410c5f10547..1666cd74b4b0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -14,10 +14,10 @@ import time import logging +import queue import unittest import mock -from six.moves import queue class TestBackgroundThreadHandler(unittest.TestCase): @@ -379,7 +379,6 @@ def test__thread_main_max_latency(self, time): # _get_many invokes queue.get() followed by queue._get(). It fails # the "change detector" test in that way. However, this is still a # useful test to verify the queue timeout is appropriately calculated. - from six.moves import queue from google.cloud.logging_v2.handlers.transports import background_thread # Use monotonically increasing time. From 94b171f38a139a43d991f73d6076946352847299 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 24 Sep 2021 15:08:17 +0000 Subject: [PATCH 544/855] chore: use gapic-generator-python 0.51.2 (#404) - [ ] Regenerate this pull request now. 
fix: add 'dict' annotation type to 'request' Committer: @busunkim96 PiperOrigin-RevId: 398509016 Source-Link: https://github.com/googleapis/googleapis/commit/b224dfa52642a733ea64849d4e06d15c274bc08f Source-Link: https://github.com/googleapis/googleapis-gen/commit/63a1db7a38d74b9639592f521ed1daaf7299ad9a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjNhMWRiN2EzOGQ3NGI5NjM5NTkyZjUyMWVkMWRhYWY3Mjk5YWQ5YSJ9 --- .../owl-bot-staging/v2/.coveragerc | 17 + .../owl-bot-staging/v2/MANIFEST.in | 2 + .../owl-bot-staging/v2/README.rst | 49 + .../owl-bot-staging/v2/docs/conf.py | 376 + .../owl-bot-staging/v2/docs/index.rst | 7 + .../v2/docs/logging_v2/config_service_v2.rst | 10 + .../v2/docs/logging_v2/logging_service_v2.rst | 10 + .../v2/docs/logging_v2/metrics_service_v2.rst | 10 + .../v2/docs/logging_v2/services.rst | 8 + .../v2/docs/logging_v2/types.rst | 7 + .../v2/google/cloud/logging/__init__.py | 143 + .../v2/google/cloud/logging/py.typed | 2 + .../v2/google/cloud/logging_v2/__init__.py | 144 + .../cloud/logging_v2/gapic_metadata.json | 391 + .../v2/google/cloud/logging_v2/py.typed | 2 + .../cloud/logging_v2/services/__init__.py | 15 + .../services/config_service_v2/__init__.py | 22 + .../config_service_v2/async_client.py | 2016 ++++++ .../services/config_service_v2/client.py | 2198 ++++++ .../services/config_service_v2/pagers.py | 506 ++ .../config_service_v2/transports/__init__.py | 33 + .../config_service_v2/transports/base.py | 536 ++ .../config_service_v2/transports/grpc.py | 878 +++ .../transports/grpc_asyncio.py | 882 +++ .../services/logging_service_v2/__init__.py | 22 + .../logging_service_v2/async_client.py | 781 ++ .../services/logging_service_v2/client.py | 920 +++ .../services/logging_service_v2/pagers.py | 386 + .../logging_service_v2/transports/__init__.py | 33 + .../logging_service_v2/transports/base.py | 291 + .../logging_service_v2/transports/grpc.py | 402 + .../transports/grpc_asyncio.py | 406 ++ .../services/metrics_service_v2/__init__.py | 22 + .../metrics_service_v2/async_client.py | 640 ++ .../services/metrics_service_v2/client.py | 799 ++ .../services/metrics_service_v2/pagers.py | 140 + .../metrics_service_v2/transports/__init__.py | 33 + .../metrics_service_v2/transports/base.py | 261 + .../metrics_service_v2/transports/grpc.py | 357 + .../transports/grpc_asyncio.py | 361 + .../google/cloud/logging_v2/types/__init__.py | 138 + .../cloud/logging_v2/types/log_entry.py | 321 + .../google/cloud/logging_v2/types/logging.py | 573 ++ .../cloud/logging_v2/types/logging_config.py | 1457 ++++ .../cloud/logging_v2/types/logging_metrics.py | 371 + .../owl-bot-staging/v2/mypy.ini | 3 + .../owl-bot-staging/v2/noxfile.py | 132 + .../v2/scripts/fixup_logging_v2_keywords.py | 209 + .../owl-bot-staging/v2/setup.py | 54 + .../owl-bot-staging/v2/tests/__init__.py | 16 + .../owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/logging_v2/__init__.py | 16 + .../logging_v2/test_config_service_v2.py | 6447 +++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 2494 +++++++ .../logging_v2/test_metrics_service_v2.py | 2359 ++++++ 56 files changed, 28740 insertions(+) create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/README.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py create mode 100644 
packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py create mode 100644 
packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/setup.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py create mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc b/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc new file mode 100644 index 000000000000..b38d22e21fd1 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/logging/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in b/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 000000000000..f8c276f2cce8 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/logging *.py +recursive-include google/cloud/logging_v2 *.py diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/README.rst b/packages/google-cloud-logging/owl-bot-staging/v2/README.rst new file mode 100644 index 000000000000..56aa7d0a8ad9 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Logging API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Logging API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py b/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 000000000000..eb6783779012 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-logging documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here.
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffixes as a list of strings: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-logging" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-logging-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-logging.tex", + u"google-cloud-logging Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-logging", + u"Google Cloud Logging Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-logging", + u"google-cloud-logging Documentation", + author, + "google-cloud-logging", + "GAPIC library for Google Cloud Logging API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 000000000000..6a4859643f45 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + logging_v2/services + logging_v2/types diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst new file mode 100644 index 000000000000..f7c0a7701de1 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst @@ -0,0 +1,10 @@ +ConfigServiceV2 +--------------------------------- + +.. automodule:: google.cloud.logging_v2.services.config_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.config_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst new file mode 100644 index 000000000000..f41c0c89b78c --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst @@ -0,0 +1,10 @@ +LoggingServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.logging_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.logging_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst new file mode 100644 index 000000000000..fd4d9bc7d9ba --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst @@ -0,0 +1,10 @@ +MetricsServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.metrics_service_v2 + :members: + :inherited-members: + +.. 
automodule:: google.cloud.logging_v2.services.metrics_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst new file mode 100644 index 000000000000..d7a0471b13c3 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Logging v2 API +======================================== +.. toctree:: + :maxdepth: 2 + + config_service_v2 + logging_service_v2 + metrics_service_v2 diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst new file mode 100644 index 000000000000..843c0dc370d4 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Logging v2 API +===================================== + +.. automodule:: google.cloud.logging_v2.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py new file mode 100644 index 000000000000..16e3d0cc06cf --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.logging_v2.services.config_service_v2.client import ConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2.async_client import ConfigServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.metrics_service_v2.client import MetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2.async_client import MetricsServiceV2AsyncClient + +from google.cloud.logging_v2.types.log_entry import LogEntry +from google.cloud.logging_v2.types.log_entry import LogEntryOperation +from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation +from google.cloud.logging_v2.types.logging import DeleteLogRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesResponse +from google.cloud.logging_v2.types.logging import ListLogsRequest +from google.cloud.logging_v2.types.logging import ListLogsResponse +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse +from google.cloud.logging_v2.types.logging import TailLogEntriesRequest +from google.cloud.logging_v2.types.logging import TailLogEntriesResponse +from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors +from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest +from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import BigQueryOptions +from google.cloud.logging_v2.types.logging_config import CmekSettings +from google.cloud.logging_v2.types.logging_config import CreateBucketRequest +from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest +from google.cloud.logging_v2.types.logging_config import CreateSinkRequest +from google.cloud.logging_v2.types.logging_config import CreateViewRequest +from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest +from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest +from google.cloud.logging_v2.types.logging_config import DeleteViewRequest +from google.cloud.logging_v2.types.logging_config import GetBucketRequest +from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import GetExclusionRequest +from google.cloud.logging_v2.types.logging_config import GetSinkRequest +from google.cloud.logging_v2.types.logging_config import GetViewRequest +from google.cloud.logging_v2.types.logging_config import ListBucketsRequest +from google.cloud.logging_v2.types.logging_config import ListBucketsResponse +from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest +from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse +from google.cloud.logging_v2.types.logging_config import ListSinksRequest +from google.cloud.logging_v2.types.logging_config import ListSinksResponse +from google.cloud.logging_v2.types.logging_config import ListViewsRequest +from google.cloud.logging_v2.types.logging_config import ListViewsResponse +from google.cloud.logging_v2.types.logging_config import 
LogBucket +from google.cloud.logging_v2.types.logging_config import LogExclusion +from google.cloud.logging_v2.types.logging_config import LogSink +from google.cloud.logging_v2.types.logging_config import LogView +from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest +from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest +from google.cloud.logging_v2.types.logging_config import UpdateViewRequest +from google.cloud.logging_v2.types.logging_config import LifecycleState +from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse +from google.cloud.logging_v2.types.logging_metrics import LogMetric +from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest + +__all__ = ('ConfigServiceV2Client', + 'ConfigServiceV2AsyncClient', + 'LoggingServiceV2Client', + 'LoggingServiceV2AsyncClient', + 'MetricsServiceV2Client', + 'MetricsServiceV2AsyncClient', + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryOptions', + 'CmekSettings', + 'CreateBucketRequest', + 'CreateExclusionRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', + 'DeleteExclusionRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 'GetSinkRequest', + 'GetViewRequest', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'LifecycleState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 'UpdateLogMetricRequest', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed new file mode 100644 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. 
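The ``google.cloud.logging`` package staged above is a thin alias layer: every client and message type it exports comes straight from ``google.cloud.logging_v2``. As a minimal usage sketch of that surface (assuming the staged package is installed, application-default credentials are available, and ``my-project`` is a placeholder project ID), note that proto-plus request messages accept plain dicts, which is what the ``dict`` annotation named in this commit's title documents:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client()

    # A plain dict is accepted wherever a request message is expected;
    # it is coerced to a ListLogsRequest internally.
    for log_name in client.list_logs(request={"parent": "projects/my-project"}):
        print(log_name)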
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py new file mode 100644 index 000000000000..1dc1e1eac254 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.config_service_v2 import ConfigServiceV2Client +from .services.config_service_v2 import ConfigServiceV2AsyncClient +from .services.logging_service_v2 import LoggingServiceV2Client +from .services.logging_service_v2 import LoggingServiceV2AsyncClient +from .services.metrics_service_v2 import MetricsServiceV2Client +from .services.metrics_service_v2 import MetricsServiceV2AsyncClient + +from .types.log_entry import LogEntry +from .types.log_entry import LogEntryOperation +from .types.log_entry import LogEntrySourceLocation +from .types.logging import DeleteLogRequest +from .types.logging import ListLogEntriesRequest +from .types.logging import ListLogEntriesResponse +from .types.logging import ListLogsRequest +from .types.logging import ListLogsResponse +from .types.logging import ListMonitoredResourceDescriptorsRequest +from .types.logging import ListMonitoredResourceDescriptorsResponse +from .types.logging import TailLogEntriesRequest +from .types.logging import TailLogEntriesResponse +from .types.logging import WriteLogEntriesPartialErrors +from .types.logging import WriteLogEntriesRequest +from .types.logging import WriteLogEntriesResponse +from .types.logging_config import BigQueryOptions +from .types.logging_config import CmekSettings +from .types.logging_config import CreateBucketRequest +from .types.logging_config import CreateExclusionRequest +from .types.logging_config import CreateSinkRequest +from .types.logging_config import CreateViewRequest +from .types.logging_config import DeleteBucketRequest +from .types.logging_config import DeleteExclusionRequest +from .types.logging_config import DeleteSinkRequest +from .types.logging_config import DeleteViewRequest +from .types.logging_config import GetBucketRequest +from .types.logging_config import GetCmekSettingsRequest +from .types.logging_config import GetExclusionRequest +from .types.logging_config import GetSinkRequest +from .types.logging_config import GetViewRequest +from .types.logging_config import ListBucketsRequest +from .types.logging_config import ListBucketsResponse +from .types.logging_config import ListExclusionsRequest +from .types.logging_config import ListExclusionsResponse +from .types.logging_config import ListSinksRequest +from .types.logging_config import ListSinksResponse +from .types.logging_config import ListViewsRequest +from .types.logging_config import ListViewsResponse +from .types.logging_config import LogBucket +from .types.logging_config import LogExclusion +from .types.logging_config import LogSink +from .types.logging_config 
import LogView +from .types.logging_config import UndeleteBucketRequest +from .types.logging_config import UpdateBucketRequest +from .types.logging_config import UpdateCmekSettingsRequest +from .types.logging_config import UpdateExclusionRequest +from .types.logging_config import UpdateSinkRequest +from .types.logging_config import UpdateViewRequest +from .types.logging_config import LifecycleState +from .types.logging_metrics import CreateLogMetricRequest +from .types.logging_metrics import DeleteLogMetricRequest +from .types.logging_metrics import GetLogMetricRequest +from .types.logging_metrics import ListLogMetricsRequest +from .types.logging_metrics import ListLogMetricsResponse +from .types.logging_metrics import LogMetric +from .types.logging_metrics import UpdateLogMetricRequest + +__all__ = ( + 'ConfigServiceV2AsyncClient', + 'LoggingServiceV2AsyncClient', + 'MetricsServiceV2AsyncClient', +'BigQueryOptions', +'CmekSettings', +'ConfigServiceV2Client', +'CreateBucketRequest', +'CreateExclusionRequest', +'CreateLogMetricRequest', +'CreateSinkRequest', +'CreateViewRequest', +'DeleteBucketRequest', +'DeleteExclusionRequest', +'DeleteLogMetricRequest', +'DeleteLogRequest', +'DeleteSinkRequest', +'DeleteViewRequest', +'GetBucketRequest', +'GetCmekSettingsRequest', +'GetExclusionRequest', +'GetLogMetricRequest', +'GetSinkRequest', +'GetViewRequest', +'LifecycleState', +'ListBucketsRequest', +'ListBucketsResponse', +'ListExclusionsRequest', +'ListExclusionsResponse', +'ListLogEntriesRequest', +'ListLogEntriesResponse', +'ListLogMetricsRequest', +'ListLogMetricsResponse', +'ListLogsRequest', +'ListLogsResponse', +'ListMonitoredResourceDescriptorsRequest', +'ListMonitoredResourceDescriptorsResponse', +'ListSinksRequest', +'ListSinksResponse', +'ListViewsRequest', +'ListViewsResponse', +'LogBucket', +'LogEntry', +'LogEntryOperation', +'LogEntrySourceLocation', +'LogExclusion', +'LogMetric', +'LogSink', +'LogView', +'LoggingServiceV2Client', +'MetricsServiceV2Client', +'TailLogEntriesRequest', +'TailLogEntriesResponse', +'UndeleteBucketRequest', +'UpdateBucketRequest', +'UpdateCmekSettingsRequest', +'UpdateExclusionRequest', +'UpdateLogMetricRequest', +'UpdateSinkRequest', +'UpdateViewRequest', +'WriteLogEntriesPartialErrors', +'WriteLogEntriesRequest', +'WriteLogEntriesResponse', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json new file mode 100644 index 000000000000..da4eefd477fc --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json @@ -0,0 +1,391 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.logging_v2", + "protoPackage": "google.logging.v2", + "schema": "1.0", + "services": { + "ConfigServiceV2": { + "clients": { + "grpc": { + "libraryClient": "ConfigServiceV2Client", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + 
"GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfigServiceV2AsyncClient", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + } + } + }, + "LoggingServiceV2": { + "clients": { + "grpc": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LoggingServiceV2AsyncClient", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + } + } + }, + "MetricsServiceV2": { + "clients": { + "grpc": { + "libraryClient": 
"MetricsServiceV2Client", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsServiceV2AsyncClient", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed new file mode 100644 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py new file mode 100644 index 000000000000..2b27a12e93f8 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ConfigServiceV2Client +from .async_client import ConfigServiceV2AsyncClient + +__all__ = ( + 'ConfigServiceV2Client', + 'ConfigServiceV2AsyncClient', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py new file mode 100644 index 000000000000..82e84aab817c --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -0,0 +1,2016 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .client import ConfigServiceV2Client + + +class ConfigServiceV2AsyncClient: + """Service for configuring sinks used to route log entries.""" + + _client: ConfigServiceV2Client + + DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + + cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) + parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) + log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) + parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) + log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) + parse_log_exclusion_path = staticmethod(ConfigServiceV2Client.parse_log_exclusion_path) + log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) + parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) + log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) + parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) + common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) + common_folder_path = 
staticmethod(ConfigServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(ConfigServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(ConfigServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(ConfigServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(ConfigServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(ConfigServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. + """ + return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. + """ + return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConfigServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_buckets(self, + request: logging_config.ListBucketsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsAsyncPager: + r"""Lists buckets. + + Args: + request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): + The request object. The parameters to `ListBuckets`. + parent (:class:`str`): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: + The response from ListBuckets. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListBucketsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_buckets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListBucketsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bucket(self, + request: logging_config.GetBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket. + + Args: + request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): + The request object. The parameters to `GetBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_bucket(self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_bucket(self, + request: logging_config.UpdateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. 
This method replaces the following fields in
+        the existing bucket with values from the new bucket:
+        ``retention_period``
+
+        If the retention period is decreased and the bucket is locked,
+        FAILED_PRECONDITION will be returned.
+
+        If the bucket has a LifecycleState of DELETE_REQUESTED,
+        FAILED_PRECONDITION will be returned.
+
+        A bucket's region may not be modified after it is created.
+
+        Args:
+            request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`):
+                The request object. The parameters to `UpdateBucket`.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.LogBucket:
+                Describes a repository of logs.
+        """
+        # Create or coerce a protobuf request object.
+        request = logging_config.UpdateBucketRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_bucket,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_bucket(self,
+            request: logging_config.DeleteBucketRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED
+        state. After 7 days, the bucket will be purged and all logs in
+        the bucket will be permanently deleted.
+
+        Args:
+            request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`):
+                The request object. The parameters to `DeleteBucket`.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        request = logging_config.DeleteBucketRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_bucket,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def undelete_bucket(self,
+            request: logging_config.UndeleteBucketRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Undeletes a bucket. A bucket that has been deleted
+        may be undeleted within the grace period of 7 days.
+
+        Args:
+            request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`):
+                The request object. The parameters to `UndeleteBucket`.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.undelete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_views(self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsAsyncPager: + r"""Lists views on a bucket. + + Args: + request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_views, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListViewsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
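+        # (A hypothetical caller, names assumed, would consume this pager the
+        # same way as the `list_buckets` pager: `async for view in pager`.)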
+ return response + + async def get_view(self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`google.cloud.logging_v2.types.GetViewRequest`): + The request object. The parameters to `GetView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_view(self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): + The request object. The parameters to `CreateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_view(self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_view(self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_sinks(self, + request: logging_config.ListSinksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksAsyncPager: + r"""Lists sinks. + + Args: + request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): + The request object. The parameters to `ListSinks`. + parent (:class:`str`): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: + Result returned from ListSinks. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListSinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_sinks, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sink(self, + request: logging_config.GetSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + Args: + request (:class:`google.cloud.logging_v2.types.GetSinkRequest`): + The request object. The parameters to `GetSink`. + sink_name (:class:`str`): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
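+        # For example, a hypothetical call `get_sink(request=req,
+        # sink_name="projects/p/sinks/s")` would trigger the ValueError
+        # below; pass either the request object or the flattened field.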
+ has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_sink(self, + request: logging_config.CreateSinkRequest = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`): + The request object. The parameters to `CreateSink`. + parent (:class:`str`): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
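+        # A hypothetical flattened call (all resource names assumed) might
+        # look like:
+        #
+        #     sink = logging_config.LogSink(
+        #         name="my-sink",
+        #         destination="storage.googleapis.com/my-bucket",
+        #     )
+        #     await client.create_sink(
+        #         parent="projects/my-project", sink=sink)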
+ has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_sink, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_sink(self, + request: logging_config.UpdateSinkRequest = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`): + The request object. The parameters to `UpdateSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren At some point in the + future, behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_sink(self, + request: logging_config.DeleteSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`): + The request object. The parameters to `DeleteSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.DeleteSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_exclusions(self, + request: logging_config.ListExclusionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`): + The request object. The parameters to `ListExclusions`. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_exclusions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion(self, + request: logging_config.GetExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`): + The request object. The parameters to `GetExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
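+        # Only errors considered transient (DeadlineExceeded,
+        # InternalServerError, ServiceUnavailable) are retried, with
+        # exponential backoff from 0.1s up to 60s and a 60s overall
+        # deadline, as configured below.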
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_exclusion(self, + request: logging_config.CreateExclusionRequest = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`): + The request object. The parameters to `CreateExclusion`. + parent (:class:`str`): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
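+        # A hypothetical exclusion (name and filter values assumed) could be
+        # built for the flattened `exclusion` argument as:
+        #
+        #     exclusion = logging_config.LogExclusion(
+        #         name="exclude-debug",
+        #         filter="severity < ERROR",
+        #     )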
+ if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_exclusion(self, + request: logging_config.UpdateExclusionRequest = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): + The request object. The parameters to `UpdateExclusion`. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_exclusion(self, + request: logging_config.DeleteExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): + The request object. The parameters to `DeleteExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_exclusion,
+            default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
+                core_exceptions.DeadlineExceeded,
+                core_exceptions.InternalServerError,
+                core_exceptions.ServiceUnavailable,
+            ),
+            deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def get_cmek_settings(self,
+            request: logging_config.GetCmekSettingsRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> logging_config.CmekSettings:
+        r"""Gets the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Args:
+            request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`):
+                The request object. The parameters to
+                [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
+                See [Enabling CMEK for Logs
+                Router](https://cloud.google.com/logging/docs/routing/managed-
+                encryption) for more information.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.CmekSettings:
+                Describes the customer-managed encryption key (CMEK) settings associated with
+                   a project, folder, organization, billing account, or
+                   flexible resource.
+
+                   Note: CMEK for the Logs Router can currently only be
+                   configured for GCP organizations. Once configured, it
+                   applies to all projects and folders in the GCP
+                   organization.
+
+                   See [Enabling CMEK for Logs
+                   Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption)
+                   for more information.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = logging_config.GetCmekSettingsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_cmek_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_cmek_settings(self,
+            request: logging_config.UpdateCmekSettingsRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> logging_config.CmekSettings:
+        r"""Updates the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
+        will fail if 1) ``kms_key_name`` is invalid, or 2) the
+        associated service account does not have the required
+        ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+        the key, or 3) access to the key is disabled.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Args:
+            request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`):
+                The request object. The parameters to
+                [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings].
+                See [Enabling CMEK for Logs
+                Router](https://cloud.google.com/logging/docs/routing/managed-
+                encryption) for more information.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.CmekSettings:
+                Describes the customer-managed encryption key (CMEK) settings associated with
+                   a project, folder, organization, billing account, or
+                   flexible resource.
+
+                   Note: CMEK for the Logs Router can currently only be
+                   configured for GCP organizations. Once configured, it
+                   applies to all projects and folders in the GCP
+                   organization.
+
+                   See [Enabling CMEK for Logs
+                   Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption)
+                   for more information.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = logging_config.UpdateCmekSettingsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_cmek_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-logging",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = (
+    "ConfigServiceV2AsyncClient",
+)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py
new file mode 100644
index 000000000000..acf10f2292b8
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -0,0 +1,2198 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.logging_v2.services.config_service_v2 import pagers
+from google.cloud.logging_v2.types import logging_config
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import ConfigServiceV2GrpcTransport
+from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport
+
+
+class ConfigServiceV2ClientMeta(type):
+    """Metaclass for the ConfigServiceV2 client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[ConfigServiceV2Transport]]
+    _transport_registry["grpc"] = ConfigServiceV2GrpcTransport
+    _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[ConfigServiceV2Transport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta):
+    """Service for configuring sinks used to route log entries."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cmek_settings_path(project: str,) -> str: + """Returns a fully-qualified cmek_settings string.""" + return "projects/{project}/cmekSettings".format(project=project, ) + + @staticmethod + def parse_cmek_settings_path(path: str) -> Dict[str,str]: + """Parses a cmek_settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_bucket_path(project: str,location: str,bucket: str,) -> str: + """Returns a fully-qualified log_bucket string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + + @staticmethod + def parse_log_bucket_path(path: str) -> Dict[str,str]: + """Parses a log_bucket path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_exclusion_path(project: str,exclusion: str,) -> str: + """Returns a fully-qualified log_exclusion string.""" + return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + + @staticmethod + def parse_log_exclusion_path(path: str) -> Dict[str,str]: + """Parses a log_exclusion path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_sink_path(project: str,sink: str,) -> str: + """Returns a fully-qualified log_sink string.""" + return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + + @staticmethod + def parse_log_sink_path(path: str) -> Dict[str,str]: + """Parses a log_sink path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + """Returns a fully-qualified log_view string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + + @staticmethod + def parse_log_view_path(path: str) -> Dict[str,str]: + """Parses a log_view path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def 
+ + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConfigServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed.
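A construction sketch tying together the options described in the docstring above (assumes Application Default Credentials are available; the endpoint override and environment value are illustrative):

    import os
    from google.api_core.client_options import ClientOptions
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    # Force the regular endpoint regardless of any client certificate.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    client = ConfigServiceV2Client(
        client_options=ClientOptions(api_endpoint="logging.googleapis.com"),
    )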
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConfigServiceV2Transport): + # transport is a ConfigServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_buckets(self, + request: Union[logging_config.ListBucketsRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsPager: + r"""Lists buckets. + + Args: + request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): + The request object. The parameters to `ListBuckets`. + parent (str): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: + The response from ListBuckets. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListBucketsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_buckets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBucketsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_bucket(self, + request: Union[logging_config.GetBucketRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket. + + Args: + request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): + The request object. The parameters to `GetBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
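A calling sketch for the pager just constructed (project ID is hypothetical, and `client` is assumed to be a ConfigServiceV2Client built as above; `-` in place of the location lists buckets in all locations, per the docstring):

    for bucket in client.list_buckets(parent="projects/my-project/locations/-"):
        print(bucket.name)  # additional pages are fetched lazily during iteration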
+ return response + + def create_bucket(self, + request: Union[logging_config.CreateBucketRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + The request object. The parameters to `CreateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_bucket(self, + request: Union[logging_config.UpdateBucketRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A bucket's region may not be modified after it is created. + + Args: + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + The request object. The parameters to `UpdateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_bucket(self, + request: Union[logging_config.DeleteBucketRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): + The request object. The parameters to `DeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def undelete_bucket(self, + request: Union[logging_config.UndeleteBucketRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): + The request object. The parameters to `UndeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UndeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. 
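A request-object sketch for the bucket mutations above (resource names and retention value are hypothetical; proto-plus coerces plain dicts into nested messages such as the field mask):

    from google.cloud.logging_v2.types import logging_config

    request = logging_config.UpdateBucketRequest(
        name="projects/my-project/locations/global/buckets/my-bucket",
        bucket=logging_config.LogBucket(retention_days=60),
        update_mask={"paths": ["retention_days"]},  # dict coerced to FieldMask
    )
    bucket = client.update_bucket(request)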
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_views(self, + request: Union[logging_config.ListViewsRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsPager: + r"""Lists views on a bucket. + + Args: + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): + The request object. The parameters to `ListViews`. + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListViewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_views] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListViewsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_view(self, + request: Union[logging_config.GetViewRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): + The request object. The parameters to `GetView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_view(self, + request: Union[logging_config.CreateViewRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): + The request object. The parameters to `CreateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_view(self, + request: Union[logging_config.UpdateViewRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): + The request object. The parameters to `UpdateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_view(self, + request: Union[logging_config.DeleteViewRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): + The request object. The parameters to `DeleteView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_sinks(self, + request: Union[logging_config.ListSinksRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksPager: + r"""Lists sinks. + + Args: + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): + The request object. The parameters to `ListSinks`. + parent (str): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: + Result returned from ListSinks. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListSinksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sinks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSinksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sink(self, + request: Union[logging_config.GetSinkRequest, dict] = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + Args: + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): + The request object. The parameters to `GetSink`. + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. 
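A flattened-argument sketch for ListSinks (project ID is hypothetical; passing both `request` and `parent` raises ValueError, per the check above):

    for sink in client.list_sinks(parent="projects/my-project"):
        print(sink.name, sink.destination)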
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_sink(self, + request: Union[logging_config.CreateSinkRequest, dict] = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): + The request object. The parameters to `CreateSink`. + parent (str): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (google.cloud.logging_v2.types.LogSink): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_sink(self, + request: Union[logging_config.UpdateSinkRequest, dict] = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): + The request object. The parameters to `UpdateSink`. + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (google.cloud.logging_v2.types.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren. At some point in the + future, this behavior will be removed and specifying an empty + updateMask will be an error.
+ + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_sink(self, + request: Union[logging_config.DeleteSinkRequest, dict] = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): + The request object. The parameters to `DeleteSink`. + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
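A sketch of the update-mask pattern just described (resource names and filter are hypothetical; only the masked `filter` field is replaced on the server):

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2.types import logging_config

    updated = client.update_sink(
        sink_name="projects/my-project/sinks/my-sink",
        sink=logging_config.LogSink(filter="severity>=ERROR"),
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )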
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_exclusions(self, + request: Union[logging_config.ListExclusionsRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + The request object. The parameters to `ListExclusions`. + parent (str): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListExclusionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExclusionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion(self, + request: Union[logging_config.GetExclusionRequest, dict] = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): + The request object. The parameters to `GetExclusion`. + name (str): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_exclusion(self, + request: Union[logging_config.CreateExclusionRequest, dict] = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + The request object. The parameters to `CreateExclusion`. + parent (str): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateExclusionRequest. 
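A creation sketch for exclusions (parent, exclusion name, and filter are hypothetical):

    from google.cloud.logging_v2.types import logging_config

    exclusion = client.create_exclusion(
        parent="projects/my-project",
        exclusion=logging_config.LogExclusion(
            name="exclude-debug",
            filter="severity < WARNING",  # drops DEBUG and INFO entries
        ),
    )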
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_exclusion(self, + request: Union[logging_config.UpdateExclusionRequest, dict] = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + The request object. The parameters to `UpdateExclusion`. + name (str): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. 
Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_exclusion(self, + request: Union[logging_config.DeleteExclusionRequest, dict] = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + The request object. The parameters to `DeleteExclusion`. + name (str): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteExclusionRequest. 
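The exclusion methods in this file share one calling convention: pass either a fully formed request object or the flattened fields, never both. A minimal usage sketch, assuming application default credentials and hypothetical project and exclusion names:

```python
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config
from google.protobuf import field_mask_pb2

client = ConfigServiceV2Client()
parent = "projects/my-logging-project"  # hypothetical project

# Flattened fields...
exclusion = logging_config.LogExclusion(
    name="low-severity-noise",  # hypothetical exclusion ID
    filter="severity < ERROR",
)
created = client.create_exclusion(parent=parent, exclusion=exclusion)

# ...or an explicit request object; supplying both raises ValueError,
# per the sanity checks in the method bodies.
request = logging_config.GetExclusionRequest(
    name=f"{parent}/exclusions/low-severity-noise")
fetched = client.get_exclusion(request=request)

# update_exclusion reads only the fields named in the mask.
patch = logging_config.LogExclusion(description="Drops sub-ERROR entries")
client.update_exclusion(
    name=fetched.name,
    exclusion=patch,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
```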
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_config.DeleteExclusionRequest):
+            request = logging_config.DeleteExclusionRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_exclusion]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def get_cmek_settings(self,
+            request: Union[logging_config.GetCmekSettingsRequest, dict] = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> logging_config.CmekSettings:
+        r"""Gets the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]):
+                The request object. The parameters to
+                [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
+                See [Enabling CMEK for Logs
+                Router](https://cloud.google.com/logging/docs/routing/managed-
+                encryption) for more information.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.CmekSettings:
+                Describes the customer-managed encryption key (CMEK) settings associated with
+                a project, folder, organization, billing account, or
+                flexible resource.
+
+                Note: CMEK for the Logs Router can currently only be
+                configured for GCP organizations. Once configured, it
+                applies to all projects and folders in the GCP
+                organization.
+
+                See [Enabling CMEK for Logs
+                Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption)
+                for more information.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_config.GetCmekSettingsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_config.GetCmekSettingsRequest):
+            request = logging_config.GetCmekSettingsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
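Each of these method bodies also stamps an ``x-goog-request-params`` routing header derived from the request; that header is how the backend routes resource-scoped calls. A sketch of what ``to_grpc_metadata`` yields (the resource name is hypothetical, and the exact percent-encoding of the value can vary across google-api-core versions):

```python
from google.api_core import gapic_v1

# Produces one (key, value) metadata pair, which the method bodies
# append to any caller-supplied metadata:
pair = gapic_v1.routing_header.to_grpc_metadata(
    (("name", "projects/my-project/cmekSettings"),))  # hypothetical resource
# Roughly: ("x-goog-request-params", "name=projects%2Fmy-project%2FcmekSettings")
print(pair)
```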
+
+    def update_cmek_settings(self,
+            request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> logging_config.CmekSettings:
+        r"""Updates the Logs Router CMEK settings for the given resource.
+
+        Note: CMEK for the Logs Router can currently only be configured
+        for GCP organizations. Once configured, it applies to all
+        projects and folders in the GCP organization.
+
+        [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
+        will fail if 1) ``kms_key_name`` is invalid, or 2) the
+        associated service account does not have the required
+        ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+        the key, or 3) access to the key is disabled.
+
+        See `Enabling CMEK for Logs
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]):
+                The request object. The parameters to
+                [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings].
+                See [Enabling CMEK for Logs
+                Router](https://cloud.google.com/logging/docs/routing/managed-
+                encryption) for more information.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.CmekSettings:
+                Describes the customer-managed encryption key (CMEK) settings associated with
+                a project, folder, organization, billing account, or
+                flexible resource.
+
+                Note: CMEK for the Logs Router can currently only be
+                configured for GCP organizations. Once configured, it
+                applies to all projects and folders in the GCP
+                organization.
+
+                See [Enabling CMEK for Logs
+                Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption)
+                for more information.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_config.UpdateCmekSettingsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_config.UpdateCmekSettingsRequest):
+            request = logging_config.UpdateCmekSettingsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "ConfigServiceV2Client", +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py new file mode 100644 index 000000000000..11dce2ab7d58 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -0,0 +1,506 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.logging_v2.types import logging_config + + +class ListBucketsPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListBucketsResponse], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListBucketsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListBucketsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogBucket]: + for page in self.pages: + yield from page.buckets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBucketsAsyncPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListBucketsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListBucketsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + async def async_generator(): + async for page in self.pages: + for response in page.buckets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListViewsPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListViewsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogView]: + for page in self.pages: + yield from page.views + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListViewsAsyncPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListViewsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + async def async_generator(): + async for page in self.pages: + for response in page.views: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSinksPager: + """A pager for iterating through ``list_sinks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sinks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListSinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListSinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogSink]: + for page in self.pages: + yield from page.sinks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSinksAsyncPager: + """A pager for iterating through ``list_sinks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sinks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListSinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListSinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogSink]: + async def async_generator(): + async for page in self.pages: + for response in page.sinks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListExclusionsPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListExclusionsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListExclusionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogExclusion]: + for page in self.pages: + yield from page.exclusions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListExclusionsAsyncPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListExclusionsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListExclusionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + async def async_generator(): + async for page in self.pages: + for response in page.exclusions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py new file mode 100644 index 000000000000..6e18c331ff70 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConfigServiceV2Transport +from .grpc import ConfigServiceV2GrpcTransport +from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] +_transport_registry['grpc'] = ConfigServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'ConfigServiceV2Transport', + 'ConfigServiceV2GrpcTransport', + 'ConfigServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py new file mode 100644 index 000000000000..e7f0db9d401c --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -0,0 +1,536 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-logging', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class ConfigServiceV2Transport(abc.ABC): + """Abstract transport class for ConfigServiceV2.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + ) + + DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
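Those defaults resolve in a fixed order, implemented in the lines that follow: passing both ``credentials`` and ``credentials_file`` is an error, a credentials file wins next, and application default credentials are the fallback. The same precedence, condensed into a standalone sketch (not the library's real code path):

```python
import google.auth
from google.api_core import exceptions as core_exceptions

def resolve_credentials(credentials=None, credentials_file=None, **scopes_kwargs):
    if credentials and credentials_file:
        raise core_exceptions.DuplicateCredentialArgs(
            "'credentials_file' and 'credentials' are mutually exclusive")
    if credentials_file is not None:  # an explicit file beats the environment
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file, **scopes_kwargs)
    elif credentials is None:  # fall back to application default credentials
        credentials, _ = google.auth.default(**scopes_kwargs)
    return credentials
```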
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_buckets: gapic_v1.method.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: gapic_v1.method.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.list_views: gapic_v1.method.wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: gapic_v1.method.wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: gapic_v1.method.wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: gapic_v1.method.wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_view: gapic_v1.method.wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: gapic_v1.method.wrap_method( + self.list_sinks, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: 
gapic_v1.method.wrap_method( + self.get_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method.wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: gapic_v1.method.wrap_method( + self.update_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method.wrap_method( + self.delete_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method.wrap_method( + self.list_exclusions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method.wrap_method( + self.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: gapic_v1.method.wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: gapic_v1.method.wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method.wrap_method( + self.delete_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method.wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + } + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse] + ]]: + raise NotImplementedError() + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() 
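The ``Retry`` objects wired up in ``_prep_wrapped_messages`` above are ordinary, reusable policies: delays start at 0.1 s, grow by a factor of 1.3 with jitter, cap at 60 s per sleep, and the whole retry window is bounded by the 60 s deadline. The ``list_sinks`` policy, reconstructed standalone:

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

list_sinks_retry = retries.Retry(
    initial=0.1,      # first backoff, in seconds
    maximum=60.0,     # ceiling for any single backoff
    multiplier=1.3,   # geometric growth between attempts
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,    # give up once roughly 60 s have elapsed overall
)

@list_sinks_retry
def flaky_rpc():
    ...  # retried only on the three exception types above
```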
+ + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse] + ]]: + raise NotImplementedError() + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse] + ]]: + raise NotImplementedError() + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[ + empty_pb2.Empty, + 
Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + @property + def update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'ConfigServiceV2Transport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py new file mode 100644 index 000000000000..94e4af68334b --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -0,0 +1,878 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore +from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO + + +class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): + """gRPC backend transport for ConfigServiceV2. + + Service for configuring sinks used to route log entries. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
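The branch that follows converts the ``client_cert_source`` callback into channel-level SSL credentials; whatever its implementation, the callback must return PEM-encoded certificate and private-key bytes. A compatible callback, with hypothetical file paths:

```python
import grpc

def client_cert_source():
    # Hypothetical PEM files; any source of (cert_bytes, key_bytes) works.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()

cert, key = client_cert_source()
channel_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key)  # the same call the transport makes
```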
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + logging_config.ListBucketsResponse]: + r"""Return a callable for the list buckets method over gRPC. + + Lists buckets. + + Returns: + Callable[[~.ListBucketsRequest], + ~.ListBucketsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_buckets' not in self._stubs:
+            self._stubs['list_buckets'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.ConfigServiceV2/ListBuckets',
+                request_serializer=logging_config.ListBucketsRequest.serialize,
+                response_deserializer=logging_config.ListBucketsResponse.deserialize,
+            )
+        return self._stubs['list_buckets']
+
+    @property
+    def get_bucket(self) -> Callable[
+            [logging_config.GetBucketRequest],
+            logging_config.LogBucket]:
+        r"""Return a callable for the get bucket method over gRPC.
+
+        Gets a bucket.
+
+        Returns:
+            Callable[[~.GetBucketRequest],
+                    ~.LogBucket]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_bucket' not in self._stubs:
+            self._stubs['get_bucket'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.ConfigServiceV2/GetBucket',
+                request_serializer=logging_config.GetBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs['get_bucket']
+
+    @property
+    def create_bucket(self) -> Callable[
+            [logging_config.CreateBucketRequest],
+            logging_config.LogBucket]:
+        r"""Return a callable for the create bucket method over gRPC.
+
+        Creates a bucket that can be used to store log
+        entries. Once a bucket has been created, the region
+        cannot be changed.
+
+        Returns:
+            Callable[[~.CreateBucketRequest],
+                    ~.LogBucket]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_bucket' not in self._stubs:
+            self._stubs['create_bucket'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.ConfigServiceV2/CreateBucket',
+                request_serializer=logging_config.CreateBucketRequest.serialize,
+                response_deserializer=logging_config.LogBucket.deserialize,
+            )
+        return self._stubs['create_bucket']
+
+    @property
+    def update_bucket(self) -> Callable[
+            [logging_config.UpdateBucketRequest],
+            logging_config.LogBucket]:
+        r"""Return a callable for the update bucket method over gRPC.
+
+        Updates a bucket. This method replaces the following fields in
+        the existing bucket with values from the new bucket:
+        ``retention_period``
+
+        If the retention period is decreased and the bucket is locked,
+        FAILED_PRECONDITION will be returned.
+
+        If the bucket has a LifecycleState of DELETE_REQUESTED,
+        FAILED_PRECONDITION will be returned.
+
+        A bucket's region may not be modified after it is created.
+
+        Returns:
+            Callable[[~.UpdateBucketRequest],
+                    ~.LogBucket]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
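Every stub property in this transport, including ``update_bucket`` just below, repeats the same lazy-caching idiom: build the ``unary_unary`` callable on first access, keyed by method name, and reuse it afterwards. The pattern in isolation (a sketch, not library code; serializers omitted):

```python
class _LazyStubSketch:
    """Illustration of the per-method stub cache used by the transport."""

    def __init__(self, channel):
        self._channel = channel
        self._stubs = {}

    @property
    def delete_bucket(self):
        if 'delete_bucket' not in self._stubs:
            # Created once per transport instance, then served from the cache.
            self._stubs['delete_bucket'] = self._channel.unary_unary(
                '/google.logging.v2.ConfigServiceV2/DeleteBucket')
        return self._stubs['delete_bucket']
```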
+ if 'update_bucket' not in self._stubs: + self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucket', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['update_bucket'] + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_bucket' not in self._stubs: + self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteBucket', + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_bucket'] + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + empty_pb2.Empty]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Returns: + Callable[[~.UndeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_bucket' not in self._stubs: + self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['undelete_bucket'] + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + logging_config.ListViewsResponse]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + ~.ListViewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_views' not in self._stubs: + self._stubs['list_views'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListViews', + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs['list_views'] + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + logging_config.LogView]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_view' not in self._stubs: + self._stubs['get_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetView', + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['get_view'] + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + logging_config.LogView]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_view' not in self._stubs: + self._stubs['create_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateView', + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['create_view'] + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + logging_config.LogView]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_view' not in self._stubs: + self._stubs['update_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateView', + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['update_view'] + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_view' not in self._stubs: + self._stubs['delete_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteView', + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_view'] + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + logging_config.ListSinksResponse]: + r"""Return a callable for the list sinks method over gRPC. + + Lists sinks. 
+ + Returns: + Callable[[~.ListSinksRequest], + ~.ListSinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sinks' not in self._stubs: + self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=logging_config.ListSinksRequest.serialize, + response_deserializer=logging_config.ListSinksResponse.deserialize, + ) + return self._stubs['list_sinks'] + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the get sink method over gRPC. + + Gets a sink. + + Returns: + Callable[[~.GetSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_sink' not in self._stubs: + self._stubs['get_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['get_sink'] + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['create_sink'] + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['update_sink'] + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_sink'] + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + logging_config.ListExclusionsResponse]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + ~.ListExclusionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['list_exclusions'] + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['get_exclusion'] + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. 
+
+ Returns:
+ Callable[[~.CreateExclusionRequest],
+ ~.LogExclusion]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_exclusion' not in self._stubs:
+ self._stubs['create_exclusion'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/CreateExclusion',
+ request_serializer=logging_config.CreateExclusionRequest.serialize,
+ response_deserializer=logging_config.LogExclusion.deserialize,
+ )
+ return self._stubs['create_exclusion']
+
+ @property
+ def update_exclusion(self) -> Callable[
+ [logging_config.UpdateExclusionRequest],
+ logging_config.LogExclusion]:
+ r"""Return a callable for the update exclusion method over gRPC.
+
+ Changes one or more properties of an existing
+ exclusion.
+
+ Returns:
+ Callable[[~.UpdateExclusionRequest],
+ ~.LogExclusion]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_exclusion' not in self._stubs:
+ self._stubs['update_exclusion'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/UpdateExclusion',
+ request_serializer=logging_config.UpdateExclusionRequest.serialize,
+ response_deserializer=logging_config.LogExclusion.deserialize,
+ )
+ return self._stubs['update_exclusion']
+
+ @property
+ def delete_exclusion(self) -> Callable[
+ [logging_config.DeleteExclusionRequest],
+ empty_pb2.Empty]:
+ r"""Return a callable for the delete exclusion method over gRPC.
+
+ Deletes an exclusion.
+
+ Returns:
+ Callable[[~.DeleteExclusionRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_exclusion' not in self._stubs:
+ self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/DeleteExclusion',
+ request_serializer=logging_config.DeleteExclusionRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_exclusion']
+
+ @property
+ def get_cmek_settings(self) -> Callable[
+ [logging_config.GetCmekSettingsRequest],
+ logging_config.CmekSettings]:
+ r"""Return a callable for the get cmek settings method over gRPC.
+
+ Gets the Logs Router CMEK settings for the given resource.
+
+ Note: CMEK for the Logs Router can currently only be configured
+ for GCP organizations. Once configured, it applies to all
+ projects and folders in the GCP organization.
+
+ See `Enabling CMEK for Logs
+ Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+ for more information.
+
+ Returns:
+ Callable[[~.GetCmekSettingsRequest],
+ ~.CmekSettings]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
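+ # Calling the returned multicallable performs the RPC directly; a
+ # hedged sketch (assumes `transport` is a constructed instance of
+ # this class and the resource name is hypothetical):
+ #
+ # settings = transport.get_cmek_settings(
+ # logging_config.GetCmekSettingsRequest(
+ # name="organizations/123/cmekSettings"))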
+ if 'get_cmek_settings' not in self._stubs:
+ self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/GetCmekSettings',
+ request_serializer=logging_config.GetCmekSettingsRequest.serialize,
+ response_deserializer=logging_config.CmekSettings.deserialize,
+ )
+ return self._stubs['get_cmek_settings']
+
+ @property
+ def update_cmek_settings(self) -> Callable[
+ [logging_config.UpdateCmekSettingsRequest],
+ logging_config.CmekSettings]:
+ r"""Return a callable for the update cmek settings method over gRPC.
+
+ Updates the Logs Router CMEK settings for the given resource.
+
+ Note: CMEK for the Logs Router can currently only be configured
+ for GCP organizations. Once configured, it applies to all
+ projects and folders in the GCP organization.
+
+ [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
+ will fail if 1) ``kms_key_name`` is invalid, or 2) the
+ associated service account does not have the required
+ ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+ the key, or 3) access to the key is disabled.
+
+ See `Enabling CMEK for Logs
+ Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+ for more information.
+
+ Returns:
+ Callable[[~.UpdateCmekSettingsRequest],
+ ~.CmekSettings]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_cmek_settings' not in self._stubs:
+ self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings',
+ request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
+ response_deserializer=logging_config.CmekSettings.deserialize,
+ )
+ return self._stubs['update_cmek_settings']
+
+
+__all__ = (
+ 'ConfigServiceV2GrpcTransport',
+)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..78e442d8a4e3
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
@@ -0,0 +1,882 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+import packaging.version
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.logging_v2.types import logging_config
+from google.protobuf import empty_pb2 # type: ignore
+from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
+from .grpc import ConfigServiceV2GrpcTransport
+
+
+class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport):
+ """gRPC AsyncIO backend transport for ConfigServiceV2.
+
+ Service for configuring sinks used to route log entries.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'logging.googleapis.com',
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ def __init__(self, *,
+ host: str = 'logging.googleapis.com',
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
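+ # A client_cert_source is simply a zero-argument callable returning
+ # (cert_bytes, key_bytes) in PEM format; a minimal illustrative
+ # sketch with hypothetical file paths:
+ #
+ # def client_cert_source():
+ # with open("client.pem", "rb") as c, open("client.key", "rb") as k:
+ # return c.read(), k.read()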
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse]]: + r"""Return a callable for the list buckets method over gRPC. + + Lists buckets. + + Returns: + Callable[[~.ListBucketsRequest], + Awaitable[~.ListBucketsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_buckets' not in self._stubs: + self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListBuckets', + request_serializer=logging_config.ListBucketsRequest.serialize, + response_deserializer=logging_config.ListBucketsResponse.deserialize, + ) + return self._stubs['list_buckets'] + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the get bucket method over gRPC. + + Gets a bucket. + + Returns: + Callable[[~.GetBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_bucket' not in self._stubs: + self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetBucket', + request_serializer=logging_config.GetBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['get_bucket'] + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the create bucket method over gRPC. 
+
+ Creates a bucket that can be used to store log
+ entries. Once a bucket has been created, the region
+ cannot be changed.
+
+ Returns:
+ Callable[[~.CreateBucketRequest],
+ Awaitable[~.LogBucket]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_bucket' not in self._stubs:
+ self._stubs['create_bucket'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/CreateBucket',
+ request_serializer=logging_config.CreateBucketRequest.serialize,
+ response_deserializer=logging_config.LogBucket.deserialize,
+ )
+ return self._stubs['create_bucket']
+
+ @property
+ def update_bucket(self) -> Callable[
+ [logging_config.UpdateBucketRequest],
+ Awaitable[logging_config.LogBucket]]:
+ r"""Return a callable for the update bucket method over gRPC.
+
+ Updates a bucket. This method replaces the following fields in
+ the existing bucket with values from the new bucket:
+ ``retention_period``
+
+ If the retention period is decreased and the bucket is locked,
+ FAILED_PRECONDITION will be returned.
+
+ If the bucket has a LifecycleState of DELETE_REQUESTED,
+ FAILED_PRECONDITION will be returned.
+
+ A bucket's region may not be modified after it is created.
+
+ Returns:
+ Callable[[~.UpdateBucketRequest],
+ Awaitable[~.LogBucket]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_bucket' not in self._stubs:
+ self._stubs['update_bucket'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/UpdateBucket',
+ request_serializer=logging_config.UpdateBucketRequest.serialize,
+ response_deserializer=logging_config.LogBucket.deserialize,
+ )
+ return self._stubs['update_bucket']
+
+ @property
+ def delete_bucket(self) -> Callable[
+ [logging_config.DeleteBucketRequest],
+ Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the delete bucket method over gRPC.
+
+ Deletes a bucket. Moves the bucket to the DELETE_REQUESTED
+ state. After 7 days, the bucket will be purged and all logs in
+ the bucket will be permanently deleted.
+
+ Returns:
+ Callable[[~.DeleteBucketRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_bucket' not in self._stubs:
+ self._stubs['delete_bucket'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/DeleteBucket',
+ request_serializer=logging_config.DeleteBucketRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_bucket']
+
+ @property
+ def undelete_bucket(self) -> Callable[
+ [logging_config.UndeleteBucketRequest],
+ Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the undelete bucket method over gRPC.
+
+ Undeletes a bucket. A bucket that has been deleted
+ may be undeleted within the grace period of 7 days.
+ + Returns: + Callable[[~.UndeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_bucket' not in self._stubs: + self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['undelete_bucket'] + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Awaitable[logging_config.ListViewsResponse]]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + Awaitable[~.ListViewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_views' not in self._stubs: + self._stubs['list_views'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListViews', + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs['list_views'] + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_view' not in self._stubs: + self._stubs['get_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetView', + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['get_view'] + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_view' not in self._stubs: + self._stubs['create_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateView', + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['create_view'] + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the update view method over gRPC. 
+ + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_view' not in self._stubs: + self._stubs['update_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateView', + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['update_view'] + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_view' not in self._stubs: + self._stubs['delete_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteView', + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_view'] + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Awaitable[logging_config.ListSinksResponse]]: + r"""Return a callable for the list sinks method over gRPC. + + Lists sinks. + + Returns: + Callable[[~.ListSinksRequest], + Awaitable[~.ListSinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sinks' not in self._stubs: + self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=logging_config.ListSinksRequest.serialize, + response_deserializer=logging_config.ListSinksResponse.deserialize, + ) + return self._stubs['list_sinks'] + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the get sink method over gRPC. + + Gets a sink. + + Returns: + Callable[[~.GetSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
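+ # On the aio channel the returned multicallable yields an awaitable,
+ # so callers await the RPC; a hedged sketch (assumes an initialized
+ # `transport` and a hypothetical sink name):
+ #
+ # sink = await transport.get_sink(
+ # logging_config.GetSinkRequest(
+ # sink_name="projects/my-project/sinks/my-sink"))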
+ if 'get_sink' not in self._stubs: + self._stubs['get_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['get_sink'] + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['create_sink'] + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['update_sink'] + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_sink'] + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse]]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + Awaitable[~.ListExclusionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['list_exclusions'] + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['get_exclusion'] + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Returns: + Callable[[~.CreateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_exclusion' not in self._stubs: + self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['create_exclusion'] + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the update exclusion method over gRPC. 
+
+ Changes one or more properties of an existing
+ exclusion.
+
+ Returns:
+ Callable[[~.UpdateExclusionRequest],
+ Awaitable[~.LogExclusion]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_exclusion' not in self._stubs:
+ self._stubs['update_exclusion'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/UpdateExclusion',
+ request_serializer=logging_config.UpdateExclusionRequest.serialize,
+ response_deserializer=logging_config.LogExclusion.deserialize,
+ )
+ return self._stubs['update_exclusion']
+
+ @property
+ def delete_exclusion(self) -> Callable[
+ [logging_config.DeleteExclusionRequest],
+ Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the delete exclusion method over gRPC.
+
+ Deletes an exclusion.
+
+ Returns:
+ Callable[[~.DeleteExclusionRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_exclusion' not in self._stubs:
+ self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/DeleteExclusion',
+ request_serializer=logging_config.DeleteExclusionRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_exclusion']
+
+ @property
+ def get_cmek_settings(self) -> Callable[
+ [logging_config.GetCmekSettingsRequest],
+ Awaitable[logging_config.CmekSettings]]:
+ r"""Return a callable for the get cmek settings method over gRPC.
+
+ Gets the Logs Router CMEK settings for the given resource.
+
+ Note: CMEK for the Logs Router can currently only be configured
+ for GCP organizations. Once configured, it applies to all
+ projects and folders in the GCP organization.
+
+ See `Enabling CMEK for Logs
+ Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+ for more information.
+
+ Returns:
+ Callable[[~.GetCmekSettingsRequest],
+ Awaitable[~.CmekSettings]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_cmek_settings' not in self._stubs:
+ self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/GetCmekSettings',
+ request_serializer=logging_config.GetCmekSettingsRequest.serialize,
+ response_deserializer=logging_config.CmekSettings.deserialize,
+ )
+ return self._stubs['get_cmek_settings']
+
+ @property
+ def update_cmek_settings(self) -> Callable[
+ [logging_config.UpdateCmekSettingsRequest],
+ Awaitable[logging_config.CmekSettings]]:
+ r"""Return a callable for the update cmek settings method over gRPC.
+
+ Updates the Logs Router CMEK settings for the given resource.
+
+ Note: CMEK for the Logs Router can currently only be configured
+ for GCP organizations. Once configured, it applies to all
+ projects and folders in the GCP organization.
+
+ [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
+ will fail if 1) ``kms_key_name`` is invalid, or 2) the
+ associated service account does not have the required
+ ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+ the key, or 3) access to the key is disabled.
+
+ See `Enabling CMEK for Logs
+ Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+ for more information.
+
+ Returns:
+ Callable[[~.UpdateCmekSettingsRequest],
+ Awaitable[~.CmekSettings]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_cmek_settings' not in self._stubs:
+ self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary(
+ '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings',
+ request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
+ response_deserializer=logging_config.CmekSettings.deserialize,
+ )
+ return self._stubs['update_cmek_settings']
+
+
+__all__ = (
+ 'ConfigServiceV2GrpcAsyncIOTransport',
+)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py
new file mode 100644
index 000000000000..ed08d1888503
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import LoggingServiceV2Client
+from .async_client import LoggingServiceV2AsyncClient
+
+__all__ = (
+ 'LoggingServiceV2Client',
+ 'LoggingServiceV2AsyncClient',
+)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py
new file mode 100644
index 000000000000..dd9cbb78dd9a
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py
@@ -0,0 +1,781 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .client import LoggingServiceV2Client + + +class LoggingServiceV2AsyncClient: + """Service for ingesting and querying logs.""" + + _client: LoggingServiceV2Client + + DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_path = staticmethod(LoggingServiceV2Client.log_path) + parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) + common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. + """ + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. 
+ """ + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LoggingServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + LoggingServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the logging service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LoggingServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LoggingServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def delete_log(self, + request: logging.DeleteLogRequest = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): + The request object. The parameters to DeleteLog. + log_name (:class:`str`): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. 
For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.DeleteLogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_log, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def write_log_entries(self, + request: logging.WriteLogEntriesRequest = None, + *, + log_name: str = None, + resource: monitored_resource_pb2.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): + The request object. The parameters to WriteLogEntries. + log_name (:class:`str`): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. 
For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
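As a usage sketch of the mutually exclusive calling conventions enforced in the body below (the project and log IDs are hypothetical; passing ``request`` together with any flattened field raises ``ValueError``):

::

    from google.api import monitored_resource_pb2
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import LogEntry

    async def write_example():
        client = LoggingServiceV2AsyncClient()
        # Flattened style: the fields below are copied into a
        # WriteLogEntriesRequest on the caller's behalf.
        response = await client.write_log_entries(
            log_name="projects/my-project/logs/my-log",
            resource=monitored_resource_pb2.MonitoredResource(type="global"),
            entries=[LogEntry(text_payload="hello world")],
        )
        return response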
+ has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.WriteLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + + if labels: + request.labels.update(labels) + if entries: + request.entries.extend(entries) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.write_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_log_entries(self, + request: logging.ListLogEntriesRequest = None, + *, + resource_names: Sequence[str] = None, + filter: str = None, + order_by: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogEntriesAsyncPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Args: + request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): + The request object. The parameters to `ListLogEntries`. + resource_names (:class:`Sequence[str]`): + Required. Names of one or more parent resources from + which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added + to this list. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. A filter that chooses which log entries to + return. See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources + listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will + cause the filter to return no results. The maximum + length of the filter is 20000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (:class:`str`): + Optional. How the results should be sorted. 
Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.ListLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + if resource_names: + request.resource_names.extend(resource_names) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogEntriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_monitored_resource_descriptors(self, + request: logging.ListMonitoredResourceDescriptorsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_monitored_resource_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_logs(self, + request: logging.ListLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsAsyncPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): + The request object. The parameters to ListLogs. + parent (:class:`str`): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.ListLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
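A sketch of consuming the async pager this call ultimately returns (the project ID is hypothetical):

::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )

    async def print_log_names():
        client = LoggingServiceV2AsyncClient()
        pager = await client.list_logs(parent="projects/my-project")
        # Additional pages are resolved transparently during iteration.
        async for log_name in pager:
            print(log_name)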
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_logs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): + The request object AsyncIterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.tail_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
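A hedged sketch of driving this bidirectional stream, following the pattern the generated samples use (the resource name is hypothetical; the first request selects what to tail):

::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import TailLogEntriesRequest

    async def tail_example():
        client = LoggingServiceV2AsyncClient()

        async def request_stream():
            # The first request selects the resources to tail.
            yield TailLogEntriesRequest(resource_names=["projects/my-project"])

        stream = await client.tail_log_entries(request_stream())
        async for response in stream:
            for entry in response.entries:
                print(entry.text_payload)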
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "LoggingServiceV2AsyncClient", +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py new file mode 100644 index 000000000000..354945976630 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -0,0 +1,920 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import LoggingServiceV2GrpcTransport +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +class LoggingServiceV2ClientMeta(type): + """Metaclass for the LoggingServiceV2 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] + _transport_registry["grpc"] = LoggingServiceV2GrpcTransport + _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[LoggingServiceV2Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
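For illustration, the lookup that follows behaves like this; callers normally let the client constructor choose a transport rather than calling this themselves:

::

    # An explicit label selects that transport class.
    grpc_cls = LoggingServiceV2Client.get_transport_class("grpc")

    # With no label, the first registered transport ("grpc") is returned.
    assert LoggingServiceV2Client.get_transport_class() is grpc_cls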
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta):
+    """Service for ingesting and querying logs."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "logging.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LoggingServiceV2Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LoggingServiceV2Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> LoggingServiceV2Transport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            LoggingServiceV2Transport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def log_path(project: str,log: str,) -> str:
+        """Returns a fully-qualified log string."""
+        return "projects/{project}/logs/{log}".format(project=project, log=log, )
+
+    @staticmethod
+    def parse_log_path(path: str) -> Dict[str,str]:
+        """Parses a log path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/logs/(?P<log>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Union[str, LoggingServiceV2Transport, None] = None,
+            client_options: Optional[client_options_lib.ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the logging service v2 client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, LoggingServiceV2Transport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LoggingServiceV2Transport): + # transport is a LoggingServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def delete_log(self, + request: Union[logging.DeleteLogRequest, dict] = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): + The request object. The parameters to DeleteLog. + log_name (str): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.DeleteLogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Send the request. 
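A usage sketch for this synchronous path (the project and log IDs are hypothetical; the ``log_path`` helper defined above builds the resource name, and the routing header is attached automatically):

::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )

    client = LoggingServiceV2Client()
    log_name = LoggingServiceV2Client.log_path("my-project", "syslog")

    # Sends an `x-goog-request-params: log_name=...` header with the call.
    client.delete_log(log_name=log_name)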
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def write_log_entries(self, + request: Union[logging.WriteLogEntriesRequest, dict] = None, + *, + log_name: str = None, + resource: monitored_resource_pb2.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): + The request object. The parameters to WriteLogEntries. + log_name (str): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. 
+ + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.WriteLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + if labels is not None: + request.labels = labels + if entries is not None: + request.entries = entries + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_log_entries(self, + request: Union[logging.ListLogEntriesRequest, dict] = None, + *, + resource_names: Sequence[str] = None, + filter: str = None, + order_by: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogEntriesPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Args: + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): + The request object. The parameters to `ListLogEntries`. + resource_names (Sequence[str]): + Required. 
Names of one or more parent resources from + which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added + to this list. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Optional. A filter that chooses which log entries to + return. See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources + listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will + cause the filter to return no results. The maximum + length of the filter is 20000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (str): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
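A sketch of a filtered, descending query through this method (the project ID and filter expression are illustrative):

::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )

    client = LoggingServiceV2Client()
    pager = client.list_log_entries(
        resource_names=["projects/my-project"],
        filter="severity>=ERROR",
        order_by="timestamp desc",
    )
    for entry in pager:  # pages are fetched lazily
        print(entry.insert_id, entry.timestamp)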
+ if resource_names is not None: + request.resource_names = resource_names + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogEntriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_monitored_resource_descriptors(self, + request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListMonitoredResourceDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_logs(self, + request: Union[logging.ListLogsRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): + The request object. The parameters to ListLogs. + parent (str): + Required. 
The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListLogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: Iterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): + The request object iterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + + # Send the request. 
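The synchronous flavor of the tail stream can be driven with a plain generator (the resource name is hypothetical):

::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.cloud.logging_v2.types import TailLogEntriesRequest

    def request_stream():
        # The first request selects the resources to tail.
        yield TailLogEntriesRequest(resource_names=["projects/my-project"])

    client = LoggingServiceV2Client()
    for response in client.tail_log_entries(requests=request_stream()):
        for entry in response.entries:
            print(entry.text_payload)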
+ response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "LoggingServiceV2Client", +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py new file mode 100644 index 000000000000..9b94311d2e33 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging + + +class ListLogEntriesPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[log_entry.LogEntry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogEntriesAsyncPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + for page in self.pages: + yield from page.resource_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsAsyncPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
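+
+        Example (illustrative only; normally obtained by awaiting
+        ``LoggingServiceV2AsyncClient.list_monitored_resource_descriptors``)::
+
+            pager = await client.list_monitored_resource_descriptors()
+            async for descriptor in pager:
+                print(descriptor.type)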
+ """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.resource_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogsPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.log_names + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogsAsyncPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.log_names: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py new file mode 100644 index 000000000000..46e9a1fcbf4c --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LoggingServiceV2Transport +from .grpc import LoggingServiceV2GrpcTransport +from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. 
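+# The registry maps a transport name (the string a client may pass as its
+# ``transport`` argument) to the concrete transport class, e.g.
+# ``_transport_registry['grpc']`` resolves to LoggingServiceV2GrpcTransport.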
+_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] +_transport_registry['grpc'] = LoggingServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'LoggingServiceV2Transport', + 'LoggingServiceV2GrpcTransport', + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py new file mode 100644 index 000000000000..222ed3c1f99c --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-logging', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class LoggingServiceV2Transport(abc.ABC): + """Abstract transport class for LoggingServiceV2.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
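+        # Every RPC gets its default retry and timeout policy here:
+        # transient failures (DeadlineExceeded, InternalServerError,
+        # ServiceUnavailable) are retried with exponential backoff that
+        # starts at 0.1s, grows by a factor of 1.3, and is capped at 60s
+        # between attempts. Unary calls use a 60s overall deadline; the
+        # long-lived tail_log_entries stream uses 3600s.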
+        self._wrapped_methods = {
+            self.delete_log: gapic_v1.method.wrap_method(
+                self.delete_log,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.write_log_entries: gapic_v1.method.wrap_method(
+                self.write_log_entries,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_log_entries: gapic_v1.method.wrap_method(
+                self.list_log_entries,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method(
+                self.list_monitored_resource_descriptors,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_logs: gapic_v1.method.wrap_method(
+                self.list_logs,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.tail_log_entries: gapic_v1.method.wrap_method(
+                self.tail_log_entries,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=3600.0,
+                ),
+                default_timeout=3600.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def delete_log(self) -> Callable[
+            [logging.DeleteLogRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def write_log_entries(self) -> Callable[
+            [logging.WriteLogEntriesRequest],
+            Union[
+                logging.WriteLogEntriesResponse,
+                Awaitable[logging.WriteLogEntriesResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_log_entries(self) -> Callable[
+            [logging.ListLogEntriesRequest],
+            Union[
+                logging.ListLogEntriesResponse,
+                Awaitable[logging.ListLogEntriesResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_monitored_resource_descriptors(self) -> Callable[
+            [logging.ListMonitoredResourceDescriptorsRequest],
+            Union[
+                logging.ListMonitoredResourceDescriptorsResponse,
+                Awaitable[logging.ListMonitoredResourceDescriptorsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_logs(self) -> Callable[
+            [logging.ListLogsRequest],
+            Union[
+                logging.ListLogsResponse,
+                Awaitable[logging.ListLogsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def tail_log_entries(self) -> Callable[
+            [logging.TailLogEntriesRequest],
+            Union[
+
logging.TailLogEntriesResponse, + Awaitable[logging.TailLogEntriesResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'LoggingServiceV2Transport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py new file mode 100644 index 000000000000..f66cb54a21aa --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -0,0 +1,402 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO + + +class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): + """gRPC backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
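+                # This branch serves only the deprecated
+                # ``api_mtls_endpoint``/``client_cert_source`` flow; the
+                # non-deprecated path below relies on
+                # ``client_cert_source_for_mtls`` or
+                # ``ssl_channel_credentials`` instead.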
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. 
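+
+        Example (illustrative only; ``transport`` stands in for an
+        already-constructed ``LoggingServiceV2GrpcTransport``)::
+
+            request = logging.DeleteLogRequest(
+                log_name="projects/[PROJECT_ID]/logs/[LOG_ID]",
+            )
+            transport.delete_log(request)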
+ + Returns: + Callable[[~.DeleteLogRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + logging.WriteLogEntriesResponse]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + ~.WriteLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + logging.ListLogEntriesResponse]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Returns: + Callable[[~.ListLogEntriesRequest], + ~.ListLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + ~.ListMonitoredResourceDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + logging.ListLogsResponse]: + r"""Return a callable for the list logs method over gRPC. + + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + ~.ListLogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + logging.TailLogEntriesResponse]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + ~.TailLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + +__all__ = ( + 'LoggingServiceV2GrpcTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a19007ab65c9 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,406 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import LoggingServiceV2GrpcTransport + + +class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): + """gRPC AsyncIO backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
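+
+        Example (a minimal sketch; assumes application default
+        credentials are available in the environment)::
+
+            channel = LoggingServiceV2GrpcAsyncIOTransport.create_channel()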
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + Awaitable[logging.WriteLogEntriesResponse]]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. 
This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + Awaitable[~.WriteLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + Awaitable[logging.ListLogEntriesResponse]]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Returns: + Callable[[~.ListLogEntriesRequest], + Awaitable[~.ListLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + Awaitable[logging.ListLogsResponse]]: + r"""Return a callable for the list logs method over gRPC. 
+ + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + Awaitable[~.ListLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + Awaitable[logging.TailLogEntriesResponse]]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + Awaitable[~.TailLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + +__all__ = ( + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py new file mode 100644 index 000000000000..1b5d1805cdcd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MetricsServiceV2Client +from .async_client import MetricsServiceV2AsyncClient + +__all__ = ( + 'MetricsServiceV2Client', + 'MetricsServiceV2AsyncClient', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py new file mode 100644 index 000000000000..764f44f66698 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -0,0 +1,640 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .client import MetricsServiceV2Client + + +class MetricsServiceV2AsyncClient: + """Service for configuring logs-based metrics.""" + + _client: MetricsServiceV2Client + + DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) + parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) + common_billing_account_path = staticmethod(MetricsServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(MetricsServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(MetricsServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(MetricsServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(MetricsServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(MetricsServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) + parse_common_location_path = 
staticmethod(MetricsServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2AsyncClient: The constructed client. + """ + return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2AsyncClient: The constructed client. + """ + return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricsServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
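+
+        A minimal usage sketch (illustrative only; the project ID
+        ``my-project`` and the surrounding event loop are assumptions,
+        not part of this module)::
+
+            import asyncio
+            from google.cloud.logging_v2.services.metrics_service_v2 import (
+                MetricsServiceV2AsyncClient,
+            )
+
+            async def main():
+                # Credentials are resolved from the environment when omitted.
+                client = MetricsServiceV2AsyncClient()
+                pager = await client.list_log_metrics(parent="projects/my-project")
+                async for metric in pager:
+                    print(metric.name)
+
+            asyncio.run(main())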
+ """ + self._client = MetricsServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_log_metrics(self, + request: logging_metrics.ListLogMetricsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogMetricsAsyncPager: + r"""Lists logs-based metrics. + + Args: + request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`): + The request object. The parameters to ListLogMetrics. + parent (:class:`str`): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: + Result returned from ListLogMetrics. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.ListLogMetricsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_log_metrics, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogMetricsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_log_metric(self, + request: logging_metrics.GetLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Gets a logs-based metric. + + Args: + request (:class:`google.cloud.logging_v2.types.GetLogMetricRequest`): + The request object. The parameters to GetLogMetric. + metric_name (:class:`str`): + Required. 
The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.GetLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_log_metric, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_log_metric(self, + request: logging_metrics.CreateLogMetricRequest = None, + *, + parent: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates a logs-based metric. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`): + The request object. The parameters to CreateLogMetric. + parent (:class:`str`): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (:class:`google.cloud.logging_v2.types.LogMetric`): + Required. The new logs-based metric, + which must not have an identifier that + already exists. + + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metric]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.CreateLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_log_metric, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_log_metric(self, + request: logging_metrics.UpdateLogMetricRequest = None, + *, + metric_name: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates or updates a logs-based metric. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`): + The request object. The parameters to UpdateLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and + it's ``name`` field must be the same as ``[METRIC_ID]`` + If the metric does not exist in ``[PROJECT_ID]``, then a + new metric is created. + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (:class:`google.cloud.logging_v2.types.LogMetric`): + Required. The updated metric. + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. 
The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name, metric]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.UpdateLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_log_metric, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_log_metric(self, + request: logging_metrics.DeleteLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`): + The request object. The parameters to DeleteLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.DeleteLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
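+        # (Illustrative note: only fields explicitly passed as flattened
+        # arguments are copied onto the request; everything else is taken
+        # from the ``request`` object as-is.)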
+ if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_log_metric, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "MetricsServiceV2AsyncClient", +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py new file mode 100644 index 000000000000..af554cf6d6fd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -0,0 +1,799 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
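+#
+# A minimal synchronous usage sketch for the client defined below
+# (illustrative only; the project ID "my-project" is an assumption):
+#
+#     from google.cloud.logging_v2.services.metrics_service_v2 import (
+#         MetricsServiceV2Client,
+#     )
+#
+#     client = MetricsServiceV2Client()
+#     for metric in client.list_log_metrics(parent="projects/my-project"):
+#         print(metric.name)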
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.api import distribution_pb2  # type: ignore
+from google.api import metric_pb2  # type: ignore
+from google.cloud.logging_v2.services.metrics_service_v2 import pagers
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import MetricsServiceV2GrpcTransport
+from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport
+
+
+class MetricsServiceV2ClientMeta(type):
+    """Metaclass for the MetricsServiceV2 client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[MetricsServiceV2Transport]]
+    _transport_registry["grpc"] = MetricsServiceV2GrpcTransport
+    _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[MetricsServiceV2Transport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta):
+    """Service for configuring logs-based metrics."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
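+            # e.g. "logging.googleapis.com"      -> ("logging", None, None, ".googleapis.com")
+            #      "logging.mtls.googleapis.com" -> ("logging", ".mtls", None, ".googleapis.com")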
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsServiceV2Transport: The transport used by the client + instance. 
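+
+        For example (a hedged sketch; assumes a client built with default
+        settings)::
+
+            client = MetricsServiceV2Client()
+            assert isinstance(client.transport, MetricsServiceV2Transport)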
+ """ + return self._transport + + @staticmethod + def log_metric_path(project: str,metric: str,) -> str: + """Returns a fully-qualified log_metric string.""" + return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + + @staticmethod + def parse_log_metric_path(path: str) -> Dict[str,str]: + """Parses a log_metric path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MetricsServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetricsServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricsServiceV2Transport): + # transport is a MetricsServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
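+                    # (Illustrative note: scopes, like credentials, are
+                    # configured on the transport instance itself.)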
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_log_metrics(self, + request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogMetricsPager: + r"""Lists logs-based metrics. + + Args: + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): + The request object. The parameters to ListLogMetrics. + parent (str): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: + Result returned from ListLogMetrics. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.ListLogMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogMetricsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
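+        # (The pager fetches subsequent pages lazily, issuing further
+        # ``ListLogMetrics`` calls only as iteration crosses a page boundary.)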
+ return response + + def get_log_metric(self, + request: Union[logging_metrics.GetLogMetricRequest, dict] = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Gets a logs-based metric. + + Args: + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): + The request object. The parameters to GetLogMetric. + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.GetLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_log_metric(self, + request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, + *, + parent: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates a logs-based metric. + + Args: + request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): + The request object. The parameters to CreateLogMetric. + parent (str): + Required. 
The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The new logs-based metric, + which must not have an identifier that + already exists. + + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metric]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.CreateLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_log_metric(self, + request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, + *, + metric_name: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates or updates a logs-based metric. + + Args: + request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): + The request object. The parameters to UpdateLogMetric. + metric_name (str): + Required. 
The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and + it's ``name`` field must be the same as ``[METRIC_ID]`` + If the metric does not exist in ``[PROJECT_ID]``, then a + new metric is created. + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The updated metric. + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name, metric]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.UpdateLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_log_metric(self, + request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + Args: + request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): + The request object. The parameters to DeleteLogMetric. + metric_name (str): + Required. 
The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.DeleteLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "MetricsServiceV2Client", +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py new file mode 100644 index 000000000000..f6bf04e4f968 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.logging_v2.types import logging_metrics + + +class ListLogMetricsPager: + """A pager for iterating through ``list_log_metrics`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metrics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogMetrics`` requests and continue to iterate + through the ``metrics`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_metrics.ListLogMetricsResponse], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogMetricsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogMetricsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_metrics.ListLogMetricsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_metrics.LogMetric]: + for page in self.pages: + yield from page.metrics + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogMetricsAsyncPager: + """A pager for iterating through ``list_log_metrics`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metrics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogMetrics`` requests and continue to iterate + through the ``metrics`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogMetricsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogMetricsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
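+
+        A hedged usage sketch (the pager is normally constructed for you by
+        ``MetricsServiceV2AsyncClient.list_log_metrics``; the project ID is
+        an assumption)::
+
+            pager = await client.list_log_metrics(parent="projects/my-project")
+            async for metric in pager:  # yields LogMetric messages
+                print(metric.name)
+            # or walk whole responses instead:
+            async for page in pager.pages:
+                print(page.next_page_token)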
+ """ + self._method = method + self._request = logging_metrics.ListLogMetricsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]: + async def async_generator(): + async for page in self.pages: + for response in page.metrics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py new file mode 100644 index 000000000000..28e9b710ec84 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetricsServiceV2Transport +from .grpc import MetricsServiceV2GrpcTransport +from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] +_transport_registry['grpc'] = MetricsServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'MetricsServiceV2Transport', + 'MetricsServiceV2GrpcTransport', + 'MetricsServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py new file mode 100644 index 000000000000..b9170bf568f9 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-logging', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class MetricsServiceV2Transport(abc.ABC): + """Abstract transport class for MetricsServiceV2.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. 
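+        # (Note: ``_get_scopes_kwargs`` above decides whether these scopes are
+        # passed to google-auth as ``scopes`` plus ``default_scopes`` or merged
+        # with ``AUTH_SCOPES``, depending on the installed google-auth version.)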
+ self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
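+        # Each entry below pairs a transport method with gapic_v1.method.wrap_method,
+        # which layers the default retry policy, the default timeout, and the
+        # ``client_info`` user-agent metadata onto every invocation. The Retry
+        # objects back off exponentially (0.1s initial delay, 1.3x multiplier,
+        # capped at 60s) and only re-attempt the transient errors listed in
+        # their predicates; note that create_log_metric is configured without
+        # a default retry.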
+        self._wrapped_methods = {
+            self.list_log_metrics: gapic_v1.method.wrap_method(
+                self.list_log_metrics,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_log_metric: gapic_v1.method.wrap_method(
+                self.get_log_metric,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.create_log_metric: gapic_v1.method.wrap_method(
+                self.create_log_metric,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_log_metric: gapic_v1.method.wrap_method(
+                self.update_log_metric,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_log_metric: gapic_v1.method.wrap_method(
+                self.delete_log_metric,
+                default_retry=retries.Retry(
+                    initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.InternalServerError,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def list_log_metrics(self) -> Callable[
+            [logging_metrics.ListLogMetricsRequest],
+            Union[
+                logging_metrics.ListLogMetricsResponse,
+                Awaitable[logging_metrics.ListLogMetricsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_log_metric(self) -> Callable[
+            [logging_metrics.GetLogMetricRequest],
+            Union[
+                logging_metrics.LogMetric,
+                Awaitable[logging_metrics.LogMetric]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def create_log_metric(self) -> Callable[
+            [logging_metrics.CreateLogMetricRequest],
+            Union[
+                logging_metrics.LogMetric,
+                Awaitable[logging_metrics.LogMetric]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_log_metric(self) -> Callable[
+            [logging_metrics.UpdateLogMetricRequest],
+            Union[
+                logging_metrics.LogMetric,
+                Awaitable[logging_metrics.LogMetric]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_log_metric(self) -> Callable[
+            [logging_metrics.DeleteLogMetricRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'MetricsServiceV2Transport',
+)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
new file mode 100644
index 000000000000..e300d9f5320e
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
@@ -0,0 +1,357 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+
+
+class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
+    """gRPC backend transport for MetricsServiceV2.
+
+    Service for configuring logs-based metrics.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'logging.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: str = None,
+            scopes: Sequence[str] = None,
+            channel: grpc.Channel = None,
+            api_mtls_endpoint: str = None,
+            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+            ssl_channel_credentials: grpc.ChannelCredentials = None,
+            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def list_log_metrics(self) -> Callable[
+            [logging_metrics.ListLogMetricsRequest],
+            logging_metrics.ListLogMetricsResponse]:
+        r"""Return a callable for the list log metrics method over gRPC.
+
+        Lists logs-based metrics.
+
+        Returns:
+            Callable[[~.ListLogMetricsRequest],
+                    ~.ListLogMetricsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_log_metrics' not in self._stubs:
+            self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.MetricsServiceV2/ListLogMetrics',
+                request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
+                response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
+            )
+        return self._stubs['list_log_metrics']
+
+    @property
+    def get_log_metric(self) -> Callable[
+            [logging_metrics.GetLogMetricRequest],
+            logging_metrics.LogMetric]:
+        r"""Return a callable for the get log metric method over gRPC.
+
+        Gets a logs-based metric.
+
+        Returns:
+            Callable[[~.GetLogMetricRequest],
+                    ~.LogMetric]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_log_metric' not in self._stubs:
+            self._stubs['get_log_metric'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.MetricsServiceV2/GetLogMetric',
+                request_serializer=logging_metrics.GetLogMetricRequest.serialize,
+                response_deserializer=logging_metrics.LogMetric.deserialize,
+            )
+        return self._stubs['get_log_metric']
+
+    @property
+    def create_log_metric(self) -> Callable[
+            [logging_metrics.CreateLogMetricRequest],
+            logging_metrics.LogMetric]:
+        r"""Return a callable for the create log metric method over gRPC.
+
+        Creates a logs-based metric.
+
+        Returns:
+            Callable[[~.CreateLogMetricRequest],
+                    ~.LogMetric]:
+                A function that, when called, will call the underlying RPC
+                on the server.
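+
+        Example (illustrative sketch, not generated output; assumes
+        Application Default Credentials and a placeholder project)::
+
+            from google.cloud import logging_v2
+
+            transport = MetricsServiceV2GrpcTransport()
+            request = logging_v2.types.CreateLogMetricRequest(
+                parent='projects/my-project',
+                metric=logging_v2.types.LogMetric(
+                    name='error_count',
+                    filter='severity>=ERROR',
+                ),
+            )
+            created = transport.create_log_metric(request)
+
+        In practice the call usually goes through ``MetricsServiceV2Client``,
+        which applies the defaults wrapped in ``_prep_wrapped_messages``.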
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['create_log_metric'] + + @property + def update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['update_log_metric'] + + @property + def delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log_metric'] + + +__all__ = ( + 'MetricsServiceV2GrpcTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7da832822ebd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import packaging.version
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.logging_v2.types import logging_metrics
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+from .grpc import MetricsServiceV2GrpcTransport
+
+
+class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
+    """gRPC AsyncIO backend transport for MetricsServiceV2.
+
+    Service for configuring logs-based metrics.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+            host: str = 'logging.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'logging.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: aio.Channel = None,
+            api_mtls_endpoint: str = None,
+            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+            ssl_channel_credentials: grpc.ChannelCredentials = None,
+            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+            quota_project_id=None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
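+                # Note: both cert callbacks must return a (cert_bytes, key_bytes)
+                # tuple in PEM format; the pair is handed to
+                # grpc.ssl_channel_credentials() below. A hypothetical callback
+                # could simply read local PEM files:
+                #
+                #     def client_cert_source():
+                #         with open('client.pem', 'rb') as c, open('client.key', 'rb') as k:
+                #             return c.read(), k.read()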
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse]]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + Awaitable[~.ListLogMetricsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_metrics' not in self._stubs: + self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs['list_log_metrics'] + + @property + def get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_log_metric' not in self._stubs: + self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['get_log_metric'] + + @property + def create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['create_log_metric'] + + @property + def update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['update_log_metric'] + + @property + def delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
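+        # Note: the stub is built lazily on first property access and cached
+        # in self._stubs, so later lookups reuse one callable bound to the
+        # cached channel.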
+ if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log_metric'] + + +__all__ = ( + 'MetricsServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py new file mode 100644 index 000000000000..38c93c541801 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, +) +from .logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from .logging_config import ( + BigQueryOptions, + CmekSettings, + CreateBucketRequest, + CreateExclusionRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetSinkRequest, + GetViewRequest, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LogBucket, + LogExclusion, + LogSink, + LogView, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSinkRequest, + UpdateViewRequest, + LifecycleState, +) +from .logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) + +__all__ = ( + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryOptions', + 'CmekSettings', + 'CreateBucketRequest', + 'CreateExclusionRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', + 'DeleteExclusionRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 
'GetSinkRequest', + 'GetViewRequest', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'LifecycleState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 'UpdateLogMetricRequest', +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py new file mode 100644 index 000000000000..45b1c8858763 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py @@ -0,0 +1,321 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + }, +) + + +class LogEntry(proto.Message): + r"""An individual entry in a log. + Attributes: + log_name (str): + Required. The resource name of the log to which this log + entry belongs: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + A project number may be used in place of PROJECT_ID. The + project number is translated to its corresponding PROJECT_ID + internally and the ``log_name`` field will contain + PROJECT_ID in queries and exports. + + ``[LOG_ID]`` must be URL-encoded within ``log_name``. + Example: + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can + only include the following characters: upper and lower case + alphanumeric characters, forward-slash, underscore, hyphen, + and period. + + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. + Listing the log entry will not show the leading slash and + filtering for a log name with a leading slash will never + return any results. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Required. 
The monitored resource that + produced this log entry. + Example: a log entry that reports a database + error would be associated with the monitored + resource designating the particular database + that reported the error. + proto_payload (google.protobuf.any_pb2.Any): + The log entry payload, represented as a + protocol buffer. Some Google Cloud Platform + services use this field for their log entry + payloads. + The following protocol buffer types are + supported; user-defined types are not supported: + + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog". + text_payload (str): + The log entry payload, represented as a + Unicode string (UTF-8). + json_payload (google.protobuf.struct_pb2.Struct): + The log entry payload, represented as a + structure that is expressed as a JSON object. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age + and to enforce the logs retention period. If this field is + omitted in a new log entry, then Logging assigns it the + current time. Timestamps have nanosecond accuracy, but + trailing zeros in the fractional seconds might be omitted + when the timestamp is displayed. + + Incoming log entries must have timestamps that don't exceed + the `logs retention + period `__ + in the past, and that don't exceed 24 hours in the future. + Log entries outside those time boundaries aren't ingested by + Logging. + receive_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the log entry was + received by Logging. + severity (google.logging.type.log_severity_pb2.LogSeverity): + Optional. The severity of the log entry. The default value + is ``LogSeverity.DEFAULT``. + insert_id (str): + Optional. A unique identifier for the log entry. If you + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which are removed in a + single query result. However, there are no guarantees of + de-duplication in the export of logs. + + If the ``insert_id`` is omitted when writing a log entry, + the Logging API assigns its own unique identifier in this + field. + + In queries, the ``insert_id`` is also used to order log + entries that have the same ``log_name`` and ``timestamp`` + values. + http_request (google.logging.type.http_request_pb2.HttpRequest): + Optional. Information about the HTTP request + associated with this log entry, if applicable. + labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): + Optional. A set of user-defined (key, value) + data that provides additional information about + the log entry. + operation (google.cloud.logging_v2.types.LogEntryOperation): + Optional. Information about an operation + associated with the log entry, if applicable. + trace (str): + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: + ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id (str): + Optional. The span ID within the trace associated with the + log entry. + + For Trace spans, this is the same format that the Trace API + v2 uses: a 16-character hexadecimal encoding of an 8-byte + array, such as ``000000000000004a``. + trace_sampled (bool): + Optional. 
The sampling decision of the trace associated with + the log entry. + + True means that the trace resource name in the ``trace`` + field was sampled for storage in a trace backend. False + means that the trace was not sampled for storage when this + log entry was written, or the sampling decision was unknown + at the time. A non-sampled ``trace`` value is still useful + as a request correlation identifier. The default is False. + source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): + Optional. Source code location information + associated with the log entry, if any. + """ + + log_name = proto.Field( + proto.STRING, + number=12, + ) + resource = proto.Field( + proto.MESSAGE, + number=8, + message=monitored_resource_pb2.MonitoredResource, + ) + proto_payload = proto.Field( + proto.MESSAGE, + number=2, + oneof='payload', + message=any_pb2.Any, + ) + text_payload = proto.Field( + proto.STRING, + number=3, + oneof='payload', + ) + json_payload = proto.Field( + proto.MESSAGE, + number=6, + oneof='payload', + message=struct_pb2.Struct, + ) + timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + receive_timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + severity = proto.Field( + proto.ENUM, + number=10, + enum=log_severity_pb2.LogSeverity, + ) + insert_id = proto.Field( + proto.STRING, + number=4, + ) + http_request = proto.Field( + proto.MESSAGE, + number=7, + message=http_request_pb2.HttpRequest, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + operation = proto.Field( + proto.MESSAGE, + number=15, + message='LogEntryOperation', + ) + trace = proto.Field( + proto.STRING, + number=22, + ) + span_id = proto.Field( + proto.STRING, + number=27, + ) + trace_sampled = proto.Field( + proto.BOOL, + number=30, + ) + source_location = proto.Field( + proto.MESSAGE, + number=23, + message='LogEntrySourceLocation', + ) + + +class LogEntryOperation(proto.Message): + r"""Additional information about a potentially long-running + operation with which a log entry is associated. + + Attributes: + id (str): + Optional. An arbitrary operation identifier. + Log entries with the same identifier are assumed + to be part of the same operation. + producer (str): + Optional. An arbitrary producer identifier. The combination + of ``id`` and ``producer`` must be globally unique. Examples + for ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first (bool): + Optional. Set this to True if this is the + first log entry in the operation. + last (bool): + Optional. Set this to True if this is the + last log entry in the operation. + """ + + id = proto.Field( + proto.STRING, + number=1, + ) + producer = proto.Field( + proto.STRING, + number=2, + ) + first = proto.Field( + proto.BOOL, + number=3, + ) + last = proto.Field( + proto.BOOL, + number=4, + ) + + +class LogEntrySourceLocation(proto.Message): + r"""Additional information about the source code location that + produced the log entry. + + Attributes: + file (str): + Optional. Source file name. Depending on the + runtime environment, this might be a simple name + or a fully-qualified name. + line (int): + Optional. Line within the source file. + 1-based; 0 indicates no line number available. + function (str): + Optional. Human-readable name of the function or method + being invoked, with optional context such as the class or + package name. 
This information may be used in contexts such + as the logs viewer, where a file and line number are less + meaningful. The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). + """ + + file = proto.Field( + proto.STRING, + number=1, + ) + line = proto.Field( + proto.INT64, + number=2, + ) + function = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py new file mode 100644 index 000000000000..cfae1781a75d --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'DeleteLogRequest', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + }, +) + + +class DeleteLogRequest(proto.Message): + r"""The parameters to DeleteLog. + Attributes: + log_name (str): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + """ + + log_name = proto.Field( + proto.STRING, + number=1, + ) + + +class WriteLogEntriesRequest(proto.Message): + r"""The parameters to WriteLogEntries. + Attributes: + log_name (str): + Optional. A default log resource name that is assigned to + all log entries in ``entries`` that do not specify a value + for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. 
For
+            example:
+
+            ::
+
+                "projects/my-project-id/logs/syslog"
+                "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"
+
+            The permission ``logging.logEntries.create`` is needed on
+            each project, organization, billing account, or folder that
+            is receiving new log entries, whether the resource is
+            specified in ``logName`` or in an individual log entry.
+        resource (google.api.monitored_resource_pb2.MonitoredResource):
+            Optional. A default monitored resource object that is
+            assigned to all log entries in ``entries`` that do not
+            specify a value for ``resource``. Example:
+
+            ::
+
+                { "type": "gce_instance",
+                  "labels": {
+                    "zone": "us-central1-a", "instance_id": "00000000000000000000" }}
+
+            See [LogEntry][google.logging.v2.LogEntry].
+        labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]):
+            Optional. Default labels that are added to the ``labels``
+            field of all log entries in ``entries``. If a log entry
+            already has a label with the same key as a label in this
+            parameter, then the log entry's label is not changed. See
+            [LogEntry][google.logging.v2.LogEntry].
+        entries (Sequence[google.cloud.logging_v2.types.LogEntry]):
+            Required. The log entries to send to Logging. The order of
+            log entries in this list does not matter. Values supplied in
+            this method's ``log_name``, ``resource``, and ``labels``
+            fields are copied into those log entries in this list that
+            do not include values for their corresponding fields. For
+            more information, see the
+            [LogEntry][google.logging.v2.LogEntry] type.
+
+            If the ``timestamp`` or ``insert_id`` fields are missing in
+            log entries, then this method supplies the current time or a
+            unique identifier, respectively. The supplied values are
+            chosen so that, among the log entries that did not supply
+            their own values, the entries earlier in the list will sort
+            before the entries later in the list. See the
+            ``entries.list`` method.
+
+            Log entries with timestamps that are more than the `logs
+            retention
+            period `__ in
+            the past or more than 24 hours in the future will not be
+            available when calling ``entries.list``. However, those log
+            entries can still be `exported with
+            LogSinks `__.
+
+            To improve throughput and to avoid exceeding the `quota
+            limit `__ for
+            calls to ``entries.write``, you should try to include
+            several log entries in this list, rather than calling this
+            method for each individual log entry.
+        partial_success (bool):
+            Optional. Whether valid entries should be written even if
+            some other entries fail due to INVALID_ARGUMENT or
+            PERMISSION_DENIED errors. If any entry is not written, then
+            the response status is the error associated with one of the
+            failed entries and the response includes error details keyed
+            by the entries' zero-based index in the ``entries.write``
+            method.
+        dry_run (bool):
+            Optional. If true, the request should expect a normal
+            response, but the entries won't be persisted or exported.
+            Useful for checking whether the logging API endpoints are
+            working properly before sending valuable data.
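+
+    Example (illustrative sketch, not part of the generated docstring;
+    the project, log name, and payload are placeholders)::
+
+        from google.cloud import logging_v2
+
+        request = logging_v2.types.WriteLogEntriesRequest(
+            log_name='projects/my-project/logs/my-log',
+            resource={'type': 'global', 'labels': {}},
+            entries=[{'text_payload': 'hello world'}],
+        )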
+ """ + + log_name = proto.Field( + proto.STRING, + number=1, + ) + resource = proto.Field( + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + entries = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=log_entry.LogEntry, + ) + partial_success = proto.Field( + proto.BOOL, + number=5, + ) + dry_run = proto.Field( + proto.BOOL, + number=6, + ) + + +class WriteLogEntriesResponse(proto.Message): + r"""Result returned from WriteLogEntries. """ + + +class WriteLogEntriesPartialErrors(proto.Message): + r"""Error details for WriteLogEntries with partial success. + Attributes: + log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written + due to a permanent error, keyed by the entry's zero-based + index in ``WriteLogEntriesRequest.entries``. + + Failed requests for which no entries are written will not + include per-entry errors. + """ + + log_entry_errors = proto.MapField( + proto.INT32, + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + + +class ListLogEntriesRequest(proto.Message): + r"""The parameters to ``ListLogEntries``. + Attributes: + resource_names (Sequence[str]): + Required. Names of one or more parent resources from which + to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added to + this list. + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not listed in ``resource_names`` will cause the filter to + return no results. The maximum length of the filter is 20000 + characters. + order_by (str): + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size (int): + Optional. The maximum number of results to return from this + request. Default is 50. If the value is negative or exceeds + 1000, the request is rejected. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``page_token`` must be the value of ``next_page_token`` from + the previous response. 
The values of other method parameters + should be identical to those in the previous call. + """ + + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + order_by = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLogEntriesResponse(proto.Message): + r"""Result returned from ``ListLogEntries``. + Attributes: + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that + more entries may exist. See ``nextPageToken`` for more + information. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + + If a value for ``next_page_token`` appears and the + ``entries`` field is empty, it means that the search found + no log entries so far but it did not have time to search all + the possible log entries. Retry the method with this value + for ``page_token`` to continue the search. Alternatively, + consider speeding up the search by changing your filter to + specify a single log name or resource type, or to narrow the + time range of the search. + """ + + @property + def raw_page(self): + return self + + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsRequest(proto.Message): + r"""The parameters to ListMonitoredResourceDescriptors + Attributes: + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + page_size = proto.Field( + proto.INT32, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsResponse(proto.Message): + r"""Result returned from ListMonitoredResourceDescriptors. + Attributes: + resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + A list of resource descriptors. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + resource_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListLogsRequest(proto.Message): + r"""The parameters to ListLogs. + Attributes: + parent (str): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". 
+ page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + resource_names (Sequence[str]): + Optional. The resource name that owns the logs: + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + To support legacy queries, it could also be: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class ListLogsResponse(proto.Message): + r"""Result returned from ListLogs. + Attributes: + log_names (Sequence[str]): + A list of log names. For example, + ``"projects/my-project/logs/syslog"`` or + ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + log_names = proto.RepeatedField( + proto.STRING, + number=3, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class TailLogEntriesRequest(proto.Message): + r"""The parameters to ``TailLogEntries``. + Attributes: + resource_names (Sequence[str]): + Required. Name of a parent resource from which to retrieve + log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views: + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Filters `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not in ``resource_names`` will cause the filter to return no + results. The maximum length of the filter is 20000 + characters. + buffer_window (google.protobuf.duration_pb2.Duration): + Optional. The amount of time to buffer log + entries at the server before being returned to + prevent out of order results due to late + arriving log entries. 
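For ``ListLogs``, the legacy ``parent`` form and the newer view-scoped ``resource_names`` can be combined; a brief sketch with invented IDs::

    from google.cloud.logging_v2 import types

    request = types.ListLogsRequest(
        parent="projects/my-project",
        resource_names=[
            "projects/my-project/locations/global/buckets/my-bucket/views/my-view",
        ],
        page_size=50,
    )
    # Returned log names are URL-encoded, e.g.
    # "projects/my-project/logs/cloudresourcemanager.googleapis.com%2Factivity".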
Valid values are between + 0-60000 milliseconds. Defaults to 2000 + milliseconds. + """ + + resource_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + buffer_window = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class TailLogEntriesResponse(proto.Message): + r"""Result returned from ``TailLogEntries``. + Attributes: + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. Each response in the stream will + order entries with increasing values of + ``LogEntry.timestamp``. Ordering is not guaranteed between + separate responses. + suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): + If entries that otherwise would have been + included in the session were not sent back to + the client, counts of relevant entries omitted + from the session with the reason that they were + not included. There will be at most one of each + reason per response. The counts represent the + number of suppressed entries since the last + streamed response. + """ + + class SuppressionInfo(proto.Message): + r"""Information about entries that were omitted from the session. + Attributes: + reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): + The reason that entries were omitted from the + session. + suppressed_count (int): + A lower bound on the count of entries omitted due to + ``reason``. + """ + class Reason(proto.Enum): + r"""An indicator of why entries were omitted.""" + REASON_UNSPECIFIED = 0 + RATE_LIMIT = 1 + NOT_CONSUMED = 2 + + reason = proto.Field( + proto.ENUM, + number=1, + enum='TailLogEntriesResponse.SuppressionInfo.Reason', + ) + suppressed_count = proto.Field( + proto.INT32, + number=2, + ) + + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + suppression_info = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=SuppressionInfo, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py new file mode 100644 index 000000000000..a4b7b2571d7a --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py @@ -0,0 +1,1457 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
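Before moving on to the configuration types, a hedged sketch of the ``TailLogEntries`` messages defined above (``duration_pb2`` is the protobuf well-known type this module already imports; the resource name is hypothetical)::

    from google.protobuf import duration_pb2
    from google.cloud.logging_v2 import types

    request = types.TailLogEntriesRequest(
        resource_names=["projects/my-project"],
        filter='resource.type="gce_instance"',
        buffer_window=duration_pb2.Duration(seconds=5),  # 0-60000 ms; default 2000 ms
    )

    response = types.TailLogEntriesResponse()
    for info in response.suppression_info:
        # Reason is RATE_LIMIT or NOT_CONSUMED; the count is a lower bound
        # on entries omitted since the previous streamed response.
        print(info.reason, info.suppressed_count)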
+# +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LifecycleState', + 'LogBucket', + 'LogView', + 'LogSink', + 'BigQueryOptions', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'CreateBucketRequest', + 'UpdateBucketRequest', + 'GetBucketRequest', + 'DeleteBucketRequest', + 'UndeleteBucketRequest', + 'ListViewsRequest', + 'ListViewsResponse', + 'CreateViewRequest', + 'UpdateViewRequest', + 'GetViewRequest', + 'DeleteViewRequest', + 'ListSinksRequest', + 'ListSinksResponse', + 'GetSinkRequest', + 'CreateSinkRequest', + 'UpdateSinkRequest', + 'DeleteSinkRequest', + 'LogExclusion', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'GetExclusionRequest', + 'CreateExclusionRequest', + 'UpdateExclusionRequest', + 'DeleteExclusionRequest', + 'GetCmekSettingsRequest', + 'UpdateCmekSettingsRequest', + 'CmekSettings', + }, +) + + +class LifecycleState(proto.Enum): + r"""LogBucket lifecycle states.""" + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + + +class LogBucket(proto.Message): + r"""Describes a repository of logs. + Attributes: + name (str): + The resource name of the bucket. For example: + "projects/my-project-id/locations/my-location/buckets/my-bucket-id + The supported locations are: "global" + + For the location of ``global`` it is unspecified where logs + are actually stored. Once a bucket has been created, the + location can not be changed. + description (str): + Describes this bucket. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + bucket. This is not set for any of the default + buckets. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + bucket. + retention_days (int): + Logs will be retained by default for this + amount of time, after which they will + automatically be deleted. The minimum retention + period is 1 day. If this value is set to zero at + bucket creation time, the default time of 30 + days will be used. + locked (bool): + Whether the bucket has been locked. + The retention period on a locked bucket may not + be changed. Locked buckets may only be deleted + if they are empty. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The bucket lifecycle state. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + retention_days = proto.Field( + proto.INT32, + number=11, + ) + locked = proto.Field( + proto.BOOL, + number=9, + ) + lifecycle_state = proto.Field( + proto.ENUM, + number=12, + enum='LifecycleState', + ) + + +class LogView(proto.Message): + r"""Describes a view over logs in a bucket. + Attributes: + name (str): + The resource name of the view. + For example + "projects/my-project-id/locations/my- + location/buckets/my-bucket-id/views/my-view + description (str): + Describes this view. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + view. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + view. 
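Circling back to ``LogBucket`` above, a minimal construction sketch (names and values are illustrative only)::

    from google.cloud.logging_v2 import types

    bucket = types.LogBucket(
        name="projects/my-project/locations/global/buckets/my-bucket",
        description="Audit log retention bucket",
        retention_days=90,  # 0 at creation time falls back to the 30-day default
        locked=False,       # once locked, the retention period cannot be changed
    )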
+ filter (str): + Filter that restricts which log entries in a bucket are + visible in this view. Filters are restricted to be a logical + AND of ==/!= of any of the following: originating + project/folder/organization/billing account. resource type + log id Example: SOURCE("projects/myproject") AND + resource.type = "gce_instance" AND LOG_ID("stdout") + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + filter = proto.Field( + proto.STRING, + number=7, + ) + + +class LogSink(proto.Message): + r"""Describes a sink used to export log entries to one of the + following destinations in any project: a Cloud Storage bucket, a + BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. The sink must be + created within a project, organization, billing account, or + folder. + + Attributes: + name (str): + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include + only the following characters: upper and lower-case + alphanumeric characters, underscores, hyphens, and periods. + First character has to be alphanumeric. + destination (str): + Required. The export destination: + + :: + + "storage.googleapis.com/[GCS_BUCKET]" + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + + The sink's ``writer_identity``, set when the sink is + created, must have permission to write to the destination or + else the log entries are not exported. For more information, + see `Exporting Logs with + Sinks `__. + filter (str): + Optional. An `advanced logs + filter `__. + The only exported log entries are those that are in the + resource owning the sink and that match the filter. For + example: + + :: + + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + description (str): + Optional. A description of this sink. + The maximum length of the description is 8000 + characters. + disabled (bool): + Optional. If set to True, then this sink is + disabled and it does not export any log entries. + exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + Optional. Log entries that match any of the exclusion + filters will not be exported. If a log entry is matched by + both ``filter`` and one of ``exclusion_filters`` it will not + be exported. + output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): + Deprecated. This field is unused. + writer_identity (str): + Output only. An IAM identity—a service account or + group—under which Logging writes the exported log entries to + the sink's destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. + + Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a + Resource `__. + Consult the destination service's documentation to determine + the appropriate IAM roles to assign to the identity. + include_children (bool): + Optional. 
This field applies only to sinks owned by + organizations and folders. If the field is false, the + default, only the logs owned by the sink's parent resource + are available for export. If the field is true, then logs + from all the projects, folders, and billing accounts + contained in the sink's parent resource are also available + for export. Whether a particular log entry from the children + is exported depends on the sink's filter expression. For + example, if this field is true, then the filter + ``resource.type=gce_instance`` would export all Compute + Engine VM instance log entries from all projects in the + sink's parent. To only export entries from certain child + projects, filter on the project part of the log name: + + :: + + logName:("projects/test-project1/" OR "projects/test-project2/") AND + resource.type=gce_instance + bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): + Optional. Options that affect sinks exporting + data to BigQuery. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + sink. + This field may not be present for older sinks. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + sink. + This field may not be present for older sinks. + """ + class VersionFormat(proto.Enum): + r"""Deprecated. This is unused.""" + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + destination = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + description = proto.Field( + proto.STRING, + number=18, + ) + disabled = proto.Field( + proto.BOOL, + number=19, + ) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='LogExclusion', + ) + output_version_format = proto.Field( + proto.ENUM, + number=6, + enum=VersionFormat, + ) + writer_identity = proto.Field( + proto.STRING, + number=8, + ) + include_children = proto.Field( + proto.BOOL, + number=9, + ) + bigquery_options = proto.Field( + proto.MESSAGE, + number=12, + oneof='options', + message='BigQueryOptions', + ) + create_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class BigQueryOptions(proto.Message): + r"""Options that change functionality of a sink exporting data to + BigQuery. + + Attributes: + use_partitioned_tables (bool): + Optional. Whether to use `BigQuery's partition + tables `__. + By default, Logging creates dated tables based on the log + entries' timestamps, e.g. syslog_20170523. With partitioned + tables the date suffix is no longer present and `special + query + syntax `__ + has to be used instead. In both cases, tables are sharded + based on UTC timezone. + uses_timestamp_column_partitioning (bool): + Output only. True if new timestamp column based partitioning + is in use, false if legacy ingestion-time partitioning is in + use. All new sinks will have this field set true and will + use timestamp column based partitioning. If + use_partitioned_tables is false, this value has no meaning + and will be false. Legacy sinks using partitioned tables + will have this field set to false. 
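Pulling the ``LogSink`` fields above together, a sketch of a BigQuery export sink; the destination dataset and filter are invented::

    from google.cloud.logging_v2 import types

    sink = types.LogSink(
        name="my-errors-to-bq",
        destination="bigquery.googleapis.com/projects/my-project/datasets/error_logs",
        filter="severity>=ERROR",
        include_children=False,  # only meaningful for org- and folder-owned sinks
        bigquery_options=types.BigQueryOptions(use_partitioned_tables=True),
    )
    # ``writer_identity`` is output only: the service populates it on
    # sinks.create/sinks.update; the caller then grants it write access.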
+ """ + + use_partitioned_tables = proto.Field( + proto.BOOL, + number=1, + ) + uses_timestamp_column_partitioning = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBucketsRequest(proto.Message): + r"""The parameters to ``ListBuckets``. + Attributes: + parent (str): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListBucketsResponse(proto.Message): + r"""The response from ListBuckets. + Attributes: + buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): + A list of buckets. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + buckets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogBucket', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateBucketRequest(proto.Message): + r"""The parameters to ``CreateBucket``. + Attributes: + parent (str): + Required. The resource in which to create the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + + Example: ``"projects/my-logging-project/locations/global"`` + bucket_id (str): + Required. A client-assigned identifier such as + ``"my-bucket"``. Identifiers are limited to 100 characters + and can include only letters, digits, underscores, hyphens, + and periods. + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The new bucket. The region + specified in the new bucket must be compliant + with any Location Restriction Org Policy. The + name field in the bucket is ignored. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + bucket_id = proto.Field( + proto.STRING, + number=2, + ) + bucket = proto.Field( + proto.MESSAGE, + number=3, + message='LogBucket', + ) + + +class UpdateBucketRequest(proto.Message): + r"""The parameters to ``UpdateBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to update. 
+ + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + Also requires permission + "resourcemanager.projects.updateLiens" to set the locked + property + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The updated bucket. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask that specifies the fields in ``bucket`` + that need an update. A bucket field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=retention_days``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + bucket = proto.Field( + proto.MESSAGE, + number=2, + message='LogBucket', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetBucketRequest(proto.Message): + r"""The parameters to ``GetBucket``. + Attributes: + name (str): + Required. The resource name of the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBucketRequest(proto.Message): + r"""The parameters to ``DeleteBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to delete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeleteBucketRequest(proto.Message): + r"""The parameters to ``UndeleteBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to undelete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListViewsRequest(proto.Message): + r"""The parameters to ``ListViews``. + Attributes: + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + page_token (str): + Optional. 
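As a concrete instance of the ``UpdateBucket`` parameters above (``field_mask_pb2`` is already imported by this module; the bucket path is hypothetical)::

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2 import types

    request = types.UpdateBucketRequest(
        name="projects/my-project/locations/global/buckets/my-bucket",
        bucket=types.LogBucket(retention_days=60),
        update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
    )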
If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListViewsResponse(proto.Message): + r"""The response from ListViews. + Attributes: + views (Sequence[google.cloud.logging_v2.types.LogView]): + A list of views. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + views = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogView', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateViewRequest(proto.Message): + r"""The parameters to ``CreateView``. + Attributes: + parent (str): + Required. The bucket in which to create the view + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + view_id (str): + Required. The id to use for this view. + view (google.cloud.logging_v2.types.LogView): + Required. The new view. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + view_id = proto.Field( + proto.STRING, + number=2, + ) + view = proto.Field( + proto.MESSAGE, + number=3, + message='LogView', + ) + + +class UpdateViewRequest(proto.Message): + r"""The parameters to ``UpdateView``. + Attributes: + name (str): + Required. The full resource name of the view to update + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + view (google.cloud.logging_v2.types.LogView): + Required. The updated view. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in ``view`` + that need an update. A field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.MESSAGE, + number=2, + message='LogView', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetViewRequest(proto.Message): + r"""The parameters to ``GetView``. + Attributes: + name (str): + Required. The resource name of the policy: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
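Similarly, a view update touches only the masked fields; a short sketch with made-up names::

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2 import types

    request = types.UpdateViewRequest(
        name="projects/my-project/locations/global/buckets/my-bucket/views/my-view",
        view=types.LogView(filter='LOG_ID("stdout")'),
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )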
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteViewRequest(proto.Message): + r"""The parameters to ``DeleteView``. + Attributes: + name (str): + Required. The full resource name of the view to delete: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSinksRequest(proto.Message): + r"""The parameters to ``ListSinks``. + Attributes: + parent (str): + Required. The parent resource whose sinks are to be listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListSinksResponse(proto.Message): + r"""Result returned from ``ListSinks``. + Attributes: + sinks (Sequence[google.cloud.logging_v2.types.LogSink]): + A list of sinks. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + sinks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogSink', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSinkRequest(proto.Message): + r"""The parameters to ``GetSink``. + Attributes: + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSinkRequest(proto.Message): + r"""The parameters to ``CreateSink``. + Attributes: + parent (str): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + sink (google.cloud.logging_v2.types.LogSink): + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity (bool): + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is + omitted or set to false, and if the sink's parent is a + project, then the value returned as ``writer_identity`` is + the same group or service account used by Logging before the + addition of writer identities to this API. 
The sink's + destination must be in the same project as the sink itself. + + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, + see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateSinkRequest(proto.Message): + r"""The parameters to ``UpdateSink``. + Attributes: + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + sink (google.cloud.logging_v2.types.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + unique_writer_identity (bool): + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: + + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren At some point in the + future, behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSinkRequest(proto.Message): + r"""The parameters to ``DeleteSink``. + Attributes: + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
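To illustrate the ``unique_writer_identity`` rules spelled out above (sink names are hypothetical; note that changing it from true back to false is rejected)::

    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2 import types

    request = types.UpdateSinkRequest(
        sink_name="projects/my-project/sinks/my-sink",
        sink=types.LogSink(name="my-sink", filter="severity>=WARNING"),
        unique_writer_identity=True,  # false -> true switches to a unique service account
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )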
+ """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + + +class LogExclusion(proto.Message): + r"""Specifies a set of log entries that are not to be stored in + Logging. If your GCP resource receives a large volume of logs, + you can use exclusions to reduce your chargeable logs. + Exclusions are processed after log sinks, so you can export log + entries before they are excluded. Note that organization-level + and folder-level exclusions don't apply to child resources, and + that you can't exclude audit log entries. + + Attributes: + name (str): + Required. A client-assigned identifier, such as + ``"load-balancer-exclusion"``. Identifiers are limited to + 100 characters and can include only letters, digits, + underscores, hyphens, and periods. First character has to be + alphanumeric. + description (str): + Optional. A description of this exclusion. + filter (str): + Required. An `advanced logs + filter `__ + that matches the log entries to be excluded. By using the + `sample + function `__, + you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity + log entries from Google Cloud Storage buckets: + + ``"resource.type=gcs_bucket severity`__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve CMEK settings. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP organization. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCmekSettingsRequest(proto.Message): + r"""The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the CMEK settings to update. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP organization. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + Required. The CMEK settings to update. + + See `Enabling CMEK for Logs + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``cmek_settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. Output + only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. 
+ + Example: ``"updateMask=kmsKeyName"`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=2, + message='CmekSettings', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class CmekSettings(proto.Message): + r"""Describes the customer-managed encryption key (CMEK) settings + associated with a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be configured for + GCP organizations. Once configured, it applies to all projects and + folders in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Attributes: + name (str): + Output only. The resource name of the CMEK + settings. + kms_key_name (str): + The resource name for the configured Cloud KMS key. + + KMS key name format: + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` + + To enable CMEK for the Logs Router, set this field to a + valid ``kms_key_name`` for which the associated service + account has the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned + for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Logs Router, set this field to an + empty string. + + See `Enabling CMEK for Logs + Router `__ + for more information. + service_account_id (str): + Output only. The service account that will be used by the + Logs Router to access your Cloud KMS key. + + Before enabling CMEK for Logs Router, you must first assign + the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to + the service account that the Logs Router will use to access + your Cloud KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Logs + Router `__ + for more information. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + service_account_id = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py new file mode 100644 index 000000000000..252e43760b02 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogMetric', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'GetLogMetricRequest', + 'CreateLogMetricRequest', + 'UpdateLogMetricRequest', + 'DeleteLogMetricRequest', + }, +) + + +class LogMetric(proto.Message): + r"""Describes a logs-based metric. The value of the metric is the + number of log entries that match a logs filter in a given time + interval. + Logs-based metrics can also be used to extract values from logs + and create a distribution of the values. The distribution + records the statistics of the extracted values along with an + optional histogram of the values as specified by the bucket + options. + + Attributes: + name (str): + Required. The client-assigned metric identifier. Examples: + ``"error_count"``, ``"nginx/requests"``. + + Metric identifiers are limited to 100 characters and can + include only the following characters: ``A-Z``, ``a-z``, + ``0-9``, and the special characters ``_-.,+!*',()%/``. The + forward-slash character (``/``) denotes a hierarchy of name + pieces, and it cannot be the first character of the name. + + The metric identifier in this field must not be + `URL-encoded `__. + However, when the metric identifier appears as the + ``[METRIC_ID]`` part of a ``metric_name`` API parameter, + then the metric identifier must be URL-encoded. Example: + ``"projects/my-project/metrics/nginx%2Frequests"``. + description (str): + Optional. A description of this metric, which + is used in documentation. The maximum length of + the description is 8000 characters. + filter (str): + Required. An `advanced logs + filter `__ + which is used to match log entries. Example: + + :: + + "resource.type=gae_app AND severity>=ERROR" + + The maximum length of the filter is 20000 characters. + metric_descriptor (google.api.metric_pb2.MetricDescriptor): + Optional. The metric descriptor associated with the + logs-based metric. If unspecified, it uses a default metric + descriptor with a DELTA metric kind, INT64 value type, with + no labels and a unit of "1". Such a metric counts the number + of log entries matching the ``filter`` expression. + + The ``name``, ``type``, and ``description`` fields in the + ``metric_descriptor`` are output only, and is constructed + using the ``name`` and ``description`` field in the + LogMetric. + + To create a logs-based metric that records a distribution of + log values, a DELTA metric kind with a DISTRIBUTION value + type must be used along with a ``value_extractor`` + expression in the LogMetric. + + Each label in the metric descriptor must have a matching + label name as the key and an extractor expression as the + value in the ``label_extractors`` map. + + The ``metric_kind`` and ``value_type`` fields in the + ``metric_descriptor`` cannot be updated once initially + configured. New labels can be added in the + ``metric_descriptor``, but existing labels cannot be + modified except for their description. + value_extractor (str): + Optional. A ``value_extractor`` is required when using a + distribution logs-based metric to extract the values to + record from a log entry. 
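To ground the extractor fields documented here (their syntax is spelled out just below), a hedged sketch of a distribution-type metric; the filter, field paths, and regex are illustrative only::

    from google.api import metric_pb2
    from google.cloud.logging_v2 import types

    metric = types.LogMetric(
        name="request_quantity",
        filter="resource.type=gae_app AND severity>=INFO",
        value_extractor=r'REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")',
        label_extractors={"url": "EXTRACT(httpRequest.requestUrl)"},
        metric_descriptor=metric_pb2.MetricDescriptor(
            metric_kind=metric_pb2.MetricDescriptor.DELTA,        # required kind
            value_type=metric_pb2.MetricDescriptor.DISTRIBUTION,  # needs value_extractor
        ),
    )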
Two functions are supported for
+            value extraction: ``EXTRACT(field)`` or
+            ``REGEXP_EXTRACT(field, regex)``. The arguments are:
+
+            1. field: The name of the log entry field from which the
+               value is to be extracted.
+            2. regex: A regular expression using the Google RE2 syntax
+               (https://github.com/google/re2/wiki/Syntax) with a single
+               capture group to extract data from the specified log
+               entry field. The value of the field is converted to a
+               string before applying the regex. It is an error to
+               specify a regex that does not include exactly one capture
+               group.
+
+            The result of the extraction must be convertible to a double
+            type, as the distribution always records double values. If
+            either the extraction or the conversion to double fails,
+            then those values are not recorded in the distribution.
+
+            Example:
+            ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")``
+        label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]):
+            Optional. A map from a label key string to an extractor
+            expression which is used to extract data from a log entry
+            field and assign as the label value. Each label key
+            specified in the LabelDescriptor must have an associated
+            extractor expression in this map. The syntax of the
+            extractor expression is the same as for the
+            ``value_extractor`` field.
+
+            The extracted value is converted to the type defined in the
+            label descriptor. If either the extraction or the type
+            conversion fails, the label will have a default value. The
+            default value for a string label is an empty string, for an
+            integer label it is 0, and for a boolean label it is
+            ``false``.
+
+            Note that there are upper bounds on the maximum number of
+            labels and the number of active time series that are allowed
+            in a project.
+        bucket_options (google.api.distribution_pb2.BucketOptions):
+            Optional. The ``bucket_options`` are required when the
+            logs-based metric is using a DISTRIBUTION value type and it
+            describes the bucket boundaries used to create a histogram
+            of the extracted values.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The creation timestamp of the
+            metric.
+            This field may not be present for older metrics.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The last update timestamp of the
+            metric.
+            This field may not be present for older metrics.
+        version (google.cloud.logging_v2.types.LogMetric.ApiVersion):
+            Deprecated. The API version that created or
+            updated this metric. The v2 format is used by
+            default and cannot be changed.
+ """ + class ApiVersion(proto.Enum): + r"""Logging API version.""" + V2 = 0 + V1 = 1 + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + metric_descriptor = proto.Field( + proto.MESSAGE, + number=5, + message=metric_pb2.MetricDescriptor, + ) + value_extractor = proto.Field( + proto.STRING, + number=6, + ) + label_extractors = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + bucket_options = proto.Field( + proto.MESSAGE, + number=8, + message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version = proto.Field( + proto.ENUM, + number=4, + enum=ApiVersion, + ) + + +class ListLogMetricsRequest(proto.Message): + r"""The parameters to ListLogMetrics. + Attributes: + parent (str): + Required. The name of the project containing the metrics: + + :: + + "projects/[PROJECT_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLogMetricsResponse(proto.Message): + r"""Result returned from ListLogMetrics. + Attributes: + metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): + A list of logs-based metrics. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + metrics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogMetric', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLogMetricRequest(proto.Message): + r"""The parameters to GetLogMetric. + Attributes: + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + """ + + metric_name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLogMetricRequest(proto.Message): + r"""The parameters to CreateLogMetric. + Attributes: + parent (str): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The new logs-based metric, which + must not have an identifier that already exists. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message='LogMetric', + ) + + +class UpdateLogMetricRequest(proto.Message): + r"""The parameters to UpdateLogMetric. + Attributes: + metric_name (str): + Required. 
The resource name of the metric to update:
+
+            ::
+
+                "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
+
+            The updated metric must be provided in the request and its
+            ``name`` field must be the same as ``[METRIC_ID]``. If the
+            metric does not exist in ``[PROJECT_ID]``, then a new metric
+            is created.
+        metric (google.cloud.logging_v2.types.LogMetric):
+            Required. The updated metric.
+    """
+
+    metric_name = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    metric = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='LogMetric',
+    )
+
+
+class DeleteLogMetricRequest(proto.Message):
+    r"""The parameters to DeleteLogMetric.
+    Attributes:
+        metric_name (str):
+            Required. The resource name of the metric to delete:
+
+            ::
+
+                "projects/[PROJECT_ID]/metrics/[METRIC_ID]".
+    """
+
+    metric_name = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini b/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini
new file mode 100644
index 000000000000..4505b485436b
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py b/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py
new file mode 100644
index 000000000000..10ed0a998e1e
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+
+nox.options.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+]
+
+@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/logging_v2/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python='3.7')
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py b/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py new file mode 100644 index 000000000000..2a368fb9ccea --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class loggingCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_bucket': ('parent', 'bucket_id', 'bucket', ), + 'create_exclusion': ('parent', 'exclusion', ), + 'create_log_metric': ('parent', 'metric', ), + 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), + 'create_view': ('parent', 'view_id', 'view', ), + 'delete_bucket': ('name', ), + 'delete_exclusion': ('name', ), + 'delete_log': ('log_name', ), + 'delete_log_metric': ('metric_name', ), + 'delete_sink': ('sink_name', ), + 'delete_view': ('name', ), + 'get_bucket': ('name', ), + 'get_cmek_settings': ('name', ), + 'get_exclusion': ('name', ), + 'get_log_metric': ('metric_name', ), + 'get_sink': ('sink_name', ), + 'get_view': ('name', ), + 'list_buckets': ('parent', 'page_token', 'page_size', ), + 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_log_metrics': ('parent', 'page_token', 'page_size', ), + 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), + 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), + 'list_sinks': ('parent', 'page_token', 'page_size', ), + 'list_views': ('parent', 'page_token', 'page_size', ), + 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), + 'undelete_bucket': ('name', ), + 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), + 'update_exclusion': ('name', 'exclusion', 'update_mask', ), + 'update_log_metric': ('metric_name', 'metric', ), + 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), + 'update_view': ('name', 'view', 'update_mask', ), + 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=loggingCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the logging client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool makes a best-effort attempt at converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/setup.py b/packages/google-cloud-logging/owl-bot-staging/v2/setup.py new file mode 100644 index 000000000000..4b98728b93f3 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/setup.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-logging', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py new file mode 100644 index 000000000000..979cbd360592 --- /dev/null +++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -0,0 +1,6447 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import logging_config +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConfigServiceV2Client._get_default_mtls_endpoint(None) is None + assert ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, +]) +def test_config_service_v2_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ConfigServiceV2GrpcTransport, "grpc"), + (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, +]) +def test_config_service_v2_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_config_service_v2_client_get_transport_class(): + transport = ConfigServiceV2Client.get_transport_class() + available_transports = [ + transports.ConfigServiceV2GrpcTransport, + ] + assert transport in available_transports + + transport = ConfigServiceV2Client.get_transport_class("grpc") + assert transport == transports.ConfigServiceV2GrpcTransport 
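The `test__get_default_mtls_endpoint` assertions above fully pin down the endpoint conversion. For readers skimming the patch, a minimal re-implementation that satisfies exactly those assertions (my own sketch, not the library's actual code) looks like this:

```python
def get_default_mtls_endpoint(api_endpoint):
    """Sketch only: insert an "mtls" label after the service name
    for *.googleapis.com hosts; pass everything else through."""
    if not api_endpoint or not api_endpoint.endswith(".googleapis.com"):
        return api_endpoint  # covers None and non-Google endpoints
    host = api_endpoint[: -len(".googleapis.com")]  # e.g. "example.sandbox"
    labels = host.split(".")
    if "mtls" not in labels:
        labels.insert(1, "mtls")  # "example" -> "example.mtls"
    return ".".join(labels) + ".googleapis.com"

assert get_default_mtls_endpoint(None) is None
assert get_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert get_default_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert get_default_mtls_endpoint("api.example.com") == "api.example.com"
```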
+ + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +def test_config_service_v2_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
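The environment-variable checks above and below reduce to a small decision table. Distilled from what these tests assert (this is my own summary, not the client's actual implementation):

```python
from google.auth.exceptions import MutualTLSChannelError

def select_endpoint(use_mtls_env, have_client_cert, endpoint, mtls_endpoint):
    """Endpoint choice as pinned down by the GOOGLE_API_USE_MTLS_ENDPOINT cases."""
    if use_mtls_env == "never":
        return endpoint
    if use_mtls_env == "always":
        return mtls_endpoint
    if use_mtls_env == "auto":
        # mTLS is used only when a client certificate is actually available.
        return mtls_endpoint if have_client_cert else endpoint
    # Anything else is rejected; GOOGLE_API_USE_CLIENT_CERTIFICATE is
    # validated similarly (with ValueError), as the next check exercises.
    raise MutualTLSChannelError(
        "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value: %s" % use_mtls_env
    )

assert select_endpoint("never", True, "default", "mtls") == "default"
assert select_endpoint("always", False, "default", "mtls") == "mtls"
assert select_endpoint("auto", True, "default", "mtls") == "mtls"
assert select_endpoint("auto", False, "default", "mtls") == "default"
```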
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
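Taken together, the ClientOptions tests in this block describe the client's public configuration surface. A hypothetical user-level configuration combining the options exercised here (the endpoint and quota-project values are made up; `credentials_file`, checked just below, is passed the same way):

```python
from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

options = client_options.ClientOptions(
    api_endpoint="logging.googleapis.com",  # override the endpoint explicitly
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
    quota_project_id="my-quota-project",    # hypothetical quota project
)
client = ConfigServiceV2Client(
    client_options=options,
    credentials=ga_credentials.AnonymousCredentials(),  # as these tests do
)
assert client.transport._host == "logging.googleapis.com:443"
```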
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_config_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ConfigServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_buckets(transport: str = 'grpc', request_type=logging_config.ListBucketsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBucketsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_buckets_from_dict(): + test_list_buckets(request_type=dict) + + +def test_list_buckets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + client.list_buckets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + + +@pytest.mark.asyncio +async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
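Before the stub is mocked below, it is worth spelling out why an empty `request_type()` is a valid request at all: every proto3 field has a default, so nothing is required. With the library installed, a quick check (my own illustration):

```python
from google.cloud.logging_v2.types import logging_config

request = logging_config.ListBucketsRequest()
assert request.parent == ""      # unset strings read back as ""
assert request.page_token == ""
assert request.page_size == 0    # unset ints read back as 0
```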
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_buckets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListBucketsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBucketsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_from_dict():
+    await test_list_buckets_async(request_type=dict)
+
+
+def test_list_buckets_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListBucketsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        call.return_value = logging_config.ListBucketsResponse()
+        client.list_buckets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListBucketsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse())
+        await client.list_buckets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_buckets_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListBucketsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_buckets(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
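The assertions that follow check exactly what the comment above promises: `args[0]` is a request object the client synthesized from the flattened `parent='parent_value'` keyword. A sketch of that synthesis, including the mixed-usage ValueError that the `*_flattened_error` tests expect (my own illustration of the convention, not the client's code):

```python
from google.cloud.logging_v2.types import logging_config

def build_request(request=None, *, parent=None):
    """Sketch: flattened kwargs populate a fresh request object."""
    if request is not None and parent is not None:
        # Mixing a request object with flattened fields is rejected.
        raise ValueError("Cannot pass both a request object and flattened fields.")
    if request is None:
        request = logging_config.ListBucketsRequest()
        if parent is not None:
            request.parent = parent
    return request

assert build_request(parent="parent_value").parent == "parent_value"
```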
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_buckets_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_buckets(
+            logging_config.ListBucketsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_buckets(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_buckets(
+            logging_config.ListBucketsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_buckets_pager():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[],
+                next_page_token='def',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_buckets(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogBucket)
+                   for i in results)
+
+
+def test_list_buckets_pages():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Set the response to a series of pages.
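The pager tests queue a fixed series of responses below and then let the pager drain them by chasing `next_page_token`; the trailing RuntimeError in each series simply guards against one fetch too many. The core loop is essentially the following (a simplified stand-in for the google.api_core pagers, not the real implementation):

```python
def iterate_all(call, request):
    """Re-issue the request until a page arrives without a next_page_token."""
    while True:
        page = call(request)
        for bucket in page["buckets"]:
            yield bucket
        if not page["next_page_token"]:
            return
        request = dict(request, page_token=page["next_page_token"])

pages = iter([
    {"buckets": ["b1", "b2"], "next_page_token": "abc"},
    {"buckets": ["b3"], "next_page_token": ""},
])
assert list(iterate_all(lambda request: next(pages), {"parent": "p"})) == ["b1", "b2", "b3"]
```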
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[],
+                next_page_token='def',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_buckets(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_pager():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[],
+                next_page_token='def',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_buckets(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogBucket)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_pages():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[],
+                next_page_token='def',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListBucketsResponse(
+                buckets=[
+                    logging_config.LogBucket(),
+                    logging_config.LogBucket(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_buckets(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_bucket(transport: str = 'grpc', request_type=logging_config.GetBucketRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
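The mocking idiom repeated throughout this file deserves a note: gRPC stubs are callable objects, so patching `__call__` on the stub's type intercepts the request before any channel I/O happens. A self-contained toy version of the same pattern (the names here are mine, not the library's):

```python
from unittest import mock

class FakeStub:
    """Stands in for a gRPC multicallable; real stubs are also plain callables."""
    def __call__(self, request):
        raise RuntimeError("unit tests must never reach a real channel")

stub = FakeStub()
with mock.patch.object(FakeStub, "__call__") as call:
    call.return_value = {"name": "name_value"}  # stand-in for a LogBucket
    response = stub("a request")

assert response == {"name": "name_value"}
_, args, _ = call.mock_calls[0]
assert args[0] == "a request"  # the intercepted request, as the tests assert
```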
+    with mock.patch.object(
+            type(client.transport.get_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        )
+        response = client.get_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetBucketRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+def test_get_bucket_from_dict():
+    test_get_bucket(request_type=dict)
+
+
+def test_get_bucket_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_bucket),
+            '__call__') as call:
+        client.get_bucket()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetBucketRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        ))
+        response = await client.get_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetBucketRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_bucket_async_from_dict():
+    await test_get_bucket_async(request_type=dict)
+
+
+def test_get_bucket_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
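The "field header" mentioned above is the `x-goog-request-params` metadata entry that routes the request server-side. Its shape is simple enough to sketch by hand (simplified; the real helper is google.api_core.gapic_v1.routing_header):

```python
def to_routing_header(params):
    """Simplified: join key=value pairs with '&' under x-goog-request-params."""
    return ("x-goog-request-params", "&".join("%s=%s" % (k, v) for k, v in params))

assert to_routing_header([("name", "name/value")]) == (
    "x-goog-request-params",
    "name=name/value",
)
```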
+ request = logging_config.GetBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_bucket(transport: str = 'grpc', request_type=logging_config.CreateBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + response = client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_create_bucket_from_dict(): + test_create_bucket(request_type=dict) + + +def test_create_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        client.create_bucket()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateBucketRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        ))
+        response = await client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateBucketRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_create_bucket_async_from_dict():
+    await test_create_bucket_async(request_type=dict)
+
+
+def test_create_bucket_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateBucketRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        call.return_value = logging_config.LogBucket()
+        client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_bucket_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateBucketRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
+        await client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
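The `await` above only works because the mocked return value was wrapped in grpc_helpers_async.FakeUnaryUnaryCall: the async client awaits the stub's result, and a bare message is not awaitable. A minimal stand-in that shows the mechanism (my own toy, not the real helper):

```python
import asyncio

class FakeCall:
    """Toy awaitable wrapper, analogous in spirit to FakeUnaryUnaryCall."""
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def resolve():
            return self._response
        return resolve().__await__()

async def main():
    assert await FakeCall("response") == "response"

asyncio.run(main())
```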
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_update_bucket(transport: str = 'grpc', request_type=logging_config.UpdateBucketRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        )
+        response = client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateBucketRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+def test_update_bucket_from_dict():
+    test_update_bucket(request_type=dict)
+
+
+def test_update_bucket_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        client.update_bucket()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateBucketRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        ))
+        response = await client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateBucketRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) + + +def test_update_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_bucket(transport: str = 'grpc', request_type=logging_config.DeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_bucket_from_dict(): + test_delete_bucket(request_type=dict) + + +def test_delete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + client.delete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + +@pytest.mark.asyncio +async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) + + +def test_delete_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = None + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
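The `_, _, kw = call.mock_calls[0]` unpacking on the next line relies on each mock_calls entry being a (name, args, kwargs) triple, which is standard unittest.mock behavior:

```python
from unittest import mock

m = mock.MagicMock()
m(1, 2, metadata=[("x-goog-request-params", "name=name/value")])

name, args, kwargs = m.mock_calls[0]  # every entry is a (name, args, kwargs) triple
assert args == (1, 2)
assert ("x-goog-request-params", "name=name/value") in kwargs["metadata"]
```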
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_undelete_bucket(transport: str = 'grpc', request_type=logging_config.UndeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_undelete_bucket_from_dict(): + test_undelete_bucket(request_type=dict) + + +def test_undelete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + +@pytest.mark.asyncio +async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) + + +def test_undelete_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
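+    # Patching __call__ on the multicallable's type intercepts the RPC at
+    # the transport layer, so no channel or network traffic is involved.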
+ with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = None + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_undelete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_views(transport: str = 'grpc', request_type=logging_config.ListViewsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_views_from_dict(): + test_list_views(request_type=dict) + + +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
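+    # The mocked stub records the request the client builds for a
+    # no-argument call; it is asserted below to equal a default-constructed
+    # ListViewsRequest.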
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        client.list_views()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListViewsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListViewsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListViewsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_views_async_from_dict():
+    await test_list_views_async(request_type=dict)
+
+
+def test_list_views_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListViewsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        call.return_value = logging_config.ListViewsResponse()
+        client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_views_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListViewsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse())
+        await client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_views_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListViewsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_views(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_views_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_views(
+            logging_config.ListViewsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_views_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_views(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_views_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_views(
+            logging_config.ListViewsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_views_pager():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Set the response to a series of pages.
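+        # Each invocation of the mocked stub consumes the next item of
+        # side_effect; the trailing RuntimeError makes the test fail fast
+        # if the pager fetches more pages than are staged here.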
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_views(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogView)
+                   for i in results)
+
+
+def test_list_views_pages():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_views(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_views_async_pager():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_views(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogView)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_views_async_pages():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
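+        # With new_callable=mock.AsyncMock each staged response is delivered
+        # from an awaitable, mirroring how a real grpc.aio call resolves.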
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_views(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_view(transport: str = 'grpc', request_type=logging_config.GetViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        )
+        response = client.get_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetViewRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+def test_get_view_from_dict():
+    test_get_view(request_type=dict)
+
+
+def test_get_view_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_view),
+            '__call__') as call:
+        client.get_view()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetViewRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        ))
+        response = await client.get_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_view(transport: str = 'grpc', request_type=logging_config.CreateViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + + # Establish that the response is the type that we expect. 
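+    # proto-plus messages compare field-by-field, so these assertions
+    # confirm the response carries exactly the values staged on the mock.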
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+def test_create_view_from_dict():
+    test_create_view(request_type=dict)
+
+
+def test_create_view_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_view),
+            '__call__') as call:
+        client.create_view()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateViewRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        ))
+        response = await client.create_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateViewRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+@pytest.mark.asyncio
+async def test_create_view_async_from_dict():
+    await test_create_view_async(request_type=dict)
+
+
+def test_create_view_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateViewRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_view),
+            '__call__') as call:
+        call.return_value = logging_config.LogView()
+        client.create_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_view_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateViewRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_view),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView())
+        await client.create_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_update_view(transport: str = 'grpc', request_type=logging_config.UpdateViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        )
+        response = client.update_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateViewRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+def test_update_view_from_dict():
+    test_update_view(request_type=dict)
+
+
+def test_update_view_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_view),
+            '__call__') as call:
+        client.update_view()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateViewRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        ))
+        response = await client.update_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
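+        # (The async tests only assert truthiness of mock_calls rather than
+        # an exact count; the intent is the same as the sync check above.)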
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) + + +def test_update_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_view(transport: str = 'grpc', request_type=logging_config.DeleteViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_from_dict(): + test_delete_view(request_type=dict) + + +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = None + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
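+    # kw['metadata'] is a sequence of (key, value) tuples, so a plain
+    # membership test is enough to verify the routing header was attached.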
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_list_sinks(transport: str = 'grpc', request_type=logging_config.ListSinksRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListSinksResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListSinksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSinksPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_sinks_from_dict():
+    test_list_sinks(request_type=dict)
+
+
+def test_list_sinks_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        client.list_sinks()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListSinksRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListSinksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSinksAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_from_dict():
+    await test_list_sinks_async(request_type=dict)
+
+
+def test_list_sinks_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListSinksRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        call.return_value = logging_config.ListSinksResponse()
+        client.list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListSinksRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse())
+        await client.list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_sinks_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListSinksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_sinks(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_sinks_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_sinks(
+            logging_config.ListSinksRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
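+        # The flattened 'parent' keyword below is merged by the client into
+        # a ListSinksRequest before the (mocked) transport is invoked.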
+        response = await client.list_sinks(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_sinks(
+            logging_config.ListSinksRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_sinks_pager():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[],
+                next_page_token='def',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_sinks(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogSink)
+                   for i in results)
+
+
+def test_list_sinks_pages():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[],
+                next_page_token='def',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_sinks(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_pager():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[],
+                next_page_token='def',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_sinks(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogSink)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_pages():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sinks),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[],
+                next_page_token='def',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListSinksResponse(
+                sinks=[
+                    logging_config.LogSink(),
+                    logging_config.LogSink(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_sinks(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_sink(transport: str = 'grpc', request_type=logging_config.GetSinkRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+            bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True),
+        )
+        response = client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetSinkRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+def test_get_sink_from_dict():
+    test_get_sink(request_type=dict)
+
+
+def test_get_sink_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        client.get_sink()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetSinkRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetSinkRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+@pytest.mark.asyncio
+async def test_get_sink_async_from_dict():
+    await test_get_sink_async(request_type=dict)
+
+
+def test_get_sink_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetSinkRequest()
+
+    request.sink_name = 'sink_name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
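+    # An empty LogSink is staged as the reply; this test only cares about
+    # the request routing derived from 'sink_name'.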
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        call.return_value = logging_config.LogSink()
+        client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'sink_name=sink_name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_sink_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetSinkRequest()
+
+    request.sink_name = 'sink_name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink())
+        await client.get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'sink_name=sink_name/value',
+    ) in kw['metadata']
+
+
+def test_get_sink_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_sink(
+            sink_name='sink_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].sink_name == 'sink_name_value'
+
+
+def test_get_sink_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_sink(
+            logging_config.GetSinkRequest(),
+            sink_name='sink_name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_sink(
+            sink_name='sink_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + + +@pytest.mark.asyncio +async def test_get_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + + +def test_create_sink(transport: str = 'grpc', request_type=logging_config.CreateSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + response = client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_create_sink_from_dict(): + test_create_sink(request_type=dict) + + +def test_create_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + client.create_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + +@pytest.mark.asyncio +async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
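+    # grpc_helpers_async.FakeUnaryUnaryCall is a test helper that wraps the
+    # staged LogSink in an awaitable call object, standing in for a real
+    # grpc.aio unary-unary response.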
+    with mock.patch.object(
+            type(client.transport.create_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client.create_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateSinkRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+@pytest.mark.asyncio
+async def test_create_sink_async_from_dict():
+    await test_create_sink_async(request_type=dict)
+
+
+def test_create_sink_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateSinkRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_sink),
+            '__call__') as call:
+        call.return_value = logging_config.LogSink()
+        client.create_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_sink_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateSinkRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_sink),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink())
+        await client.create_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_create_sink_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
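+    # NOTE (editorial sketch): with flattened keyword arguments the generated
+    # client constructs the request itself, so the call exercised below is
+    # expected to be equivalent to building the request explicitly:
+    #
+    #     request = logging_config.CreateSinkRequest(
+    #         parent='parent_value',
+    #         sink=logging_config.LogSink(name='name_value'),
+    #     )
+    #     client.create_sink(request=request)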
+    with mock.patch.object(
+            type(client.transport.create_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_sink(
+            parent='parent_value',
+            sink=logging_config.LogSink(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].sink == logging_config.LogSink(name='name_value')
+
+
+def test_create_sink_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_sink(
+            logging_config.CreateSinkRequest(),
+            parent='parent_value',
+            sink=logging_config.LogSink(name='name_value'),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_sink(
+            parent='parent_value',
+            sink=logging_config.LogSink(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].sink == logging_config.LogSink(name='name_value')
+
+
+@pytest.mark.asyncio
+async def test_create_sink_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_sink(
+            logging_config.CreateSinkRequest(),
+            parent='parent_value',
+            sink=logging_config.LogSink(name='name_value'),
+        )
+
+
+def test_update_sink(transport: str = 'grpc', request_type=logging_config.UpdateSinkRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+            bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True),
+        )
+        response = client.update_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateSinkRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+def test_update_sink_from_dict():
+    test_update_sink(request_type=dict)
+
+
+def test_update_sink_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_sink),
+            '__call__') as call:
+        client.update_sink()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateSinkRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client.update_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateSinkRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_update_sink_async_from_dict(): + await test_update_sink_async(request_type=dict) + + +def test_update_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +def test_update_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_sink( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
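+        # NOTE (editorial sketch): ``update_mask`` is a
+        # ``google.protobuf.field_mask_pb2.FieldMask`` whose ``paths`` name
+        # the sink fields the server should modify; ``'paths_value'`` is a
+        # placeholder, whereas a realistic mask would look like:
+        #
+        #     field_mask_pb2.FieldMask(paths=['destination', 'filter'])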
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].sink_name == 'sink_name_value'
+        assert args[0].sink == logging_config.LogSink(name='name_value')
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+def test_update_sink_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_sink(
+            logging_config.UpdateSinkRequest(),
+            sink_name='sink_name_value',
+            sink=logging_config.LogSink(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_sink(
+            sink_name='sink_name_value',
+            sink=logging_config.LogSink(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].sink_name == 'sink_name_value'
+        assert args[0].sink == logging_config.LogSink(name='name_value')
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+@pytest.mark.asyncio
+async def test_update_sink_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_sink(
+            logging_config.UpdateSinkRequest(),
+            sink_name='sink_name_value',
+            sink=logging_config.LogSink(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+def test_delete_sink(transport: str = 'grpc', request_type=logging_config.DeleteSinkRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.DeleteSinkRequest()
+
+    # Establish that the response is the type that we expect.
+ assert response is None + + +def test_delete_sink_from_dict(): + test_delete_sink(request_type=dict) + + +def test_delete_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + +@pytest.mark.asyncio +async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_sink_async_from_dict(): + await test_delete_sink_async(request_type=dict) + + +def test_delete_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = None + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'sink_name=sink_name/value',
+    ) in kw['metadata']
+
+
+def test_delete_sink_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_sink(
+            sink_name='sink_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].sink_name == 'sink_name_value'
+
+
+def test_delete_sink_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_sink(
+            logging_config.DeleteSinkRequest(),
+            sink_name='sink_name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_sink(
+            sink_name='sink_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].sink_name == 'sink_name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_sink(
+            logging_config.DeleteSinkRequest(),
+            sink_name='sink_name_value',
+        )
+
+
+def test_list_exclusions(transport: str = 'grpc', request_type=logging_config.ListExclusionsRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListExclusionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListExclusionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListExclusionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_exclusions_from_dict():
+    test_list_exclusions(request_type=dict)
+
+
+def test_list_exclusions_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        client.list_exclusions()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListExclusionsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListExclusionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListExclusionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_from_dict():
+    await test_list_exclusions_async(request_type=dict)
+
+
+def test_list_exclusions_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListExclusionsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        call.return_value = logging_config.ListExclusionsResponse()
+        client.list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListExclusionsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse())
+        await client.list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_exclusions_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListExclusionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_exclusions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_exclusions_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_exclusions(
+            logging_config.ListExclusionsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_exclusions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_exclusions(
+            logging_config.ListExclusionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_exclusions_pager():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
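+    # NOTE (editorial sketch): the pager below drives the mocked stub once
+    # per page: each staged ListExclusionsResponse is consumed in turn, and
+    # iteration ends when a response carries an empty ``next_page_token``.
+    # The trailing RuntimeError is a tripwire that only fires if more pages
+    # are requested than were staged. Typical consumption looks like:
+    #
+    #     for page in pager.pages:              # one stub call per page
+    #         for exclusion in page.exclusions:
+    #             ...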
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[],
+                next_page_token='def',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_exclusions(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogExclusion)
+                   for i in results)
+
+
+def test_list_exclusions_pages():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[],
+                next_page_token='def',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_exclusions(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_pager():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
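+        # NOTE (editorial sketch): when ``side_effect`` is an iterable, each
+        # invocation of the mock returns the next element, and an exception
+        # class is raised rather than returned, e.g.:
+        #
+        #     m = mock.Mock(side_effect=[1, 2, RuntimeError])
+        #     m(), m()   # -> 1, 2; a third call would raise RuntimeError
+        #
+        # so the four responses staged below stand in for four pages.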
+        call.side_effect = (
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[],
+                next_page_token='def',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_exclusions(request={})
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogExclusion)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_pages():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_exclusions),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[],
+                next_page_token='def',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListExclusionsResponse(
+                exclusions=[
+                    logging_config.LogExclusion(),
+                    logging_config.LogExclusion(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_exclusions(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_exclusion(transport: str = 'grpc', request_type=logging_config.GetExclusionRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        )
+        response = client.get_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogExclusion)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True
+
+
+def test_get_exclusion_from_dict():
+    test_get_exclusion(request_type=dict)
+
+
+def test_get_exclusion_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        client.get_exclusion()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetExclusionRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        ))
+        response = await client.get_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogExclusion)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_async_from_dict():
+    await test_get_exclusion_async(request_type=dict)
+
+
+def test_get_exclusion_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetExclusionRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        call.return_value = logging_config.LogExclusion()
+        client.get_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
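+    # NOTE (editorial sketch): the expected metadata tuple mirrors the
+    # routing header the generated client builds via google.api_core;
+    # assuming that helper, it can be reproduced with:
+    #
+    #     gapic_v1.routing_header.to_grpc_metadata([('name', 'name/value')])
+    #     # -> ('x-goog-request-params', 'name=name/value')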
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetExclusionRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        await client.get_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_get_exclusion_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogExclusion()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_exclusion(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_get_exclusion_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_exclusion(
+            logging_config.GetExclusionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_exclusion(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_exclusion(
+            logging_config.GetExclusionRequest(),
+            name='name_value',
+        )
+
+
+def test_create_exclusion(transport: str = 'grpc', request_type=logging_config.CreateExclusionRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        )
+        response = client.create_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogExclusion)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True
+
+
+def test_create_exclusion_from_dict():
+    test_create_exclusion(request_type=dict)
+
+
+def test_create_exclusion_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_exclusion),
+            '__call__') as call:
+        client.create_exclusion()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateExclusionRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        ))
+        response = await client.create_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) + + +def test_create_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + + +def test_create_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_exclusion(
+            logging_config.CreateExclusionRequest(),
+            parent='parent_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_exclusion(
+            parent='parent_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].exclusion == logging_config.LogExclusion(name='name_value')
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_exclusion(
+            logging_config.CreateExclusionRequest(),
+            parent='parent_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+        )
+
+
+def test_update_exclusion(transport: str = 'grpc', request_type=logging_config.UpdateExclusionRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        )
+        response = client.update_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogExclusion)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True
+
+
+def test_update_exclusion_from_dict():
+    test_update_exclusion(request_type=dict)
+
+
+def test_update_exclusion_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
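+    # NOTE (editorial sketch): proto3 has no required fields at the wire
+    # level, so an argument-less call must still produce a well-formed
+    # default request; the assertion below relies on proto message equality:
+    #
+    #     logging_config.UpdateExclusionRequest() == \
+    #         logging_config.UpdateExclusionRequest()   # -> True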
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        client.update_exclusion()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateExclusionRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        ))
+        response = await client.update_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateExclusionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogExclusion)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.disabled is True
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_async_from_dict():
+    await test_update_exclusion_async(request_type=dict)
+
+
+def test_update_exclusion_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateExclusionRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        call.return_value = logging_config.LogExclusion()
+        client.update_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateExclusionRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        await client.update_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_update_exclusion_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogExclusion()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_exclusion(
+            name='name_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+        assert args[0].exclusion == logging_config.LogExclusion(name='name_value')
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+def test_update_exclusion_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_exclusion(
+            logging_config.UpdateExclusionRequest(),
+            name='name_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_exclusion(
+            name='name_value',
+            exclusion=logging_config.LogExclusion(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+        assert args[0].exclusion == logging_config.LogExclusion(name='name_value')
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_exclusion(transport: str = 'grpc', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_exclusion_from_dict(): + test_delete_exclusion(request_type=dict) + + +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + +@pytest.mark.asyncio +async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) + + +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
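+    # (Patching `__call__` on the stub method's type intercepts the RPC before
+    # any network traffic happens.)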
+    with mock.patch.object(
+            type(client.transport.delete_exclusion),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_field_headers_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.DeleteExclusionRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_exclusion),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_exclusion(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_delete_exclusion_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_exclusion(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_delete_exclusion_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_exclusion(
+            logging_config.DeleteExclusionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_exclusion),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_exclusion(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
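+        # (The flattened keyword arguments should have been coalesced into a
+        # single request proto, verified field by field below.)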
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_exclusion(
+            logging_config.DeleteExclusionRequest(),
+            name='name_value',
+        )
+
+
+def test_get_cmek_settings(transport: str = 'grpc', request_type=logging_config.GetCmekSettingsRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            service_account_id='service_account_id_value',
+        )
+        response = client.get_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetCmekSettingsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.CmekSettings)
+    assert response.name == 'name_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.service_account_id == 'service_account_id_value'
+
+
+def test_get_cmek_settings_from_dict():
+    test_get_cmek_settings(request_type=dict)
+
+
+def test_get_cmek_settings_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_cmek_settings),
+            '__call__') as call:
+        client.get_cmek_settings()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetCmekSettingsRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client.get_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
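+        # (Only call presence is asserted on the async surface, not an exact
+        # count.)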
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) + + +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_update_cmek_settings(transport: str = 'grpc', request_type=logging_config.UpdateCmekSettingsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + ) + response = client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateCmekSettingsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.CmekSettings)
+    assert response.name == 'name_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.service_account_id == 'service_account_id_value'
+
+
+def test_update_cmek_settings_from_dict():
+    test_update_cmek_settings(request_type=dict)
+
+
+def test_update_cmek_settings_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_cmek_settings),
+            '__call__') as call:
+        client.update_cmek_settings()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateCmekSettingsRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client.update_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateCmekSettingsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.CmekSettings)
+    assert response.name == 'name_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.service_account_id == 'service_account_id_value'
+
+
+@pytest.mark.asyncio
+async def test_update_cmek_settings_async_from_dict():
+    await test_update_cmek_settings_async(request_type=dict)
+
+
+def test_update_cmek_settings_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateCmekSettingsRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_cmek_settings),
+            '__call__') as call:
+        call.return_value = logging_config.CmekSettings()
+        client.update_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
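+    # (`kw['metadata']` is a sequence of (key, value) tuples, hence the
+    # membership check with `in`.)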
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
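+    # (Constructing with credentials only; no transport is named explicitly.)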
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.ConfigServiceV2GrpcTransport,
+    )
+
+def test_config_service_v2_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.ConfigServiceV2Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_config_service_v2_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.ConfigServiceV2Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'list_buckets',
+        'get_bucket',
+        'create_bucket',
+        'update_bucket',
+        'delete_bucket',
+        'undelete_bucket',
+        'list_views',
+        'get_view',
+        'create_view',
+        'update_view',
+        'delete_view',
+        'list_sinks',
+        'get_sink',
+        'create_sink',
+        'update_sink',
+        'delete_sink',
+        'list_exclusions',
+        'get_exclusion',
+        'create_exclusion',
+        'update_exclusion',
+        'delete_exclusion',
+        'get_cmek_settings',
+        'update_cmek_settings',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+
+@requires_google_auth_gte_1_25_0
+def test_config_service_v2_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.ConfigServiceV2Transport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/cloud-platform',
+            'https://www.googleapis.com/auth/cloud-platform.read-only',
+            'https://www.googleapis.com/auth/logging.admin',
+            'https://www.googleapis.com/auth/logging.read',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_config_service_v2_base_transport_with_credentials_file_old_google_auth():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.ConfigServiceV2Transport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json", scopes=(
+            'https://www.googleapis.com/auth/cloud-platform',
+            'https://www.googleapis.com/auth/cloud-platform.read-only',
+            'https://www.googleapis.com/auth/logging.admin',
+            'https://www.googleapis.com/auth/logging.read',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_config_service_v2_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.ConfigServiceV2Transport()
+        adc.assert_called_once()
+
+
+@requires_google_auth_gte_1_25_0
+def test_config_service_v2_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        ConfigServiceV2Client()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/cloud-platform.read-only',
+                'https://www.googleapis.com/auth/logging.admin',
+                'https://www.googleapis.com/auth/logging.read',
+            ),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_config_service_v2_auth_adc_old_google_auth():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        ConfigServiceV2Client()
+        adc.assert_called_once_with(
+            scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.ConfigServiceV2GrpcTransport,
+        transports.ConfigServiceV2GrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_config_service_v2_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.ConfigServiceV2GrpcTransport,
+        transports.ConfigServiceV2GrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
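+    # (google-auth older than 1.25.0 has no default_scopes parameter, so the
+    # default scopes are expected to arrive via `scopes` instead.)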
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
+        adc.assert_called_once_with(scopes=(
+            'https://www.googleapis.com/auth/cloud-platform',
+            'https://www.googleapis.com/auth/cloud-platform.read-only',
+            'https://www.googleapis.com/auth/logging.admin',
+            'https://www.googleapis.com/auth/logging.read',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.ConfigServiceV2GrpcTransport, grpc_helpers),
+        (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "logging.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/cloud-platform.read-only',
+                'https://www.googleapis.com/auth/logging.admin',
+                'https://www.googleapis.com/auth/logging.read',
+            ),
+            scopes=["1", "2"],
+            default_host="logging.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport])
+def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
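+    # (The callback returns (cert bytes, key bytes), which are expected to be
+    # passed to grpc.ssl_channel_credentials below.)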
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_config_service_v2_host_no_port():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'),
+    )
+    assert client.transport._host == 'logging.googleapis.com:443'
+
+
+def test_config_service_v2_host_with_port():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'),
+    )
+    assert client.transport._host == 'logging.googleapis.com:8000'
+
+def test_config_service_v2_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConfigServiceV2GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_config_service_v2_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConfigServiceV2GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
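+# (For context: the non-deprecated way to configure mTLS is through
+# client_options, e.g. ClientOptions(client_cert_source=client_cert_source_callback)
+# with GOOGLE_API_USE_CLIENT_CERTIFICATE="true"; see the *_mtls_env_auto tests
+# elsewhere in this patch.)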
+@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cmek_settings_path(): + project = "squid" + expected = "projects/{project}/cmekSettings".format(project=project, ) + actual = ConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. 
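+    # (e.g. cmek_settings_path("clam") returns "projects/clam/cmekSettings",
+    # which parse_cmek_settings_path inverts back to {"project": "clam"}.)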
+ actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + +def test_log_bucket_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "bucket": "mussel", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + +def test_log_exclusion_path(): + project = "winkle" + exclusion = "nautilus" + expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "scallop", + "exclusion": "abalone", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + +def test_log_sink_path(): + project = "squid" + sink = "clam" + expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "whelk", + "sink": "octopus", + } + path = ConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + +def test_log_view_path(): + project = "oyster" + location = "nudibranch" + bucket = "cuttlefish" + view = "mussel" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "bucket": "scallop", + "view": "abalone", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ConfigServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ConfigServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ConfigServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ConfigServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ConfigServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ConfigServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ConfigServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ConfigServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+    actual = ConfigServiceV2Client.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep:
+        client = ConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep:
+        transport_class = ConfigServiceV2Client.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py
new file mode 100644
index 000000000000..b95281460984
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py
@@ -0,0 +1,2494 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.services.logging_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
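+# (e.g. a DEFAULT_ENDPOINT containing "localhost" is rewritten to
+# "foo.googleapis.com"; real endpoints are returned unchanged.)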
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class", [
+    LoggingServiceV2Client,
+    LoggingServiceV2AsyncClient,
+])
+def test_logging_service_v2_client_from_service_account_info(client_class):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == 'logging.googleapis.com:443'
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.LoggingServiceV2GrpcTransport, "grpc"),
+    (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
+])
+def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class", [
+    LoggingServiceV2Client,
+    LoggingServiceV2AsyncClient,
+])
+def test_logging_service_v2_client_from_service_account_file(client_class):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json")
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json")
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == 'logging.googleapis.com:443'
+
+
+def test_logging_service_v2_client_get_transport_class():
+    transport = LoggingServiceV2Client.get_transport_class()
+    available_transports = [
+        transports.LoggingServiceV2GrpcTransport,
+    ]
+    assert transport in available_transports
+
+    transport = LoggingServiceV2Client.get_transport_class("grpc")
+    assert transport == transports.LoggingServiceV2GrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"),
+    (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
+])
+@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client))
+@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient))
+def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class()
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class()
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class()
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
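+    # (Only "true" and "false" are valid values for this variable.)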
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
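+    # (ADC client certs are discovered via the google.auth.transport.mtls
+    # helpers mocked below.)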
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_logging_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LoggingServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_delete_log(transport: str = 'grpc', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_from_dict(): + test_delete_log(request_type=dict) + + +def test_delete_log_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + client.delete_log() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + +@pytest.mark.asyncio +async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_async_from_dict(): + await test_delete_log_async(request_type=dict) + + +def test_delete_log_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.DeleteLogRequest() + + request.log_name = 'log_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_log_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.DeleteLogRequest() + + request.log_name = 'log_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name/value', + ) in kw['metadata'] + + +def test_delete_log_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_log( + log_name='log_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + + +def test_delete_log_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_log_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
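+    # Hedged sketch of the two equivalent calling conventions this family of
+    # tests exercises ("my-project"/"my-log" are made-up names):
+    #
+    #     client.delete_log(log_name='projects/my-project/logs/my-log')
+    #     client.delete_log(request=logging.DeleteLogRequest(
+    #         log_name='projects/my-project/logs/my-log'))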
+    with mock.patch.object(
+            type(client.transport.delete_log),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_log(
+            log_name='log_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].log_name == 'log_name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_log_flattened_error_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_log(
+            logging.DeleteLogRequest(),
+            log_name='log_name_value',
+        )
+
+
+def test_write_log_entries(transport: str = 'grpc', request_type=logging.WriteLogEntriesRequest):
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.write_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging.WriteLogEntriesResponse()
+        response = client.write_log_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.WriteLogEntriesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging.WriteLogEntriesResponse)
+
+
+def test_write_log_entries_from_dict():
+    test_write_log_entries(request_type=dict)
+
+
+def test_write_log_entries_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.write_log_entries),
+            '__call__') as call:
+        client.write_log_entries()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.WriteLogEntriesRequest()
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest):
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.write_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse())
+        response = await client.write_log_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.WriteLogEntriesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging.WriteLogEntriesResponse)
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_async_from_dict():
+    await test_write_log_entries_async(request_type=dict)
+
+
+def test_write_log_entries_flattened():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.write_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging.WriteLogEntriesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.write_log_entries(
+            log_name='log_name_value',
+            resource=monitored_resource_pb2.MonitoredResource(type_='type__value'),
+            labels={'key_value': 'value_value'},
+            entries=[log_entry.LogEntry(log_name='log_name_value')],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].log_name == 'log_name_value'
+        assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value')
+        assert args[0].labels == {'key_value': 'value_value'}
+        assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')]
+
+
+def test_write_log_entries_flattened_error():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.write_log_entries(
+            logging.WriteLogEntriesRequest(),
+            log_name='log_name_value',
+            resource=monitored_resource_pb2.MonitoredResource(type_='type__value'),
+            labels={'key_value': 'value_value'},
+            entries=[log_entry.LogEntry(log_name='log_name_value')],
+        )
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_flattened_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.write_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.write_log_entries(
+            log_name='log_name_value',
+            resource=monitored_resource_pb2.MonitoredResource(type_='type__value'),
+            labels={'key_value': 'value_value'},
+            entries=[log_entry.LogEntry(log_name='log_name_value')],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
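+        # (Hedged aside, illustrative only: outside of tests the flattened
+        # signature would be used roughly as
+        #     client.write_log_entries(entries=[log_entry.LogEntry(
+        #         log_name='projects/my-project/logs/my-log')])
+        # with real entries; "my-project"/"my-log" are made-up names.)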
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') + assert args[0].labels == {'key_value': 'value_value'} + assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] + + +@pytest.mark.asyncio +async def test_write_log_entries_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + +def test_list_log_entries(transport: str = 'grpc', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_entries_from_dict(): + test_list_log_entries(request_type=dict) + + +def test_list_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + client.list_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_log_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListLogEntriesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLogEntriesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_async_from_dict():
+    await test_list_log_entries_async(request_type=dict)
+
+
+def test_list_log_entries_flattened():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging.ListLogEntriesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_log_entries(
+            resource_names=['resource_names_value'],
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource_names == ['resource_names_value']
+        assert args[0].filter == 'filter_value'
+        assert args[0].order_by == 'order_by_value'
+
+
+def test_list_log_entries_flattened_error():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_log_entries(
+            logging.ListLogEntriesRequest(),
+            resource_names=['resource_names_value'],
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_flattened_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_log_entries(
+            resource_names=['resource_names_value'],
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource_names == ['resource_names_value']
+        assert args[0].filter == 'filter_value'
+        assert args[0].order_by == 'order_by_value'
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_flattened_error_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + +def test_list_log_entries_pager(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_log_entries(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in results) + +def test_list_log_entries_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_log_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_log_entries_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
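+        # Hedged note: the pager re-issues the RPC with each returned
+        # next_page_token, so iteration yields LogEntry items across all
+        # pages; roughly:
+        #
+        #     async for entry in await client.list_log_entries(request={}):
+        #         ...  # each entry is a log_entry.LogEntry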
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_log_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in responses) + +@pytest.mark.asyncio +async def test_list_log_entries_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_log_entries(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=logging.ListMonitoredResourceDescriptorsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_monitored_resource_descriptors_from_dict(): + test_list_monitored_resource_descriptors(request_type=dict) + + +def test_list_monitored_resource_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_monitored_resource_descriptors),
+            '__call__') as call:
+        client.list_monitored_resource_descriptors()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest):
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_monitored_resource_descriptors),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_monitored_resource_descriptors(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_monitored_resource_descriptors_async_from_dict():
+    await test_list_monitored_resource_descriptors_async(request_type=dict)
+
+
+def test_list_monitored_resource_descriptors_pager():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_monitored_resource_descriptors),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_monitored_resource_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results) + +def test_list_monitored_resource_descriptors_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_monitored_resource_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_monitored_resource_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses) + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_list_logs(transport: str = 'grpc', request_type=logging.ListLogsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + ) + response = client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListLogsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLogsPager)
+    assert response.log_names == ['log_names_value']
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_logs_from_dict():
+    test_list_logs(request_type=dict)
+
+
+def test_list_logs_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        client.list_logs()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListLogsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest):
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse(
+            log_names=['log_names_value'],
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging.ListLogsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLogsAsyncPager)
+    assert response.log_names == ['log_names_value']
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async_from_dict():
+    await test_list_logs_async(request_type=dict)
+
+
+def test_list_logs_field_headers():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging.ListLogsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        call.return_value = logging.ListLogsResponse()
+        client.list_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_logs_field_headers_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
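+    # Hedged note: the generated client mirrors request.parent into the
+    # x-goog-request-params metadata so the backend can route the call; for
+    # example, parent='projects/my-project' travels as metadata roughly like
+    #
+    #     ('x-goog-request-params', 'parent=projects/my-project')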
+    request = logging.ListLogsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse())
+        await client.list_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_logs_flattened():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging.ListLogsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_logs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_logs_flattened_error():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_logs(
+            logging.ListLogsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_logs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_error_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_logs(
+            logging.ListLogsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_logs_pager():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_logs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) + +def test_list_logs_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_logs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_logs_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_logs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + +@pytest.mark.asyncio +async def test_list_logs_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_logs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_tail_log_entries(transport: str = 'grpc', request_type=logging.TailLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([logging.TailLogEntriesResponse()]) + response = client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, logging.TailLogEntriesResponse) + + +def test_tail_log_entries_from_dict(): + test_tail_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) + response = await client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, logging.TailLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_from_dict(): + await test_tail_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
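+    # Hedged note (illustrative): a ready-made transport already owns its
+    # channel and credentials, so configure credentials on the transport
+    # itself, e.g. roughly
+    #
+    #     transport = transports.LoggingServiceV2GrpcTransport(
+    #         credentials=ga_credentials.AnonymousCredentials())
+    #     client = LoggingServiceV2Client(transport=transport)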
+ transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LoggingServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) + +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete_log', + 'write_log_entries', + 'list_log_entries', + 'list_monitored_resource_descriptors', + 'list_logs', + 'tail_log_entries', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + quota_project_id="octopus", + ) + + +def test_logging_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
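+    # Hedged sketch: outside of tests, this is roughly the resolution that
+    #
+    #     credentials, project = google.auth.default()
+    #
+    # performs implicitly when LoggingServiceV2Client() is built without
+    # explicit credentials.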
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_logging_service_v2_host_no_port(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + ) + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_logging_service_v2_host_with_port(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + ) + assert client.transport._host == 'logging.googleapis.com:8000' + +def test_logging_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
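+    # A caller-supplied channel must be adopted as-is; the transport skips
+    # credential and SSL resolution, so _ssl_channel_credentials stays None.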
+    transport = transports.LoggingServiceV2GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_logging_service_v2_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LoggingServiceV2GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport])
+def test_logging_service_v2_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_path(): + project = "squid" + log = "clam" + expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + actual = LoggingServiceV2Client.log_path(project, log) + assert expected == actual + + +def test_parse_log_path(): + expected = { + "project": "whelk", + "log": "octopus", + } + path = LoggingServiceV2Client.log_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_log_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LoggingServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LoggingServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = LoggingServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LoggingServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LoggingServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LoggingServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+    actual = LoggingServiceV2Client.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "scallop"
+    expected = "projects/{project}".format(project=project, )
+    actual = LoggingServiceV2Client.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "abalone",
+    }
+    path = LoggingServiceV2Client.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = LoggingServiceV2Client.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "squid"
+    location = "clam"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = LoggingServiceV2Client.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "whelk",
+        "location": "octopus",
+    }
+    path = LoggingServiceV2Client.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = LoggingServiceV2Client.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep:
+        client = LoggingServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep:
+        transport_class = LoggingServiceV2Client.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
new file mode 100644
index 000000000000..5ce85b428459
--- /dev/null
+++ b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
@@ -0,0 +1,2359 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import distribution_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.services.metrics_service_v2 import transports +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import logging_metrics +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
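+# (The client_options tests below install this helper via mock.patch.object
+# on each client class's DEFAULT_ENDPOINT attribute.)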
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetricsServiceV2Client._get_default_mtls_endpoint(None) is None + assert MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, +]) +def test_metrics_service_v2_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MetricsServiceV2GrpcTransport, "grpc"), + (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, +]) +def test_metrics_service_v2_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_metrics_service_v2_client_get_transport_class(): + transport = MetricsServiceV2Client.get_transport_class() + available_transports = [ + transports.MetricsServiceV2GrpcTransport, + ] + assert transport in available_transports + + transport = MetricsServiceV2Client.get_transport_class("grpc") + assert transport == 
transports.MetricsServiceV2GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +def test_metrics_service_v2_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
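+    # ADC certificate discovery is simulated below by patching
+    # google.auth.transport.mtls.has_default_client_cert_source and
+    # google.auth.transport.mtls.default_client_cert_source.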
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_metrics_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetricsServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_log_metrics(transport: str = 'grpc', request_type=logging_metrics.ListLogMetricsRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogMetricsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_metrics_from_dict(): + test_list_log_metrics(request_type=dict) + + +def test_list_log_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + client.list_log_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + +@pytest.mark.asyncio +async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_log_metrics(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.ListLogMetricsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLogMetricsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_from_dict():
+    await test_list_log_metrics_async(request_type=dict)
+
+
+def test_list_log_metrics_field_headers():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_metrics.ListLogMetricsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        call.return_value = logging_metrics.ListLogMetricsResponse()
+        client.list_log_metrics(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_field_headers_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_metrics.ListLogMetricsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse())
+        await client.list_log_metrics(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_log_metrics_flattened():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.ListLogMetricsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_log_metrics(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
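+        # (The flattened keyword arguments are packed into a single request
+        # proto, which the mocked stub receives as its first positional
+        # argument.)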
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_log_metrics_flattened_error():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_log_metrics(
+            logging_metrics.ListLogMetricsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_log_metrics(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_log_metrics(
+            logging_metrics.ListLogMetricsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_log_metrics_pager():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[],
+                next_page_token='def',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_log_metrics(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_metrics.LogMetric)
+                   for i in results)
+
+def test_list_log_metrics_pages():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__') as call:
+        # Set the response to a series of pages.
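+        # (The trailing RuntimeError guards against over-iteration: the pager
+        # must stop at the page whose next_page_token is empty rather than
+        # request another page.)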
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[],
+                next_page_token='def',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_log_metrics(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_pager():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[],
+                next_page_token='def',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_log_metrics(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_metrics.LogMetric)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_pages():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_log_metrics),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[],
+                next_page_token='def',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_metrics.ListLogMetricsResponse(
+                metrics=[
+                    logging_metrics.LogMetric(),
+                    logging_metrics.LogMetric(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_log_metrics(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+def test_get_log_metric(transport: str = 'grpc', request_type=logging_metrics.GetLogMetricRequest):
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        )
+        response = client.get_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.GetLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_metrics.LogMetric)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.value_extractor == 'value_extractor_value'
+    assert response.version == logging_metrics.LogMetric.ApiVersion.V1
+
+
+def test_get_log_metric_from_dict():
+    test_get_log_metric(request_type=dict)
+
+
+def test_get_log_metric_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_log_metric),
+            '__call__') as call:
+        client.get_log_metric()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.GetLogMetricRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest):
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        ))
+        response = await client.get_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.GetLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_get_log_metric_async_from_dict(): + await test_get_log_metric_async(request_type=dict) + + +def test_get_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +def test_get_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + + +def test_get_log_metric_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_log_metric(
+            logging_metrics.GetLogMetricRequest(),
+            metric_name='metric_name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_log_metric(
+            metric_name='metric_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metric_name == 'metric_name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_log_metric(
+            logging_metrics.GetLogMetricRequest(),
+            metric_name='metric_name_value',
+        )
+
+
+def test_create_log_metric(transport: str = 'grpc', request_type=logging_metrics.CreateLogMetricRequest):
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        )
+        response = client.create_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.CreateLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_metrics.LogMetric)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.value_extractor == 'value_extractor_value'
+    assert response.version == logging_metrics.LogMetric.ApiVersion.V1
+
+
+def test_create_log_metric_from_dict():
+    test_create_log_metric(request_type=dict)
+
+
+def test_create_log_metric_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        client.create_log_metric()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.CreateLogMetricRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest):
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        ))
+        response = await client.create_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.CreateLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_metrics.LogMetric)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.value_extractor == 'value_extractor_value'
+    assert response.version == logging_metrics.LogMetric.ApiVersion.V1
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_async_from_dict():
+    await test_create_log_metric_async(request_type=dict)
+
+
+def test_create_log_metric_field_headers():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_metrics.CreateLogMetricRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        call.return_value = logging_metrics.LogMetric()
+        client.create_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_field_headers_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_metrics.CreateLogMetricRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric())
+        await client.create_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_create_log_metric_flattened():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.LogMetric()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_log_metric(
+            parent='parent_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].metric == logging_metrics.LogMetric(name='name_value')
+
+
+def test_create_log_metric_flattened_error():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_log_metric(
+            logging_metrics.CreateLogMetricRequest(),
+            parent='parent_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_log_metric(
+            parent='parent_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].metric == logging_metrics.LogMetric(name='name_value')
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_log_metric(
+            logging_metrics.CreateLogMetricRequest(),
+            parent='parent_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+
+def test_update_log_metric(transport: str = 'grpc', request_type=logging_metrics.UpdateLogMetricRequest):
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        )
+        response = client.update_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.UpdateLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_metrics.LogMetric)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+    assert response.value_extractor == 'value_extractor_value'
+    assert response.version == logging_metrics.LogMetric.ApiVersion.V1
+
+
+def test_update_log_metric_from_dict():
+    test_update_log_metric(request_type=dict)
+
+
+def test_update_log_metric_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_log_metric),
+            '__call__') as call:
+        client.update_log_metric()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.UpdateLogMetricRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest):
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            value_extractor='value_extractor_value',
+            version=logging_metrics.LogMetric.ApiVersion.V1,
+        ))
+        response = await client.update_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_update_log_metric_async_from_dict(): + await test_update_log_metric_async(request_type=dict) + + +def test_update_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +def test_update_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_log_metric( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metric_name == 'metric_name_value'
+        assert args[0].metric == logging_metrics.LogMetric(name='name_value')
+
+
+def test_update_log_metric_flattened_error():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_log_metric(
+            logging_metrics.UpdateLogMetricRequest(),
+            metric_name='metric_name_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_log_metric_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_log_metric(
+            metric_name='metric_name_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metric_name == 'metric_name_value'
+        assert args[0].metric == logging_metrics.LogMetric(name='name_value')
+
+
+@pytest.mark.asyncio
+async def test_update_log_metric_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_log_metric(
+            logging_metrics.UpdateLogMetricRequest(),
+            metric_name='metric_name_value',
+            metric=logging_metrics.LogMetric(name='name_value'),
+        )
+
+
+def test_delete_log_metric(transport: str = 'grpc', request_type=logging_metrics.DeleteLogMetricRequest):
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_log_metric(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_metrics.DeleteLogMetricRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_log_metric_from_dict():
+    test_delete_log_metric(request_type=dict)
+
+
+def test_delete_log_metric_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+ client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + client.delete_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + +@pytest.mark.asyncio +async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_metric_async_from_dict(): + await test_delete_log_metric_async(request_type=dict) + + +def test_delete_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = None + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'metric_name=metric_name/value',
+    ) in kw['metadata']
+
+
+def test_delete_log_metric_flattened():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_log_metric(
+            metric_name='metric_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metric_name == 'metric_name_value'
+
+
+def test_delete_log_metric_flattened_error():
+    client = MetricsServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_log_metric(
+            logging_metrics.DeleteLogMetricRequest(),
+            metric_name='metric_name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_log_metric_flattened_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_log_metric),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_log_metric(
+            metric_name='metric_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metric_name == 'metric_name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_log_metric_flattened_error_async():
+    client = MetricsServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_log_metric(
+            logging_metrics.DeleteLogMetricRequest(),
+            metric_name='metric_name_value',
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.MetricsServiceV2GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MetricsServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.MetricsServiceV2GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MetricsServiceV2Client(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+ transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetricsServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) + +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_log_metrics', + 'get_log_metric', + 'create_log_metric', + 'update_log_metric', + 'delete_log_metric', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_metrics_service_v2_host_no_port(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + ) + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_metrics_service_v2_host_with_port(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + ) + assert client.transport._host == 'logging.googleapis.com:8000' + +def test_metrics_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.MetricsServiceV2GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_metrics_service_v2_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetricsServiceV2GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport])
+def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_metric_path(): + project = "squid" + metric = "clam" + expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + actual = MetricsServiceV2Client.log_metric_path(project, metric) + assert expected == actual + + +def test_parse_log_metric_path(): + expected = { + "project": "whelk", + "metric": "octopus", + } + path = MetricsServiceV2Client.log_metric_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_log_metric_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetricsServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MetricsServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetricsServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MetricsServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetricsServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MetricsServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricsServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = MetricsServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MetricsServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetricsServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MetricsServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = MetricsServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) From 992d5ef83ae6b8329ffca4c5a419bc19bfa5e96a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 10:24:10 +0000 Subject: [PATCH 545/855] chore: use gapic-generator-python 0.52.0 (#407) - [ ] Regenerate this pull request now. fix: improper types in pagers generation PiperOrigin-RevId: 399773015 Source-Link: https://github.com/googleapis/googleapis/commit/410c184536a22fadaf00aec3cab04102e34d2322 Source-Link: https://github.com/googleapis/googleapis-gen/commit/290e883545e3ac9ff2bd00cd0dacb28f1b8ca945 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjkwZTg4MzU0NWUzYWM5ZmYyYmQwMGNkMGRhY2IyOGYxYjhjYTk0NSJ9 --- .../services/config_service_v2/pagers.py | 34 +++++++++---------- .../services/logging_service_v2/pagers.py | 26 +++++++------- .../services/metrics_service_v2/pagers.py | 10 +++--- 3 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py index 11dce2ab7d58..43e0084a0019 100644 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.logging_v2.types import logging_config @@ -62,14 +62,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogBucket]: + def __iter__(self) -> Iterator[logging_config.LogBucket]: for page in self.pages: yield from page.buckets @@ -121,14 +121,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: for response in page.buckets: @@ -184,14 +184,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListViewsResponse]: + def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogView]: + def __iter__(self) -> Iterator[logging_config.LogView]: for page in self.pages: yield from page.views @@ -243,14 +243,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: for response in page.views: @@ -306,14 +306,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListSinksResponse]: + def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogSink]: + def __iter__(self) -> Iterator[logging_config.LogSink]: for page in self.pages: yield from page.sinks @@ -365,14 +365,14 @@ def __getattr__(self, name: str) -> Any: return 
getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogSink]: + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: for response in page.sinks: @@ -428,14 +428,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogExclusion]: + def __iter__(self) -> Iterator[logging_config.LogExclusion]: for page in self.pages: yield from page.exclusions @@ -487,14 +487,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: for response in page.exclusions: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 9b94311d2e33..95adb7e912c9 100644 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry @@ -64,14 +64,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogEntriesResponse]: + def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[log_entry.LogEntry]: + def __iter__(self) -> Iterator[log_entry.LogEntry]: for page in self.pages: yield from page.entries @@ -123,14 +123,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: for response in page.entries: @@ -186,14 +186,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -245,14 +245,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]: + async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -308,14 +308,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogsResponse]: + def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> 
Iterable[str]: + def __iter__(self) -> Iterator[str]: for page in self.pages: yield from page.log_names @@ -367,14 +367,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[str]: + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: for response in page.log_names: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index f6bf04e4f968..a3faa77a20f7 100644 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.logging_v2.types import logging_metrics @@ -62,14 +62,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]: + def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_metrics.LogMetric]: + def __iter__(self) -> Iterator[logging_metrics.LogMetric]: for page in self.pages: yield from page.metrics @@ -121,14 +121,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]: + async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]: + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: for response in page.metrics: From 9036813d36e0665ab24a003d1d95a1d66fcd7549 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Oct 2021 16:25:27 -0700 Subject: [PATCH 546/855] feat: added support for iam AuditData proto (#396) --- .../google/cloud/logging_v2/entries.py | 1 + packages/google-cloud-logging/setup.py | 1 + .../tests/system/test_system.py | 33 +++++++++++++++++++ 3 files changed, 35 insertions(+) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py index fa7e5d9d178c..0af5a46f72ba 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -30,6 +30,7 @@ # import officially supported proto definitions import google.cloud.audit.audit_log_pb2 # noqa: F401 import google.cloud.appengine_logging # noqa: F401 +from google.iam.v1.logging import audit_data_pb2 # noqa: F401 _GLOBAL_RESOURCE = Resource(type="global", labels={}) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6a9ed8a59d67..5b882211a3c7 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -39,6 +39,7 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.1, <3.0.0dev", + "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "proto-plus >= 1.11.0", "packaging >= 14.3", ] diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 81de866ee3b5..532eea96b09f 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -260,6 +260,39 @@ def test_list_entry_with_requestlog(self): protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url ) + def test_list_entry_with_auditdata(self): + """ + Test emitting and listing logs containing a google.iam.v1.logging.AuditData proto message + """ + from google.protobuf import descriptor_pool + from google.cloud.logging_v2 import entries + + pool = descriptor_pool.Default() + type_name = "google.iam.v1.logging.AuditData" + type_url = "type.googleapis.com/" + type_name + # Make sure the descriptor is known in the registry. + # Raises KeyError if unknown + pool.FindMessageTypeByName(type_name) + + # create log + req_dict = {"@type": type_url, "policyDelta": {}} + req_struct = self._dict_to_struct(req_dict) + + logger = Config.CLIENT.logger(f"auditdata-proto-{uuid.uuid1()}") + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" logger = Config.CLIENT.logger(self._logger_name("log_text")) From cb051b8c3af70d639e966ee5ce2a0a2ddfc87ea7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Oct 2021 17:30:17 +0000 Subject: [PATCH 547/855] chore(python): fix formatting issue in noxfile.py.j2 (#417) fix: improper types in pagers generation fix: add 'dict' annotation type to 'request' fix(deps): require proto-plus==1.15.0 --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/lint/common.cfg | 2 +- .../.kokoro/samples/python3.10/common.cfg | 40 + .../.kokoro/samples/python3.10/continuous.cfg | 6 + .../samples/python3.10/periodic-head.cfg | 11 + .../.kokoro/samples/python3.10/periodic.cfg | 6 + .../.kokoro/samples/python3.10/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 2 +- .../.kokoro/samples/python3.6/periodic.cfg | 2 +- .../.kokoro/samples/python3.7/common.cfg | 2 +- 
.../.kokoro/samples/python3.7/periodic.cfg | 2 +- .../.kokoro/samples/python3.8/common.cfg | 2 +- .../.kokoro/samples/python3.8/periodic.cfg | 2 +- .../.kokoro/samples/python3.9/common.cfg | 2 +- .../.kokoro/samples/python3.9/periodic.cfg | 2 +- .../.kokoro/test-samples-against-head.sh | 2 - .../.kokoro/test-samples.sh | 2 - packages/google-cloud-logging/.trampolinerc | 15 +- .../google-cloud-logging/CONTRIBUTING.rst | 12 +- packages/google-cloud-logging/docs/conf.py | 10 +- .../services/config_service_v2/client.py | 94 +- .../services/config_service_v2/pagers.py | 36 +- .../config_service_v2/transports/base.py | 2 +- .../config_service_v2/transports/grpc.py | 6 +- .../transports/grpc_asyncio.py | 6 +- .../services/logging_service_v2/client.py | 32 +- .../services/logging_service_v2/pagers.py | 28 +- .../logging_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/grpc.py | 6 +- .../transports/grpc_asyncio.py | 6 +- .../services/metrics_service_v2/client.py | 22 +- .../services/metrics_service_v2/pagers.py | 12 +- .../metrics_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/grpc.py | 6 +- .../transports/grpc_asyncio.py | 6 +- packages/google-cloud-logging/noxfile.py | 12 +- .../owl-bot-staging/v2/.coveragerc | 17 - .../owl-bot-staging/v2/MANIFEST.in | 2 - .../owl-bot-staging/v2/README.rst | 49 - .../owl-bot-staging/v2/docs/conf.py | 376 - .../owl-bot-staging/v2/docs/index.rst | 7 - .../v2/docs/logging_v2/config_service_v2.rst | 10 - .../v2/docs/logging_v2/logging_service_v2.rst | 10 - .../v2/docs/logging_v2/metrics_service_v2.rst | 10 - .../v2/docs/logging_v2/services.rst | 8 - .../v2/docs/logging_v2/types.rst | 7 - .../v2/google/cloud/logging/__init__.py | 143 - .../v2/google/cloud/logging/py.typed | 2 - .../v2/google/cloud/logging_v2/__init__.py | 144 - .../cloud/logging_v2/gapic_metadata.json | 391 - .../v2/google/cloud/logging_v2/py.typed | 2 - .../cloud/logging_v2/services/__init__.py | 15 - .../services/config_service_v2/__init__.py | 22 - .../config_service_v2/async_client.py | 2016 ------ .../services/config_service_v2/client.py | 2198 ------ .../services/config_service_v2/pagers.py | 506 -- .../config_service_v2/transports/__init__.py | 33 - .../config_service_v2/transports/base.py | 536 -- .../config_service_v2/transports/grpc.py | 878 --- .../transports/grpc_asyncio.py | 882 --- .../services/logging_service_v2/__init__.py | 22 - .../logging_service_v2/async_client.py | 781 -- .../services/logging_service_v2/client.py | 920 --- .../services/logging_service_v2/pagers.py | 386 - .../logging_service_v2/transports/__init__.py | 33 - .../logging_service_v2/transports/base.py | 291 - .../logging_service_v2/transports/grpc.py | 402 - .../transports/grpc_asyncio.py | 406 -- .../services/metrics_service_v2/__init__.py | 22 - .../metrics_service_v2/async_client.py | 640 -- .../services/metrics_service_v2/client.py | 799 -- .../services/metrics_service_v2/pagers.py | 140 - .../metrics_service_v2/transports/__init__.py | 33 - .../metrics_service_v2/transports/base.py | 261 - .../metrics_service_v2/transports/grpc.py | 357 - .../transports/grpc_asyncio.py | 361 - .../google/cloud/logging_v2/types/__init__.py | 138 - .../cloud/logging_v2/types/log_entry.py | 321 - .../google/cloud/logging_v2/types/logging.py | 573 -- .../cloud/logging_v2/types/logging_config.py | 1457 ---- .../cloud/logging_v2/types/logging_metrics.py | 371 - .../owl-bot-staging/v2/mypy.ini | 3 - .../owl-bot-staging/v2/noxfile.py | 132 - .../v2/scripts/fixup_logging_v2_keywords.py | 209 
- .../owl-bot-staging/v2/setup.py | 54 - .../owl-bot-staging/v2/tests/__init__.py | 16 - .../owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/logging_v2/__init__.py | 16 - .../logging_v2/test_config_service_v2.py | 6447 ----------------- .../logging_v2/test_logging_service_v2.py | 2494 ------- .../logging_v2/test_metrics_service_v2.py | 2359 ------ packages/google-cloud-logging/renovate.json | 5 +- .../samples/snippets/noxfile.py | 52 +- .../templates/install_deps.tmpl.rst | 2 +- packages/google-cloud-logging/setup.py | 2 +- .../testing/constraints-3.6.txt | 2 +- 97 files changed, 277 insertions(+), 28934 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/README.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py delete mode 100644 
packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/setup.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 
packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py delete mode 100644 packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 2567653c000d..0b76845028a9 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a + digest: sha256:424d88d5d08ddd955782a4359559dc536e658db1a77416c9a4fff79df9519ad2 diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg index ceb20370e175..9123caa63185 100644 --- a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg new file mode 100644 index 000000000000..61142a001697 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 000000000000..7e2973e3b659 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg index 1bfa98c11b08..792a4d14c2eb 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg index 0f8e2c95d19d..3ba076feaa21 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg index 6c1d661d2f91..144751654145 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg index d4c521347140..f6f6943a51fc 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-logging/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh index 635a5ace0c20..ba3a707b040c 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-logging - exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index 0f5f8d4008f6..11c042d342d7 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-logging - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index 6f984309b209..a51abf0b5c2b 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -21,9 +21,22 @@ required_envvars+=() pass_down_envvars+=( "ENVIRONMENT" "RUNTIME" + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 2a720ed444e9..1839c49a7c50 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.10 -- -k .. note:: @@ -113,9 +113,9 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-logging``. 
The the suggested remote name ``upstream`` - should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``main``). + version of ``python-logging``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -225,11 +225,13 @@ We support: - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index d6e9cca6e6b4..743981d3a4c6 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = "google-cloud-logging" @@ -280,7 +280,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-logging.tex", "google-cloud-logging Documentation", author, @@ -315,7 +315,7 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-cloud-logging", "google-cloud-logging Documentation", [author], @@ -334,7 +334,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-logging", "google-cloud-logging Documentation", author, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 2ef4f49b6f7e..d14827a1e285 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -405,7 +405,7 @@ def __init__( def list_buckets( self, - request: logging_config.ListBucketsRequest = None, + request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -415,7 +415,7 @@ def list_buckets( r"""Lists buckets. Args: - request (google.cloud.logging_v2.types.ListBucketsRequest): + request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. parent (str): Required. The parent resource whose buckets are to be @@ -494,7 +494,7 @@ def list_buckets( def get_bucket( self, - request: logging_config.GetBucketRequest = None, + request: Union[logging_config.GetBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -503,7 +503,7 @@ def get_bucket( r"""Gets a bucket. 
Args: - request (google.cloud.logging_v2.types.GetBucketRequest): + request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -541,7 +541,7 @@ def get_bucket( def create_bucket( self, - request: logging_config.CreateBucketRequest = None, + request: Union[logging_config.CreateBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -552,7 +552,7 @@ def create_bucket( cannot be changed. Args: - request (google.cloud.logging_v2.types.CreateBucketRequest): + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -590,7 +590,7 @@ def create_bucket( def update_bucket( self, - request: logging_config.UpdateBucketRequest = None, + request: Union[logging_config.UpdateBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -609,7 +609,7 @@ def update_bucket( A buckets region may not be modified after it is created. Args: - request (google.cloud.logging_v2.types.UpdateBucketRequest): + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -647,7 +647,7 @@ def update_bucket( def delete_bucket( self, - request: logging_config.DeleteBucketRequest = None, + request: Union[logging_config.DeleteBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -658,7 +658,7 @@ def delete_bucket( the bucket will be permanently deleted. Args: - request (google.cloud.logging_v2.types.DeleteBucketRequest): + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -691,7 +691,7 @@ def delete_bucket( def undelete_bucket( self, - request: logging_config.UndeleteBucketRequest = None, + request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -701,7 +701,7 @@ def undelete_bucket( may be undeleted within the grace period of 7 days. Args: - request (google.cloud.logging_v2.types.UndeleteBucketRequest): + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -734,7 +734,7 @@ def undelete_bucket( def list_views( self, - request: logging_config.ListViewsRequest = None, + request: Union[logging_config.ListViewsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -744,7 +744,7 @@ def list_views( r"""Lists views on a bucket. Args: - request (google.cloud.logging_v2.types.ListViewsRequest): + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. The parameters to `ListViews`. parent (str): Required. 
The bucket whose views are to be listed: @@ -815,7 +815,7 @@ def list_views( def get_view( self, - request: logging_config.GetViewRequest = None, + request: Union[logging_config.GetViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -824,7 +824,7 @@ def get_view( r"""Gets a view. Args: - request (google.cloud.logging_v2.types.GetViewRequest): + request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -864,7 +864,7 @@ def get_view( def create_view( self, - request: logging_config.CreateViewRequest = None, + request: Union[logging_config.CreateViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -874,7 +874,7 @@ def create_view( contain a maximum of 50 views. Args: - request (google.cloud.logging_v2.types.CreateViewRequest): + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -914,7 +914,7 @@ def create_view( def update_view( self, - request: logging_config.UpdateViewRequest = None, + request: Union[logging_config.UpdateViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -924,7 +924,7 @@ def update_view( existing view with values from the new view: ``filter``. Args: - request (google.cloud.logging_v2.types.UpdateViewRequest): + request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -964,7 +964,7 @@ def update_view( def delete_view( self, - request: logging_config.DeleteViewRequest = None, + request: Union[logging_config.DeleteViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -973,7 +973,7 @@ def delete_view( r"""Deletes a view from a bucket. Args: - request (google.cloud.logging_v2.types.DeleteViewRequest): + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1006,7 +1006,7 @@ def delete_view( def list_sinks( self, - request: logging_config.ListSinksRequest = None, + request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1016,7 +1016,7 @@ def list_sinks( r"""Lists sinks. Args: - request (google.cloud.logging_v2.types.ListSinksRequest): + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. parent (str): Required. The parent resource whose sinks are to be @@ -1091,7 +1091,7 @@ def list_sinks( def get_sink( self, - request: logging_config.GetSinkRequest = None, + request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1101,7 +1101,7 @@ def get_sink( r"""Gets a sink. Args: - request (google.cloud.logging_v2.types.GetSinkRequest): + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. sink_name (str): Required. 
The resource name of the sink: @@ -1178,7 +1178,7 @@ def get_sink( def create_sink( self, - request: logging_config.CreateSinkRequest = None, + request: Union[logging_config.CreateSinkRequest, dict] = None, *, parent: str = None, sink: logging_config.LogSink = None, @@ -1193,7 +1193,7 @@ def create_sink( entries only from the resource owning the sink. Args: - request (google.cloud.logging_v2.types.CreateSinkRequest): + request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. parent (str): Required. The resource in which to create the sink: @@ -1278,7 +1278,7 @@ def create_sink( def update_sink( self, - request: logging_config.UpdateSinkRequest = None, + request: Union[logging_config.UpdateSinkRequest, dict] = None, *, sink_name: str = None, sink: logging_config.LogSink = None, @@ -1295,7 +1295,7 @@ def update_sink( the ``unique_writer_identity`` field. Args: - request (google.cloud.logging_v2.types.UpdateSinkRequest): + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. sink_name (str): Required. The full resource name of the sink to update, @@ -1404,7 +1404,7 @@ def update_sink( def delete_sink( self, - request: logging_config.DeleteSinkRequest = None, + request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1415,7 +1415,7 @@ def delete_sink( then that service account is also deleted. Args: - request (google.cloud.logging_v2.types.DeleteSinkRequest): + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. sink_name (str): Required. The full resource name of the sink to delete, @@ -1479,7 +1479,7 @@ def delete_sink( def list_exclusions( self, - request: logging_config.ListExclusionsRequest = None, + request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1489,7 +1489,7 @@ def list_exclusions( r"""Lists all the exclusions in a parent resource. Args: - request (google.cloud.logging_v2.types.ListExclusionsRequest): + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. parent (str): Required. The parent resource whose exclusions are to be @@ -1564,7 +1564,7 @@ def list_exclusions( def get_exclusion( self, - request: logging_config.GetExclusionRequest = None, + request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1574,7 +1574,7 @@ def get_exclusion( r"""Gets the description of an exclusion. Args: - request (google.cloud.logging_v2.types.GetExclusionRequest): + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. name (str): Required. The resource name of an existing exclusion: @@ -1652,7 +1652,7 @@ def get_exclusion( def create_exclusion( self, - request: logging_config.CreateExclusionRequest = None, + request: Union[logging_config.CreateExclusionRequest, dict] = None, *, parent: str = None, exclusion: logging_config.LogExclusion = None, @@ -1666,7 +1666,7 @@ def create_exclusion( resource. Args: - request (google.cloud.logging_v2.types.CreateExclusionRequest): + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. 
The parameters to `CreateExclusion`. parent (str): Required. The parent resource in which to create the @@ -1755,7 +1755,7 @@ def create_exclusion( def update_exclusion( self, - request: logging_config.UpdateExclusionRequest = None, + request: Union[logging_config.UpdateExclusionRequest, dict] = None, *, name: str = None, exclusion: logging_config.LogExclusion = None, @@ -1768,7 +1768,7 @@ def update_exclusion( exclusion. Args: - request (google.cloud.logging_v2.types.UpdateExclusionRequest): + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. name (str): Required. The resource name of the exclusion to update: @@ -1872,7 +1872,7 @@ def update_exclusion( def delete_exclusion( self, - request: logging_config.DeleteExclusionRequest = None, + request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1882,7 +1882,7 @@ def delete_exclusion( r"""Deletes an exclusion. Args: - request (google.cloud.logging_v2.types.DeleteExclusionRequest): + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. name (str): Required. The resource name of an existing exclusion to @@ -1945,7 +1945,7 @@ def delete_exclusion( def get_cmek_settings( self, - request: logging_config.GetCmekSettingsRequest = None, + request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1962,7 +1962,7 @@ def get_cmek_settings( for more information. Args: - request (google.cloud.logging_v2.types.GetCmekSettingsRequest): + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs @@ -2016,7 +2016,7 @@ def get_cmek_settings( def update_cmek_settings( self, - request: logging_config.UpdateCmekSettingsRequest = None, + request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -2039,7 +2039,7 @@ def update_cmek_settings( for more information. Args: - request (google.cloud.logging_v2.types.UpdateCmekSettingsRequest): + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. 
See [Enabling CMEK for Logs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 6d8e11fb83e8..b0be053e43e5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.logging_v2.types import logging_config @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogBucket]: + def __iter__(self) -> Iterator[logging_config.LogBucket]: for page in self.pages: yield from page.buckets @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: for response in page.buckets: @@ -202,14 +202,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListViewsResponse]: + def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogView]: + def __iter__(self) -> Iterator[logging_config.LogView]: for page in self.pages: yield from page.views @@ -264,14 +264,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: for response in page.views: @@ -330,14 +330,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListSinksResponse]: + def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response 
while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogSink]: + def __iter__(self) -> Iterator[logging_config.LogSink]: for page in self.pages: yield from page.sinks @@ -392,14 +392,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogSink]: + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: for response in page.sinks: @@ -458,14 +458,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogExclusion]: + def __iter__(self) -> Iterator[logging_config.LogExclusion]: for page in self.pages: yield from page.exclusions @@ -520,14 +520,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: for response in page.exclusions: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e191687e9458..1ffcb227014a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -121,7 +121,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index ccd766fa79a3..7fb2560b9be7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -81,16 +81,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3b8139e1e050..9a844e7c295c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -128,16 +128,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
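
The ``config_service_v2`` hunks above are annotation-level changes: every client method's ``request`` parameter widens from the bare proto type to ``Union[<RequestType>, dict]``, and the pagers now advertise ``Iterator``/``AsyncIterator`` instead of ``Iterable``. The method bodies are untouched, so this documents an already-supported calling convention rather than adding a new code path. A minimal sketch of both call styles (the project ID is a placeholder; Application Default Credentials are assumed)::

    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import ListSinksRequest

    client = ConfigServiceV2Client()  # picks up Application Default Credentials

    # Typed request object, as before the change.
    typed = client.list_sinks(
        request=ListSinksRequest(parent="projects/my-project")
    )

    # Plain-dict request, now reflected in the Union[..., dict] annotation;
    # the generated client coerces it into a ListSinksRequest.
    from_dict = client.list_sinks(request={"parent": "projects/my-project"})

    # Either call returns a ListSinksPager; iterating it fetches successive
    # pages transparently (the Iterator annotations fixed in pagers.py above).
    for sink in from_dict:
        print(sink.name)
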
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c5dba2d04471..73909c7fff95 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -17,17 +17,7 @@ from distutils import util import os import re -from typing import ( - Callable, - Dict, - Optional, - Iterable, - Iterator, - Sequence, - Tuple, - Type, - Union, -) +from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -359,7 +349,7 @@ def __init__( def delete_log( self, - request: logging.DeleteLogRequest = None, + request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -373,7 +363,7 @@ def delete_log( with a timestamp before the operation will be deleted. Args: - request (google.cloud.logging_v2.types.DeleteLogRequest): + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. log_name (str): Required. The resource name of the log to delete: @@ -438,7 +428,7 @@ def delete_log( def write_log_entries( self, - request: logging.WriteLogEntriesRequest = None, + request: Union[logging.WriteLogEntriesRequest, dict] = None, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, @@ -457,7 +447,7 @@ def write_log_entries( organizations, billing accounts or folders) Args: - request (google.cloud.logging_v2.types.WriteLogEntriesRequest): + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. log_name (str): Optional. A default log resource name that is assigned @@ -596,7 +586,7 @@ def write_log_entries( def list_log_entries( self, - request: logging.ListLogEntriesRequest = None, + request: Union[logging.ListLogEntriesRequest, dict] = None, *, resource_names: Sequence[str] = None, filter: str = None, @@ -611,7 +601,7 @@ def list_log_entries( Logs `__. Args: - request (google.cloud.logging_v2.types.ListLogEntriesRequest): + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. resource_names (Sequence[str]): Required. Names of one or more parent resources from @@ -720,7 +710,7 @@ def list_log_entries( def list_monitored_resource_descriptors( self, - request: logging.ListMonitoredResourceDescriptorsRequest = None, + request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -730,7 +720,7 @@ def list_monitored_resource_descriptors( used by Logging. Args: - request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. 
The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -776,7 +766,7 @@ def list_monitored_resource_descriptors( def list_logs( self, - request: logging.ListLogsRequest = None, + request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -788,7 +778,7 @@ def list_logs( listed. Args: - request (google.cloud.logging_v2.types.ListLogsRequest): + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): Required. The resource name that owns the logs: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index b06007cb4a40..ca4d01fac494 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.api import monitored_resource_pb2 # type: ignore @@ -76,14 +76,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogEntriesResponse]: + def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[log_entry.LogEntry]: + def __iter__(self) -> Iterator[log_entry.LogEntry]: for page in self.pages: yield from page.entries @@ -138,14 +138,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: for response in page.entries: @@ -204,14 +204,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -270,7 +270,7 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]: + ) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: yield 
self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -279,7 +279,7 @@ async def pages( def __aiter__( self, - ) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + ) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -338,14 +338,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogsResponse]: + def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[str]: + def __iter__(self) -> Iterator[str]: for page in self.pages: yield from page.log_names @@ -400,14 +400,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[str]: + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: for response in page.log_names: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index fb5e3c8b00f1..05c273d6d16c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -122,7 +122,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index fddf039883f7..a1031e93e2e6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -81,16 +81,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 3e158cd6f54a..a71fb28f9b84 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -128,16 +128,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9749b5eff239..7d1e2a21d743 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -352,7 +352,7 @@ def __init__( def list_log_metrics( self, - request: logging_metrics.ListLogMetricsRequest = None, + request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -362,7 +362,7 @@ def list_log_metrics( r"""Lists logs-based metrics. 
Args: - request (google.cloud.logging_v2.types.ListLogMetricsRequest): + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. parent (str): Required. The name of the project containing the @@ -434,7 +434,7 @@ def list_log_metrics( def get_log_metric( self, - request: logging_metrics.GetLogMetricRequest = None, + request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -444,7 +444,7 @@ def get_log_metric( r"""Gets a logs-based metric. Args: - request (google.cloud.logging_v2.types.GetLogMetricRequest): + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. metric_name (str): Required. The resource name of the desired metric: @@ -518,7 +518,7 @@ def get_log_metric( def create_log_metric( self, - request: logging_metrics.CreateLogMetricRequest = None, + request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, *, parent: str = None, metric: logging_metrics.LogMetric = None, @@ -529,7 +529,7 @@ def create_log_metric( r"""Creates a logs-based metric. Args: - request (google.cloud.logging_v2.types.CreateLogMetricRequest): + request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. parent (str): Required. The resource name of the project in which to @@ -614,7 +614,7 @@ def create_log_metric( def update_log_metric( self, - request: logging_metrics.UpdateLogMetricRequest = None, + request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, @@ -625,7 +625,7 @@ def update_log_metric( r"""Creates or updates a logs-based metric. Args: - request (google.cloud.logging_v2.types.UpdateLogMetricRequest): + request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. metric_name (str): Required. The resource name of the metric to update: @@ -711,7 +711,7 @@ def update_log_metric( def delete_log_metric( self, - request: logging_metrics.DeleteLogMetricRequest = None, + request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -721,7 +721,7 @@ def delete_log_metric( r"""Deletes a logs-based metric. Args: - request (google.cloud.logging_v2.types.DeleteLogMetricRequest): + request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. metric_name (str): Required. 
The resource name of the metric to delete: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 8ff178d2409d..7026e3858c12 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.logging_v2.types import logging_metrics @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]: + def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_metrics.LogMetric]: + def __iter__(self) -> Iterator[logging_metrics.LogMetric]: for page in self.pages: yield from page.metrics @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]: + async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]: + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: for response in page.metrics: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f62eb00461bb..1ce8b3a6bd57 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -122,7 +122,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 6a7a2c6a61a7..49e7263d42b8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -81,16 +81,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index d2d9b6188061..9ddc1975ab44 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -128,16 +128,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
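A note on the `Iterable` to `Iterator` changes in the pager hunks above: the generated `pages` properties and the `__iter__`/`__aiter__` methods are written as (async) generator functions, and every generator is an iterator (it supports `__next__`/`__anext__`), so the narrower return annotation is the accurate one. The following minimal sketch is not from the patched sources; `ToyPager` is a hypothetical stand-in for a generated pager such as `ListLogsPager`:

```python
from typing import Iterator


class ToyPager:
    """Hypothetical stand-in for a generated pager such as ListLogsPager."""

    def __init__(self, pages):
        self._pages = pages  # pretend these are ListLogsResponse pages

    @property
    def pages(self) -> Iterator[list]:
        # A generator function returns a generator, which is an Iterator,
        # not merely an Iterable; hence the tightened annotation.
        for page in self._pages:
            yield page

    def __iter__(self) -> Iterator[str]:
        # Flatten items across pages, mirroring `yield from page.log_names`.
        for page in self.pages:
            yield from page


assert list(ToyPager([["log-a", "log-b"], ["log-c"]])) == ["log-a", "log-b", "log-c"]
```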
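Likewise, the widened `request: Union[..., dict]` signatures in the metrics-service client mean a caller may pass either a typed request object or a plain dict with the same fields; the method coerces the dict through the proto-plus request constructor before sending the RPC. A hedged usage sketch, assuming Application Default Credentials are available and using a placeholder project id:

```python
from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
from google.cloud.logging_v2.types import ListLogMetricsRequest

# "my-project" is a placeholder; the client picks up default credentials.
client = MetricsServiceV2Client()

# A typed request object...
pager = client.list_log_metrics(
    request=ListLogMetricsRequest(parent="projects/my-project")
)

# ...or an equivalent plain dict, coerced into ListLogMetricsRequest internally.
pager = client.list_log_metrics(request={"parent": "projects/my-project"})

# Either way the returned pager walks next_page_token boundaries transparently.
for metric in pager:
    print(metric.name)
```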
diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 9e49bd0d9f8d..63e5d4aa0825 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -84,18 +84,18 @@ def default(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install( "mock", + "asyncmock", "pytest", "pytest-cov", - "flask", - "webob", - "django", + "pytest-asyncio", "-c", constraints_path, ) + session.install("flask", "-c", constraints_path) + session.install("webob", "-c", constraints_path) + session.install("django", "-c", constraints_path) session.install("-e", ".", "-c", constraints_path) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc b/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index b38d22e21fd1..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/logging/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in b/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index f8c276f2cce8..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/logging *.py -recursive-include google/cloud/logging_v2 *.py diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/README.rst b/packages/google-cloud-logging/owl-bot-staging/v2/README.rst deleted file mode 100644 index 56aa7d0a8ad9..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Logging API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Logging API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. 
- -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py b/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index eb6783779012..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-logging documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project.
-project = u"google-cloud-logging" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-logging-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]).
-latex_documents = [ - ( - master_doc, - "google-cloud-logging.tex", - u"google-cloud-logging Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-logging", - u"Google Cloud Logging Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-logging", - u"google-cloud-logging Documentation", - author, - "google-cloud-logging", - "GAPIC library for Google Cloud Logging API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index 6a4859643f45..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - logging_v2/services - logging_v2/types diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst deleted file mode 100644 index f7c0a7701de1..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/config_service_v2.rst +++ /dev/null @@ -1,10 +0,0 @@ -ConfigServiceV2 ---------------------------------- - -.. automodule:: google.cloud.logging_v2.services.config_service_v2 - :members: - :inherited-members: - -.. automodule:: google.cloud.logging_v2.services.config_service_v2.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst deleted file mode 100644 index f41c0c89b78c..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/logging_service_v2.rst +++ /dev/null @@ -1,10 +0,0 @@ -LoggingServiceV2 ----------------------------------- - -.. automodule:: google.cloud.logging_v2.services.logging_service_v2 - :members: - :inherited-members: - -.. automodule:: google.cloud.logging_v2.services.logging_service_v2.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst deleted file mode 100644 index fd4d9bc7d9ba..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/metrics_service_v2.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetricsServiceV2 ----------------------------------- - -.. automodule:: google.cloud.logging_v2.services.metrics_service_v2 - :members: - :inherited-members: - -.. automodule:: google.cloud.logging_v2.services.metrics_service_v2.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst deleted file mode 100644 index d7a0471b13c3..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/services.rst +++ /dev/null @@ -1,8 +0,0 @@ -Services for Google Cloud Logging v2 API -======================================== -.. toctree:: - :maxdepth: 2 - - config_service_v2 - logging_service_v2 - metrics_service_v2 diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst b/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst deleted file mode 100644 index 843c0dc370d4..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/docs/logging_v2/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Logging v2 API -===================================== - -.. 
automodule:: google.cloud.logging_v2.types - :members: - :undoc-members: - :show-inheritance: diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py deleted file mode 100644 index 16e3d0cc06cf..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/__init__.py +++ /dev/null @@ -1,143 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.cloud.logging_v2.services.config_service_v2.client import ConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2.async_client import ConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient -from google.cloud.logging_v2.services.metrics_service_v2.client import MetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2.async_client import MetricsServiceV2AsyncClient - -from google.cloud.logging_v2.types.log_entry import LogEntry -from google.cloud.logging_v2.types.log_entry import LogEntryOperation -from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation -from google.cloud.logging_v2.types.logging import DeleteLogRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesResponse -from google.cloud.logging_v2.types.logging import ListLogsRequest -from google.cloud.logging_v2.types.logging import ListLogsResponse -from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest -from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse -from google.cloud.logging_v2.types.logging import TailLogEntriesRequest -from google.cloud.logging_v2.types.logging import TailLogEntriesResponse -from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors -from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest -from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse -from google.cloud.logging_v2.types.logging_config import BigQueryOptions -from google.cloud.logging_v2.types.logging_config import CmekSettings -from google.cloud.logging_v2.types.logging_config import CreateBucketRequest -from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest -from google.cloud.logging_v2.types.logging_config import CreateSinkRequest -from google.cloud.logging_v2.types.logging_config import CreateViewRequest -from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest -from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest -from google.cloud.logging_v2.types.logging_config import DeleteViewRequest 
-from google.cloud.logging_v2.types.logging_config import GetBucketRequest -from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config import GetExclusionRequest -from google.cloud.logging_v2.types.logging_config import GetSinkRequest -from google.cloud.logging_v2.types.logging_config import GetViewRequest -from google.cloud.logging_v2.types.logging_config import ListBucketsRequest -from google.cloud.logging_v2.types.logging_config import ListBucketsResponse -from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest -from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse -from google.cloud.logging_v2.types.logging_config import ListSinksRequest -from google.cloud.logging_v2.types.logging_config import ListSinksResponse -from google.cloud.logging_v2.types.logging_config import ListViewsRequest -from google.cloud.logging_v2.types.logging_config import ListViewsResponse -from google.cloud.logging_v2.types.logging_config import LogBucket -from google.cloud.logging_v2.types.logging_config import LogExclusion -from google.cloud.logging_v2.types.logging_config import LogSink -from google.cloud.logging_v2.types.logging_config import LogView -from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest -from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest -from google.cloud.logging_v2.types.logging_config import UpdateViewRequest -from google.cloud.logging_v2.types.logging_config import LifecycleState -from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse -from google.cloud.logging_v2.types.logging_metrics import LogMetric -from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest - -__all__ = ('ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryOptions', - 'CmekSettings', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LogBucket', - 
'LogExclusion', - 'LogSink', - 'LogView', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'LifecycleState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed deleted file mode 100644 index 6c7420d0d9cb..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-logging package uses inline types. diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py deleted file mode 100644 index 1dc1e1eac254..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/__init__.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from .services.config_service_v2 import ConfigServiceV2Client -from .services.config_service_v2 import ConfigServiceV2AsyncClient -from .services.logging_service_v2 import LoggingServiceV2Client -from .services.logging_service_v2 import LoggingServiceV2AsyncClient -from .services.metrics_service_v2 import MetricsServiceV2Client -from .services.metrics_service_v2 import MetricsServiceV2AsyncClient - -from .types.log_entry import LogEntry -from .types.log_entry import LogEntryOperation -from .types.log_entry import LogEntrySourceLocation -from .types.logging import DeleteLogRequest -from .types.logging import ListLogEntriesRequest -from .types.logging import ListLogEntriesResponse -from .types.logging import ListLogsRequest -from .types.logging import ListLogsResponse -from .types.logging import ListMonitoredResourceDescriptorsRequest -from .types.logging import ListMonitoredResourceDescriptorsResponse -from .types.logging import TailLogEntriesRequest -from .types.logging import TailLogEntriesResponse -from .types.logging import WriteLogEntriesPartialErrors -from .types.logging import WriteLogEntriesRequest -from .types.logging import WriteLogEntriesResponse -from .types.logging_config import BigQueryOptions -from .types.logging_config import CmekSettings -from .types.logging_config import CreateBucketRequest -from .types.logging_config import CreateExclusionRequest -from .types.logging_config import CreateSinkRequest -from .types.logging_config import CreateViewRequest -from .types.logging_config import DeleteBucketRequest -from .types.logging_config import DeleteExclusionRequest -from .types.logging_config import DeleteSinkRequest -from .types.logging_config import DeleteViewRequest -from .types.logging_config import GetBucketRequest -from .types.logging_config import GetCmekSettingsRequest -from .types.logging_config import GetExclusionRequest -from .types.logging_config import GetSinkRequest -from .types.logging_config import GetViewRequest -from .types.logging_config import ListBucketsRequest -from .types.logging_config import ListBucketsResponse -from .types.logging_config import ListExclusionsRequest -from .types.logging_config import ListExclusionsResponse -from .types.logging_config import ListSinksRequest -from .types.logging_config import ListSinksResponse -from .types.logging_config import ListViewsRequest -from .types.logging_config import ListViewsResponse -from .types.logging_config import LogBucket -from .types.logging_config import LogExclusion -from .types.logging_config import LogSink -from .types.logging_config import LogView -from .types.logging_config import UndeleteBucketRequest -from .types.logging_config import UpdateBucketRequest -from .types.logging_config import UpdateCmekSettingsRequest -from .types.logging_config import UpdateExclusionRequest -from .types.logging_config import UpdateSinkRequest -from .types.logging_config import UpdateViewRequest -from .types.logging_config import LifecycleState -from .types.logging_metrics import CreateLogMetricRequest -from .types.logging_metrics import DeleteLogMetricRequest -from .types.logging_metrics import GetLogMetricRequest -from .types.logging_metrics import ListLogMetricsRequest -from .types.logging_metrics import ListLogMetricsResponse -from .types.logging_metrics import LogMetric -from .types.logging_metrics import UpdateLogMetricRequest - -__all__ = ( - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2AsyncClient', -'BigQueryOptions', -'CmekSettings', -'ConfigServiceV2Client', 
-'CreateBucketRequest', -'CreateExclusionRequest', -'CreateLogMetricRequest', -'CreateSinkRequest', -'CreateViewRequest', -'DeleteBucketRequest', -'DeleteExclusionRequest', -'DeleteLogMetricRequest', -'DeleteLogRequest', -'DeleteSinkRequest', -'DeleteViewRequest', -'GetBucketRequest', -'GetCmekSettingsRequest', -'GetExclusionRequest', -'GetLogMetricRequest', -'GetSinkRequest', -'GetViewRequest', -'LifecycleState', -'ListBucketsRequest', -'ListBucketsResponse', -'ListExclusionsRequest', -'ListExclusionsResponse', -'ListLogEntriesRequest', -'ListLogEntriesResponse', -'ListLogMetricsRequest', -'ListLogMetricsResponse', -'ListLogsRequest', -'ListLogsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListSinksRequest', -'ListSinksResponse', -'ListViewsRequest', -'ListViewsResponse', -'LogBucket', -'LogEntry', -'LogEntryOperation', -'LogEntrySourceLocation', -'LogExclusion', -'LogMetric', -'LogSink', -'LogView', -'LoggingServiceV2Client', -'MetricsServiceV2Client', -'TailLogEntriesRequest', -'TailLogEntriesResponse', -'UndeleteBucketRequest', -'UpdateBucketRequest', -'UpdateCmekSettingsRequest', -'UpdateExclusionRequest', -'UpdateLogMetricRequest', -'UpdateSinkRequest', -'UpdateViewRequest', -'WriteLogEntriesPartialErrors', -'WriteLogEntriesRequest', -'WriteLogEntriesResponse', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json deleted file mode 100644 index da4eefd477fc..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/gapic_metadata.json +++ /dev/null @@ -1,391 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.logging_v2", - "protoPackage": "google.logging.v2", - "schema": "1.0", - "services": { - "ConfigServiceV2": { - "clients": { - "grpc": { - "libraryClient": "ConfigServiceV2Client", - "rpcs": { - "CreateBucket": { - "methods": [ - "create_bucket" - ] - }, - "CreateExclusion": { - "methods": [ - "create_exclusion" - ] - }, - "CreateSink": { - "methods": [ - "create_sink" - ] - }, - "CreateView": { - "methods": [ - "create_view" - ] - }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "DeleteExclusion": { - "methods": [ - "delete_exclusion" - ] - }, - "DeleteSink": { - "methods": [ - "delete_sink" - ] - }, - "DeleteView": { - "methods": [ - "delete_view" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "GetCmekSettings": { - "methods": [ - "get_cmek_settings" - ] - }, - "GetExclusion": { - "methods": [ - "get_exclusion" - ] - }, - "GetSink": { - "methods": [ - "get_sink" - ] - }, - "GetView": { - "methods": [ - "get_view" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "ListExclusions": { - "methods": [ - "list_exclusions" - ] - }, - "ListSinks": { - "methods": [ - "list_sinks" - ] - }, - "ListViews": { - "methods": [ - "list_views" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateCmekSettings": { - "methods": [ - "update_cmek_settings" - ] - }, - "UpdateExclusion": { - "methods": [ - "update_exclusion" - ] - }, - "UpdateSink": { - "methods": [ - "update_sink" - ] - }, - "UpdateView": { - "methods": [ - "update_view" - ] - } - } - }, - "grpc-async": { - "libraryClient": 
"ConfigServiceV2AsyncClient", - "rpcs": { - "CreateBucket": { - "methods": [ - "create_bucket" - ] - }, - "CreateExclusion": { - "methods": [ - "create_exclusion" - ] - }, - "CreateSink": { - "methods": [ - "create_sink" - ] - }, - "CreateView": { - "methods": [ - "create_view" - ] - }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "DeleteExclusion": { - "methods": [ - "delete_exclusion" - ] - }, - "DeleteSink": { - "methods": [ - "delete_sink" - ] - }, - "DeleteView": { - "methods": [ - "delete_view" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "GetCmekSettings": { - "methods": [ - "get_cmek_settings" - ] - }, - "GetExclusion": { - "methods": [ - "get_exclusion" - ] - }, - "GetSink": { - "methods": [ - "get_sink" - ] - }, - "GetView": { - "methods": [ - "get_view" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "ListExclusions": { - "methods": [ - "list_exclusions" - ] - }, - "ListSinks": { - "methods": [ - "list_sinks" - ] - }, - "ListViews": { - "methods": [ - "list_views" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateCmekSettings": { - "methods": [ - "update_cmek_settings" - ] - }, - "UpdateExclusion": { - "methods": [ - "update_exclusion" - ] - }, - "UpdateSink": { - "methods": [ - "update_sink" - ] - }, - "UpdateView": { - "methods": [ - "update_view" - ] - } - } - } - } - }, - "LoggingServiceV2": { - "clients": { - "grpc": { - "libraryClient": "LoggingServiceV2Client", - "rpcs": { - "DeleteLog": { - "methods": [ - "delete_log" - ] - }, - "ListLogEntries": { - "methods": [ - "list_log_entries" - ] - }, - "ListLogs": { - "methods": [ - "list_logs" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "TailLogEntries": { - "methods": [ - "tail_log_entries" - ] - }, - "WriteLogEntries": { - "methods": [ - "write_log_entries" - ] - } - } - }, - "grpc-async": { - "libraryClient": "LoggingServiceV2AsyncClient", - "rpcs": { - "DeleteLog": { - "methods": [ - "delete_log" - ] - }, - "ListLogEntries": { - "methods": [ - "list_log_entries" - ] - }, - "ListLogs": { - "methods": [ - "list_logs" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "TailLogEntries": { - "methods": [ - "tail_log_entries" - ] - }, - "WriteLogEntries": { - "methods": [ - "write_log_entries" - ] - } - } - } - } - }, - "MetricsServiceV2": { - "clients": { - "grpc": { - "libraryClient": "MetricsServiceV2Client", - "rpcs": { - "CreateLogMetric": { - "methods": [ - "create_log_metric" - ] - }, - "DeleteLogMetric": { - "methods": [ - "delete_log_metric" - ] - }, - "GetLogMetric": { - "methods": [ - "get_log_metric" - ] - }, - "ListLogMetrics": { - "methods": [ - "list_log_metrics" - ] - }, - "UpdateLogMetric": { - "methods": [ - "update_log_metric" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetricsServiceV2AsyncClient", - "rpcs": { - "CreateLogMetric": { - "methods": [ - "create_log_metric" - ] - }, - "DeleteLogMetric": { - "methods": [ - "delete_log_metric" - ] - }, - "GetLogMetric": { - "methods": [ - "get_log_metric" - ] - }, - "ListLogMetrics": { - "methods": [ - "list_log_metrics" - ] - }, - "UpdateLogMetric": { - "methods": [ - "update_log_metric" - ] - } - } - } - } - } - } -} diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed 
b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed deleted file mode 100644 index 6c7420d0d9cb..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-logging package uses inline types. diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py deleted file mode 100644 index 4de65971c238..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py deleted file mode 100644 index 2b27a12e93f8..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ConfigServiceV2Client -from .async_client import ConfigServiceV2AsyncClient - -__all__ = ( - 'ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py deleted file mode 100644 index 82e84aab817c..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ /dev/null @@ -1,2016 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport -from .client import ConfigServiceV2Client - - -class ConfigServiceV2AsyncClient: - """Service for configuring sinks used to route log entries.""" - - _client: ConfigServiceV2Client - - DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - - cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) - parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) - log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) - parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) - log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) - parse_log_exclusion_path = staticmethod(ConfigServiceV2Client.parse_log_exclusion_path) - log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) - parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) - log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) - parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) - common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) - common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(ConfigServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(ConfigServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(ConfigServiceV2Client.parse_common_organization_path) - common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(ConfigServiceV2Client.parse_common_project_path) - common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(ConfigServiceV2Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2AsyncClient: The constructed client. 
- """ - return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2AsyncClient: The constructed client. - """ - return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ConfigServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - ConfigServiceV2Transport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the config service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ConfigServiceV2Client( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_buckets(self, - request: logging_config.ListBucketsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets. - - Args: - request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): - The request object. 
The parameters to `ListBuckets`. - parent (:class:`str`): - Required. The parent resource whose buckets are to be - listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]" - - Note: The locations portion of the resource must be - specified, but supplying the character ``-`` in place of - [LOCATION_ID] will return all buckets. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: - The response from ListBuckets. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_config.ListBucketsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_buckets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBucketsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_bucket(self, - request: logging_config.GetBucketRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Gets a bucket. - - Args: - request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): - The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. - """ - # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
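-        # Note: `gapic_v1.method_async.wrap_method` (from google.api_core)
-        # returns a coroutine function that applies the defaults configured
-        # below while still honoring per-call `retry` and `timeout` arguments.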
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_bucket,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_bucket(self,
-            request: logging_config.CreateBucketRequest = None,
-            *,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogBucket:
-        r"""Creates a bucket that can be used to store log
-        entries. Once a bucket has been created, the region
-        cannot be changed.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`):
-                The request object. The parameters to `CreateBucket`.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogBucket:
-                Describes a repository of logs.
-        """
-        # Create or coerce a protobuf request object.
-        request = logging_config.CreateBucketRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.create_bucket,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_bucket(self,
-            request: logging_config.UpdateBucketRequest = None,
-            *,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogBucket:
-        r"""Updates a bucket. This method replaces the following fields in
-        the existing bucket with values from the new bucket:
-        ``retention_period``
-
-        If the retention period is decreased and the bucket is locked,
-        FAILED_PRECONDITION will be returned.
-
-        If the bucket has a LifecycleState of DELETE_REQUESTED,
-        FAILED_PRECONDITION will be returned.
-
-        A bucket's region may not be modified after it is created.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`):
-                The request object. The parameters to `UpdateBucket`.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogBucket:
-                Describes a repository of logs.
-        """
-        # Create or coerce a protobuf request object.
-        request = logging_config.UpdateBucketRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
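-        # Note: no `default_retry` is configured for UpdateBucket, so a
-        # failed call is not retried unless the caller passes an explicit
-        # `retry` argument.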
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_bucket(self, - request: logging_config.DeleteBucketRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. - - Args: - request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): - The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def undelete_bucket(self, - request: logging_config.UndeleteBucketRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. - - Args: - request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): - The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = logging_config.UndeleteBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
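-        # UndeleteBucket returns google.protobuf.Empty, so the awaited
-        # result is discarded rather than returned.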
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_views(self, - request: logging_config.ListViewsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListViewsAsyncPager: - r"""Lists views on a bucket. - - Args: - request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): - The request object. The parameters to `ListViews`. - parent (:class:`str`): - Required. The bucket whose views are to be listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: - The response from ListViews. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_config.ListViewsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_views, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListViewsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_view(self, - request: logging_config.GetViewRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Gets a view. - - Args: - request (:class:`google.cloud.logging_v2.types.GetViewRequest`): - The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. 
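-        # proto-plus request constructors accept an existing message, a
-        # dict of fields, or None (which yields an empty request), so this
-        # works whether or not a `request` argument was supplied.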
- request = logging_config.GetViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_view(self, - request: logging_config.CreateViewRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. - - Args: - request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): - The request object. The parameters to `CreateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_view(self, - request: logging_config.UpdateViewRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. - - Args: - request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): - The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
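-        # `to_grpc_metadata` builds the `x-goog-request-params` header from
-        # the resource name so the backend can route the request.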
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_view(self, - request: logging_config.DeleteViewRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a view from a bucket. - - Args: - request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): - The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_sinks(self, - request: logging_config.ListSinksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSinksAsyncPager: - r"""Lists sinks. - - Args: - request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): - The request object. The parameters to `ListSinks`. - parent (:class:`str`): - Required. The parent resource whose sinks are to be - listed: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: - Result returned from ListSinks. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_config.ListSinksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
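-        # The default policy below retries only transient errors
-        # (DEADLINE_EXCEEDED, INTERNAL, UNAVAILABLE), backing off from a
-        # 0.1s initial delay by a factor of 1.3 per attempt, capped at 60s
-        # per delay with a 60s overall deadline.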
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.list_sinks,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListSinksAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_sink(self,
-            request: logging_config.GetSinkRequest = None,
-            *,
-            sink_name: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogSink:
-        r"""Gets a sink.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.GetSinkRequest`):
-                The request object. The parameters to `GetSink`.
-            sink_name (:class:`str`):
-                Required. The resource name of the sink:
-
-                ::
-
-                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
-                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
-                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
-
-                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
-
-                This corresponds to the ``sink_name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogSink:
-                Describes a sink used to export log
-                entries to one of the following
-                destinations in any project: a Cloud
-                Storage bucket, a BigQuery dataset, or a
-                Cloud Pub/Sub topic. A logs filter
-                controls which log entries are exported.
-                The sink must be created within a
-                project, organization, billing account,
-                or folder.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([sink_name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.GetSinkRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if sink_name is not None:
-            request.sink_name = sink_name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_sink,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("sink_name", request.sink_name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_sink(self,
-            request: logging_config.CreateSinkRequest = None,
-            *,
-            parent: str = None,
-            sink: logging_config.LogSink = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogSink:
-        r"""Creates a sink that exports specified log entries to a
-        destination. The export of newly-ingested log entries begins
-        immediately, unless the sink's ``writer_identity`` is not
-        permitted to write to the destination. A sink can export log
-        entries only from the resource owning the sink.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`):
-                The request object. The parameters to `CreateSink`.
-            parent (:class:`str`):
-                Required. The resource in which to create the sink:
-
-                ::
-
-                    "projects/[PROJECT_ID]"
-                    "organizations/[ORGANIZATION_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]"
-                    "folders/[FOLDER_ID]"
-
-                Examples: ``"projects/my-logging-project"``,
-                ``"organizations/123456789"``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            sink (:class:`google.cloud.logging_v2.types.LogSink`):
-                Required. The new sink, whose ``name`` parameter is a
-                sink identifier that is not already in use.
-
-                This corresponds to the ``sink`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogSink:
-                Describes a sink used to export log
-                entries to one of the following
-                destinations in any project: a Cloud
-                Storage bucket, a BigQuery dataset, or a
-                Cloud Pub/Sub topic. A logs filter
-                controls which log entries are exported.
-                The sink must be created within a
-                project, organization, billing account,
-                or folder.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, sink])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.CreateSinkRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if sink is not None:
-            request.sink = sink
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.create_sink,
-            default_timeout=120.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_sink(self,
-            request: logging_config.UpdateSinkRequest = None,
-            *,
-            sink_name: str = None,
-            sink: logging_config.LogSink = None,
-            update_mask: field_mask_pb2.FieldMask = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogSink:
-        r"""Updates a sink. This method replaces the following fields in the
-        existing sink with values from the new sink: ``destination``
-        and ``filter``.
-
-        The updated sink might also have a new ``writer_identity``; see
-        the ``unique_writer_identity`` field.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`):
-                The request object. The parameters to `UpdateSink`.
-            sink_name (:class:`str`):
-                Required. The full resource name of the sink to update,
-                including the parent resource and the sink identifier:
-
-                ::
-
-                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
-                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
-                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
-
-                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
-
-                This corresponds to the ``sink_name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            sink (:class:`google.cloud.logging_v2.types.LogSink`):
-                Required. The updated sink, whose name is the same
-                identifier that appears as part of ``sink_name``.
-
-                This corresponds to the ``sink`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                Optional. Field mask that specifies the fields in
-                ``sink`` that need an update. A sink field will be
-                overwritten if, and only if, it is in the update mask.
-                ``name`` and output only fields cannot be updated.
-
-                An empty updateMask is temporarily treated as using the
-                following mask for backwards compatibility purposes:
-                ``destination,filter,includeChildren``. At some point in the
-                future, this behavior will be removed and specifying an empty
-                updateMask will be an error.
-
-                For a detailed ``FieldMask`` definition, see
-                https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
-
-                Example: ``updateMask=filter``.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogSink:
-                Describes a sink used to export log
-                entries to one of the following
-                destinations in any project: a Cloud
-                Storage bucket, a BigQuery dataset, or a
-                Cloud Pub/Sub topic. A logs filter
-                controls which log entries are exported.
-                The sink must be created within a
-                project, organization, billing account,
-                or folder.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([sink_name, sink, update_mask])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.UpdateSinkRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if sink_name is not None:
-            request.sink_name = sink_name
-        if sink is not None:
-            request.sink = sink
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.update_sink,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("sink_name", request.sink_name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def delete_sink(self,
-            request: logging_config.DeleteSinkRequest = None,
-            *,
-            sink_name: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> None:
-        r"""Deletes a sink. If the sink has a unique ``writer_identity``,
-        then that service account is also deleted.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`):
-                The request object. The parameters to `DeleteSink`.
-            sink_name (:class:`str`):
-                Required. The full resource name of the sink to delete,
-                including the parent resource and the sink identifier:
-
-                ::
-
-                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
-                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
-                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"
-
-                Example: ``"projects/my-project-id/sinks/my-sink-id"``.
-
-                This corresponds to the ``sink_name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([sink_name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.DeleteSinkRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if sink_name is not None:
-            request.sink_name = sink_name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.delete_sink,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("sink_name", request.sink_name),
-            )),
-        )
-
-        # Send the request.
-        await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    async def list_exclusions(self,
-            request: logging_config.ListExclusionsRequest = None,
-            *,
-            parent: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListExclusionsAsyncPager:
-        r"""Lists all the exclusions in a parent resource.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`):
-                The request object. The parameters to `ListExclusions`.
-            parent (:class:`str`):
-                Required. The parent resource whose exclusions are to be
-                listed.
-
-                ::
-
-                    "projects/[PROJECT_ID]"
-                    "organizations/[ORGANIZATION_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]"
-                    "folders/[FOLDER_ID]"
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager:
-                Result returned from ListExclusions.
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.ListExclusionsRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.list_exclusions,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListExclusionsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_exclusion(self,
-            request: logging_config.GetExclusionRequest = None,
-            *,
-            name: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogExclusion:
-        r"""Gets the description of an exclusion.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`):
-                The request object. The parameters to `GetExclusion`.
-            name (:class:`str`):
-                Required. The resource name of an existing exclusion:
-
-                ::
-
-                    "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
-                    "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
-                    "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
-
-                Example:
-                ``"projects/my-project-id/exclusions/my-exclusion-id"``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogExclusion:
-                Specifies a set of log entries that
-                are not to be stored in Logging. If your
-                GCP resource receives a large volume of
-                logs, you can use exclusions to reduce
-                your chargeable logs. Exclusions are
-                processed after log sinks, so you can
-                export log entries before they are
-                excluded. Note that organization-level
-                and folder-level exclusions don't apply
-                to child resources, and that you can't
-                exclude audit log entries.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.GetExclusionRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_exclusion,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.InternalServerError,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_exclusion(self,
-            request: logging_config.CreateExclusionRequest = None,
-            *,
-            parent: str = None,
-            exclusion: logging_config.LogExclusion = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> logging_config.LogExclusion:
-        r"""Creates a new exclusion in a specified parent
-        resource. Only log entries belonging to that resource
-        can be excluded. You can have up to 10 exclusions in a
-        resource.
-
-        Args:
-            request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`):
-                The request object. The parameters to `CreateExclusion`.
-            parent (:class:`str`):
-                Required. The parent resource in which to create the
-                exclusion:
-
-                ::
-
-                    "projects/[PROJECT_ID]"
-                    "organizations/[ORGANIZATION_ID]"
-                    "billingAccounts/[BILLING_ACCOUNT_ID]"
-                    "folders/[FOLDER_ID]"
-
-                Examples: ``"projects/my-logging-project"``,
-                ``"organizations/123456789"``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`):
-                Required. The new exclusion, whose ``name`` parameter is
-                an exclusion name that is not already used in the parent
-                resource.
-
-                This corresponds to the ``exclusion`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.logging_v2.types.LogExclusion:
-                Specifies a set of log entries that
-                are not to be stored in Logging. If your
-                GCP resource receives a large volume of
-                logs, you can use exclusions to reduce
-                your chargeable logs. Exclusions are
-                processed after log sinks, so you can
-                export log entries before they are
-                excluded. Note that organization-level
-                and folder-level exclusions don't apply
-                to child resources, and that you can't
-                exclude audit log entries.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, exclusion])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = logging_config.CreateExclusionRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
- if parent is not None: - request.parent = parent - if exclusion is not None: - request.exclusion = exclusion - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_exclusion(self, - request: logging_config.UpdateExclusionRequest = None, - *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. - - Args: - request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): - The request object. The parameters to `UpdateExclusion`. - name (:class:`str`): - Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): - Required. New values for the existing exclusion. Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A non-empty list of fields to change in the - existing exclusion. New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
- - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_config.UpdateExclusionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_exclusion(self, - request: logging_config.DeleteExclusionRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. - - Args: - request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): - The request object. The parameters to `DeleteExclusion`. - name (:class:`str`): - Required. The resource name of an existing exclusion to - delete: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_config.DeleteExclusionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_cmek_settings(self, - request: logging_config.GetCmekSettingsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Args: - request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. - - """ - # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_cmek_settings(self, - request: logging_config.UpdateCmekSettingsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. 
Once configured, it applies to all - projects and folders in the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Args: - request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. - - """ - # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "ConfigServiceV2AsyncClient", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py deleted file mode 100644 index acf10f2292b8..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/client.py +++ /dev/null @@ -1,2198 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import ConfigServiceV2GrpcTransport -from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport - - -class ConfigServiceV2ClientMeta(type): - """Metaclass for the ConfigServiceV2 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] - _transport_registry["grpc"] = ConfigServiceV2GrpcTransport - _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[ConfigServiceV2Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta): - """Service for configuring sinks used to route log entries.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
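# A quick sketch of what the endpoint-rewriting regex above does, assuming
# the same helper shipped in the released package; _get_default_mtls_endpoint
# is the private staticmethod defined above.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

to_mtls = ConfigServiceV2Client._get_default_mtls_endpoint
assert to_mtls("logging.googleapis.com") == "logging.mtls.googleapis.com"
assert to_mtls("logging.sandbox.googleapis.com") == "logging.mtls.sandbox.googleapis.com"
# Already-mTLS and non-Google endpoints pass through unchanged.
assert to_mtls("logging.mtls.googleapis.com") == "logging.mtls.googleapis.com"
assert to_mtls("example.com") == "example.com"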
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConfigServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ConfigServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - ConfigServiceV2Transport: The transport used by the client - instance. 
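# A minimal sketch of the credential-loading conveniences above, assuming the
# released package; "key.json" is a placeholder service-account key path.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client.from_service_account_file("key.json")
# from_service_account_json is an alias for the same classmethod, and the
# transport property exposes the underlying (gRPC) transport instance.
print(type(client.transport).__name__)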
- """ - return self._transport - - @staticmethod - def cmek_settings_path(project: str,) -> str: - """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project, ) - - @staticmethod - def parse_cmek_settings_path(path: str) -> Dict[str,str]: - """Parses a cmek_settings path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) - return m.groupdict() if m else {} - - @staticmethod - def log_bucket_path(project: str,location: str,bucket: str,) -> str: - """Returns a fully-qualified log_bucket string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) - - @staticmethod - def parse_log_bucket_path(path: str) -> Dict[str,str]: - """Parses a log_bucket path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def log_exclusion_path(project: str,exclusion: str,) -> str: - """Returns a fully-qualified log_exclusion string.""" - return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) - - @staticmethod - def parse_log_exclusion_path(path: str) -> Dict[str,str]: - """Parses a log_exclusion path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def log_sink_path(project: str,sink: str,) -> str: - """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) - - @staticmethod - def parse_log_sink_path(path: str) -> Dict[str,str]: - """Parses a log_sink path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: - """Returns a fully-qualified log_view string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) - - @staticmethod - def parse_log_view_path(path: str) -> Dict[str,str]: - """Parses a log_view path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def 
parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the config service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed.
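# A sketch of the client_options handling documented above, assuming
# Application Default Credentials are available; the endpoint is a
# placeholder for a regional or private service endpoint.
from google.api_core.client_options import ClientOptions
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

options = ClientOptions(api_endpoint="logging.example-endpoint.googleapis.com")
client = ConfigServiceV2Client(client_options=options)
# A plain dict such as {"api_endpoint": ...} is also accepted and coerced
# via client_options_lib.from_dict(), per the isinstance check above.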
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, ConfigServiceV2Transport): - # transport is a ConfigServiceV2Transport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_buckets(self, - request: Union[logging_config.ListBucketsRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBucketsPager: - r"""Lists buckets. - - Args: - request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): - The request object. The parameters to `ListBuckets`. - parent (str): - Required. The parent resource whose buckets are to be - listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]" - - Note: The locations portion of the resource must be - specified, but supplying the character ``-`` in place of - [LOCATION_ID] will return all buckets. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
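# The GOOGLE_API_USE_MTLS_ENDPOINT branches above, restated as a small pure
# function for clarity (a sketch, not library code):
def pick_endpoint(use_mtls_env: str, is_mtls: bool,
                  default: str = "logging.googleapis.com",
                  default_mtls: str = "logging.mtls.googleapis.com") -> str:
    if use_mtls_env == "never":
        return default
    if use_mtls_env == "always":
        return default_mtls
    if use_mtls_env == "auto":  # the default value
        return default_mtls if is_mtls else default
    raise ValueError("Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value.")

assert pick_endpoint("auto", is_mtls=False) == "logging.googleapis.com"
assert pick_endpoint("always", is_mtls=False) == "logging.mtls.googleapis.com"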
- - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: - The response from ListBuckets. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.ListBucketsRequest): - request = logging_config.ListBucketsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_buckets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBucketsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_bucket(self, - request: Union[logging_config.GetBucketRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Gets a bucket. - - Args: - request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): - The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.GetBucketRequest): - request = logging_config.GetBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_bucket] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
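# A usage sketch for the paged list_buckets call above, assuming Application
# Default Credentials; the project ID is a placeholder, and "-" lists buckets
# across all locations as the docstring notes.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()
pager = client.list_buckets(parent="projects/my-project/locations/-")
for bucket in pager:  # the pager resolves additional pages transparently
    print(bucket.name, bucket.retention_days)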
- return response - - def create_bucket(self, - request: Union[logging_config.CreateBucketRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. - - Args: - request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): - The request object. The parameters to `CreateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.CreateBucketRequest): - request = logging_config.CreateBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_bucket] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_bucket(self, - request: Union[logging_config.UpdateBucketRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. - - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): - The request object. The parameters to `UpdateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateBucketRequest): - request = logging_config.UpdateBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_bucket] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_bucket(self, - request: Union[logging_config.DeleteBucketRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): - The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteBucketRequest): - request = logging_config.DeleteBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_bucket] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def undelete_bucket(self, - request: Union[logging_config.UndeleteBucketRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. - - Args: - request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): - The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UndeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UndeleteBucketRequest): - request = logging_config.UndeleteBucketRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] - - # Certain fields should be provided within the metadata header; - # add these here. 
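# A sketch of the bucket lifecycle calls above; the bucket name is a
# placeholder. These methods take only a request object or dict -- there are
# no flattened fields.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()
name = "projects/my-project/locations/global/buckets/my-bucket"
client.delete_bucket(request={"name": name})    # moves to DELETE_REQUESTED
client.undelete_bucket(request={"name": name})  # within the 7-day grace period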
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_views(self, - request: Union[logging_config.ListViewsRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListViewsPager: - r"""Lists views on a bucket. - - Args: - request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): - The request object. The parameters to `ListViews`. - parent (str): - Required. The bucket whose views are to be listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: - The response from ListViews. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListViewsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.ListViewsRequest): - request = logging_config.ListViewsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_views] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListViewsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_view(self, - request: Union[logging_config.GetViewRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Gets a view. - - Args: - request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): - The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.GetViewRequest): - request = logging_config.GetViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_view] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_view(self, - request: Union[logging_config.CreateViewRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. - - Args: - request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): - The request object. The parameters to `CreateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.CreateViewRequest): - request = logging_config.CreateViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_view] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_view(self, - request: Union[logging_config.UpdateViewRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): - The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
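# A sketch for the view methods above; IDs and the filter are placeholders.
# Per the create_view docstring, a bucket may hold at most 50 views.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()
view = client.create_view(request={
    "parent": "projects/my-project/locations/global/buckets/my-bucket",
    "view_id": "errors-only",
    "view": {"filter": "severity>=ERROR"},
})
print(view.name)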
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateViewRequest): - request = logging_config.UpdateViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_view] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_view(self, - request: Union[logging_config.DeleteViewRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a view from a bucket. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): - The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteViewRequest): - request = logging_config.DeleteViewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_view] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_sinks(self, - request: Union[logging_config.ListSinksRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSinksPager: - r"""Lists sinks. - - Args: - request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): - The request object. The parameters to `ListSinks`. - parent (str): - Required. The parent resource whose sinks are to be - listed: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: - Result returned from ListSinks. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListSinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.ListSinksRequest): - request = logging_config.ListSinksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sinks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSinksPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_sink(self, - request: Union[logging_config.GetSinkRequest, dict] = None, - *, - sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogSink: - r"""Gets a sink. - - Args: - request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): - The request object. The parameters to `GetSink`. - sink_name (str): - Required. The resource name of the sink: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - - This corresponds to the ``sink_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.GetSinkRequest): - request = logging_config.GetSinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if sink_name is not None: - request.sink_name = sink_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_sink] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_sink(self, - request: Union[logging_config.CreateSinkRequest, dict] = None, - *, - parent: str = None, - sink: logging_config.LogSink = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogSink: - r"""Creates a sink that exports specified log entries to a - destination. The export of newly-ingested log entries begins - immediately, unless the sink's ``writer_identity`` is not - permitted to write to the destination. A sink can export log - entries only from the resource owning the sink. - - Args: - request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): - The request object. The parameters to `CreateSink`. - parent (str): - Required. The resource in which to create the sink: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - sink (google.cloud.logging_v2.types.LogSink): - Required. The new sink, whose ``name`` parameter is a - sink identifier that is not already in use. - - This corresponds to the ``sink`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. - - """ - # Create or coerce a protobuf request object. 
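# A sketch of create_sink using the flattened `parent` and `sink` arguments
# accepted above; the destination bucket is a placeholder.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import LogSink

client = ConfigServiceV2Client()
sink = LogSink(
    name="my-sink",
    destination="storage.googleapis.com/my-export-bucket",
    filter="severity>=WARNING",
)
created = client.create_sink(parent="projects/my-project", sink=sink)
# Grant created.writer_identity write access to the destination before
# entries will be exported.
print(created.writer_identity)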
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.CreateSinkRequest): - request = logging_config.CreateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if sink is not None: - request.sink = sink - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_sink] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_sink(self, - request: Union[logging_config.UpdateSinkRequest, dict] = None, - *, - sink_name: str = None, - sink: logging_config.LogSink = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogSink: - r"""Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, - and ``filter``. - - The updated sink might also have a new ``writer_identity``; see - the ``unique_writer_identity`` field. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): - The request object. The parameters to `UpdateSink`. - sink_name (str): - Required. The full resource name of the sink to update, - including the parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - - This corresponds to the ``sink_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - sink (google.cloud.logging_v2.types.LogSink): - Required. The updated sink, whose name is the same - identifier that appears as part of ``sink_name``. - - This corresponds to the ``sink`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Field mask that specifies the fields in - ``sink`` that need an update. A sink field will be - overwritten if, and only if, it is in the update mask. - ``name`` and output only fields cannot be updated. - - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. 
- - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=filter``. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateSinkRequest): - request = logging_config.UpdateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if sink_name is not None: - request.sink_name = sink_name - if sink is not None: - request.sink = sink - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_sink] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_sink(self, - request: Union[logging_config.DeleteSinkRequest, dict] = None, - *, - sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a sink. If the sink has a unique ``writer_identity``, - then that service account is also deleted. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): - The request object. The parameters to `DeleteSink`. - sink_name (str): - Required. The full resource name of the sink to delete, - including the parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - - This corresponds to the ``sink_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
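# A sketch of update_sink with an explicit FieldMask, matching the
# update_mask semantics documented above; names are placeholders.
from google.protobuf import field_mask_pb2
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import LogSink

client = ConfigServiceV2Client()
updated = client.update_sink(
    sink_name="projects/my-project/sinks/my-sink",
    sink=LogSink(filter="severity>=ERROR"),
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),  # only `filter` changes
)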
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteSinkRequest): - request = logging_config.DeleteSinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if sink_name is not None: - request.sink_name = sink_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_sink] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_exclusions(self, - request: Union[logging_config.ListExclusionsRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions in a parent resource. - - Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to `ListExclusions`. - parent (str): - Required. The parent resource whose exclusions are to be - listed. - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: - Result returned from ListExclusions. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, logging_config.ListExclusionsRequest): - request = logging_config.ListExclusionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_exclusions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListExclusionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_exclusion(self, - request: Union[logging_config.GetExclusionRequest, dict] = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. - - Args: - request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): - The request object. The parameters to `GetExclusion`. - name (str): - Required. The resource name of an existing exclusion: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
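# A sketch of the exclusion read methods above; IDs are placeholders.
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()
for exclusion in client.list_exclusions(parent="projects/my-project"):
    print(exclusion.name, exclusion.filter)
exclusion = client.get_exclusion(
    name="projects/my-project/exclusions/my-exclusion"
)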
- if not isinstance(request, logging_config.GetExclusionRequest): - request = logging_config.GetExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_exclusion] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_exclusion(self, - request: Union[logging_config.CreateExclusionRequest, dict] = None, - *, - parent: str = None, - exclusion: logging_config.LogExclusion = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. - - Args: - request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): - The request object. The parameters to `CreateExclusion`. - parent (str): - Required. The parent resource in which to create the - exclusion: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. The new exclusion, whose ``name`` parameter is - an exclusion name that is not already used in the parent - resource. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.CreateExclusionRequest): - request = logging_config.CreateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if exclusion is not None: - request.exclusion = exclusion - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_exclusion] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_exclusion(self, - request: Union[logging_config.UpdateExclusionRequest, dict] = None, - *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): - The request object. The parameters to `UpdateExclusion`. - name (str): - Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. New values for the existing exclusion. Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A non-empty list of fields to change in the - existing exclusion. New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. 
Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateExclusionRequest): - request = logging_config.UpdateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_exclusion] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_exclusion(self, - request: Union[logging_config.DeleteExclusionRequest, dict] = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): - The request object. The parameters to `DeleteExclusion`. - name (str): - Required. The resource name of an existing exclusion to - delete: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteExclusionRequest): - request = logging_config.DeleteExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_cmek_settings(self, - request: Union[logging_config.GetCmekSettingsRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ - for more information. - - Args: - request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.GetCmekSettingsRequest): - request = logging_config.GetCmekSettingsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - def update_cmek_settings(self, - request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ - for more information. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateCmekSettingsRequest): - request = logging_config.UpdateCmekSettingsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "ConfigServiceV2Client", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py deleted file mode 100644 index 43e0084a0019..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ /dev/null @@ -1,506 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.logging_v2.types import logging_config - - -class ListBucketsPager: - """A pager for iterating through ``list_buckets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``buckets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBuckets`` requests and continue to iterate - through the ``buckets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging_config.ListBucketsResponse], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListBucketsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListBucketsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging_config.ListBucketsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging_config.ListBucketsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[logging_config.LogBucket]: - for page in self.pages: - yield from page.buckets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBucketsAsyncPager: - """A pager for iterating through ``list_buckets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``buckets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBuckets`` requests and continue to iterate - through the ``buckets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListBucketsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListBucketsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging_config.ListBucketsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: - async def async_generator(): - async for page in self.pages: - for response in page.buckets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListViewsPager: - """A pager for iterating through ``list_views`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``views`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListViews`` requests and continue to iterate - through the ``views`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging_config.ListViewsResponse], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListViewsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListViewsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging_config.ListViewsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging_config.ListViewsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[logging_config.LogView]: - for page in self.pages: - yield from page.views - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListViewsAsyncPager: - """A pager for iterating through ``list_views`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``views`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListViews`` requests and continue to iterate - through the ``views`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListViewsResponse]], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListViewsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListViewsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging_config.ListViewsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[logging_config.LogView]: - async def async_generator(): - async for page in self.pages: - for response in page.views: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSinksPager: - """A pager for iterating through ``list_sinks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sinks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSinks`` requests and continue to iterate - through the ``sinks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging_config.ListSinksResponse], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListSinksRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListSinksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging_config.ListSinksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging_config.ListSinksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[logging_config.LogSink]: - for page in self.pages: - yield from page.sinks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSinksAsyncPager: - """A pager for iterating through ``list_sinks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sinks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSinks`` requests and continue to iterate - through the ``sinks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListSinksResponse]], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListSinksRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListSinksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging_config.ListSinksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: - async def async_generator(): - async for page in self.pages: - for response in page.sinks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListExclusionsPager: - """A pager for iterating through ``list_exclusions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``exclusions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListExclusions`` requests and continue to iterate - through the ``exclusions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging_config.ListExclusionsResponse], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListExclusionsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListExclusionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging_config.ListExclusionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[logging_config.LogExclusion]: - for page in self.pages: - yield from page.exclusions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListExclusionsAsyncPager: - """A pager for iterating through ``list_exclusions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``exclusions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListExclusions`` requests and continue to iterate - through the ``exclusions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListExclusionsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListExclusionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging_config.ListExclusionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: - async def async_generator(): - async for page in self.pages: - for response in page.exclusions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py deleted file mode 100644 index 6e18c331ff70..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ConfigServiceV2Transport -from .grpc import ConfigServiceV2GrpcTransport -from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] -_transport_registry['grpc'] = ConfigServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport - -__all__ = ( - 'ConfigServiceV2Transport', - 'ConfigServiceV2GrpcTransport', - 'ConfigServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py deleted file mode 100644 index e7f0db9d401c..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ /dev/null @@ -1,536 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class ConfigServiceV2Transport(abc.ABC): - """Abstract transport class for ConfigServiceV2.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ) - - DEFAULT_HOST: str = 'logging.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
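The transport constructor shown here normalizes the host before resolving credentials: a bare hostname gains the default HTTPS port, while an explicit port is preserved. A standalone reconstruction of that rule:

    # Host normalization as performed in ConfigServiceV2Transport.__init__.
    def normalize_host(host: str) -> str:
        # Default to port 443 (HTTPS) when no port is given.
        return host if ":" in host else host + ":443"

    assert normalize_host("logging.googleapis.com") == "logging.googleapis.com:443"
    assert normalize_host("localhost:8080") == "localhost:8080"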
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_buckets: gapic_v1.method.wrap_method( - self.list_buckets, - default_timeout=None, - client_info=client_info, - ), - self.get_bucket: gapic_v1.method.wrap_method( - self.get_bucket, - default_timeout=None, - client_info=client_info, - ), - self.create_bucket: gapic_v1.method.wrap_method( - self.create_bucket, - default_timeout=None, - client_info=client_info, - ), - self.update_bucket: gapic_v1.method.wrap_method( - self.update_bucket, - default_timeout=None, - client_info=client_info, - ), - self.delete_bucket: gapic_v1.method.wrap_method( - self.delete_bucket, - default_timeout=None, - client_info=client_info, - ), - self.undelete_bucket: gapic_v1.method.wrap_method( - self.undelete_bucket, - default_timeout=None, - client_info=client_info, - ), - self.list_views: gapic_v1.method.wrap_method( - self.list_views, - default_timeout=None, - client_info=client_info, - ), - self.get_view: gapic_v1.method.wrap_method( - self.get_view, - default_timeout=None, - client_info=client_info, - ), - self.create_view: gapic_v1.method.wrap_method( - self.create_view, - default_timeout=None, - client_info=client_info, - ), - self.update_view: gapic_v1.method.wrap_method( - self.update_view, - default_timeout=None, - client_info=client_info, - ), - self.delete_view: gapic_v1.method.wrap_method( - self.delete_view, - default_timeout=None, - client_info=client_info, - ), - self.list_sinks: gapic_v1.method.wrap_method( - self.list_sinks, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_sink: 
gapic_v1.method.wrap_method( - self.get_sink, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_sink: gapic_v1.method.wrap_method( - self.create_sink, - default_timeout=120.0, - client_info=client_info, - ), - self.update_sink: gapic_v1.method.wrap_method( - self.update_sink, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_sink: gapic_v1.method.wrap_method( - self.delete_sink, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_exclusions: gapic_v1.method.wrap_method( - self.list_exclusions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_exclusion: gapic_v1.method.wrap_method( - self.get_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_exclusion: gapic_v1.method.wrap_method( - self.create_exclusion, - default_timeout=120.0, - client_info=client_info, - ), - self.update_exclusion: gapic_v1.method.wrap_method( - self.update_exclusion, - default_timeout=120.0, - client_info=client_info, - ), - self.delete_exclusion: gapic_v1.method.wrap_method( - self.delete_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_cmek_settings: gapic_v1.method.wrap_method( - self.get_cmek_settings, - default_timeout=None, - client_info=client_info, - ), - self.update_cmek_settings: gapic_v1.method.wrap_method( - self.update_cmek_settings, - default_timeout=None, - client_info=client_info, - ), - } - - @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Union[ - logging_config.ListBucketsResponse, - Awaitable[logging_config.ListBucketsResponse] - ]]: - raise NotImplementedError() - - @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: - raise NotImplementedError() - - @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: - raise NotImplementedError() 
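The wrapped methods above attach one uniform retry policy to the read- and delete-style RPCs: exponential backoff starting at 0.1 s, growing by a factor of 1.3, capped at 60 s per sleep with a 60 s overall deadline, and retrying only transient errors. The same policy, reconstructed standalone from the values in ``_prep_wrapped_messages``:

    # The retry policy applied to list_sinks, get_sink, update_sink,
    # delete_sink, list_exclusions, get_exclusion, and delete_exclusion.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    retry_policy = retries.Retry(
        initial=0.1,     # first backoff, in seconds
        maximum=60.0,    # ceiling for any single backoff
        multiplier=1.3,  # exponential growth factor
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,   # total time budget across attempts
    )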
- - @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: - raise NotImplementedError() - - @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Union[ - logging_config.ListViewsResponse, - Awaitable[logging_config.ListViewsResponse] - ]]: - raise NotImplementedError() - - @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: - raise NotImplementedError() - - @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: - raise NotImplementedError() - - @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: - raise NotImplementedError() - - @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Union[ - logging_config.ListSinksResponse, - Awaitable[logging_config.ListSinksResponse] - ]]: - raise NotImplementedError() - - @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: - raise NotImplementedError() - - @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: - raise NotImplementedError() - - @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: - raise NotImplementedError() - - @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Union[ - logging_config.ListExclusionsResponse, - Awaitable[logging_config.ListExclusionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: - raise NotImplementedError() - - @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: - raise NotImplementedError() - - @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: - raise NotImplementedError() - - @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Union[ - empty_pb2.Empty, - 
Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: - raise NotImplementedError() - - @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'ConfigServiceV2Transport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py deleted file mode 100644 index 94e4af68334b..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ /dev/null @@ -1,878 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO - - -class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): - """gRPC backend transport for ConfigServiceV2. - - Service for configuring sinks used to route log entries. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: - r"""Return a callable for the list buckets method over gRPC. - - Lists buckets. - - Returns: - Callable[[~.ListBucketsRequest], - ~.ListBucketsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', - request_serializer=logging_config.ListBucketsRequest.serialize, - response_deserializer=logging_config.ListBucketsResponse.deserialize, - ) - return self._stubs['list_buckets'] - - @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: - r"""Return a callable for the get bucket method over gRPC. - - Gets a bucket. - - Returns: - Callable[[~.GetBucketRequest], - ~.LogBucket]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', - request_serializer=logging_config.GetBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['get_bucket'] - - @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: - r"""Return a callable for the create bucket method over gRPC. - - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. - - Returns: - Callable[[~.CreateBucketRequest], - ~.LogBucket]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', - request_serializer=logging_config.CreateBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['create_bucket'] - - @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: - r"""Return a callable for the update bucket method over gRPC. - - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. - - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. - - Returns: - Callable[[~.UpdateBucketRequest], - ~.LogBucket]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
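
Every RPC property in this file repeats the same lazy-initialization idiom: look the stub up in ``self._stubs``, create it with ``grpc_channel.unary_unary`` on a miss, and return the cached callable thereafter. A generic sketch of that pattern; the ``_cached_stub`` helper and its parameters are hypothetical, not part of the generated code:

    def _cached_stub(transport, method_name, rpc_path, request_type, response_type):
        # Assumes the transport exposes a ``_stubs`` dict and a
        # ``grpc_channel`` property, as ConfigServiceV2GrpcTransport does.
        if method_name not in transport._stubs:
            transport._stubs[method_name] = transport.grpc_channel.unary_unary(
                rpc_path,
                request_serializer=request_type.serialize,
                response_deserializer=response_type.deserialize,
            )
        return transport._stubs[method_name]

    # Roughly equivalent to the ``get_bucket`` property above:
    # stub = _cached_stub(transport, 'get_bucket',
    #                     '/google.logging.v2.ConfigServiceV2/GetBucket',
    #                     logging_config.GetBucketRequest, logging_config.LogBucket)
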
- if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', - request_serializer=logging_config.UpdateBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['update_bucket'] - - @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete bucket method over gRPC. - - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. - - Returns: - Callable[[~.DeleteBucketRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', - request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_bucket'] - - @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: - r"""Return a callable for the undelete bucket method over gRPC. - - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. - - Returns: - Callable[[~.UndeleteBucketRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', - request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['undelete_bucket'] - - @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: - r"""Return a callable for the list views method over gRPC. - - Lists views on a bucket. - - Returns: - Callable[[~.ListViewsRequest], - ~.ListViewsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', - request_serializer=logging_config.ListViewsRequest.serialize, - response_deserializer=logging_config.ListViewsResponse.deserialize, - ) - return self._stubs['list_views'] - - @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: - r"""Return a callable for the get view method over gRPC. - - Gets a view. - - Returns: - Callable[[~.GetViewRequest], - ~.LogView]: - A function that, when called, will call the underlying RPC - on the server. 
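
The delete shown above is soft: ``delete_bucket`` only moves the bucket to DELETE_REQUESTED, and ``undelete_bucket`` can restore it within the 7-day grace period. A hedged sketch of that round trip; the bucket path is a placeholder and ``transport`` is the instance from the earlier sketch:

    from google.cloud.logging_v2.types import logging_config

    bucket = "projects/my-project/locations/global/buckets/my-bucket"  # placeholder

    transport.delete_bucket(logging_config.DeleteBucketRequest(name=bucket))
    # Inside the 7-day window the same resource name can be revived:
    transport.undelete_bucket(logging_config.UndeleteBucketRequest(name=bucket))
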
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', - request_serializer=logging_config.GetViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['get_view'] - - @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: - r"""Return a callable for the create view method over gRPC. - - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. - - Returns: - Callable[[~.CreateViewRequest], - ~.LogView]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', - request_serializer=logging_config.CreateViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['create_view'] - - @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: - r"""Return a callable for the update view method over gRPC. - - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. - - Returns: - Callable[[~.UpdateViewRequest], - ~.LogView]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', - request_serializer=logging_config.UpdateViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['update_view'] - - @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete view method over gRPC. - - Deletes a view from a bucket. - - Returns: - Callable[[~.DeleteViewRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', - request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_view'] - - @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: - r"""Return a callable for the list sinks method over gRPC. - - Lists sinks. 
- - Returns: - Callable[[~.ListSinksRequest], - ~.ListSinksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', - request_serializer=logging_config.ListSinksRequest.serialize, - response_deserializer=logging_config.ListSinksResponse.deserialize, - ) - return self._stubs['list_sinks'] - - @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: - r"""Return a callable for the get sink method over gRPC. - - Gets a sink. - - Returns: - Callable[[~.GetSinkRequest], - ~.LogSink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', - request_serializer=logging_config.GetSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['get_sink'] - - @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: - r"""Return a callable for the create sink method over gRPC. - - Creates a sink that exports specified log entries to a - destination. The export of newly-ingested log entries begins - immediately, unless the sink's ``writer_identity`` is not - permitted to write to the destination. A sink can export log - entries only from the resource owning the sink. - - Returns: - Callable[[~.CreateSinkRequest], - ~.LogSink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', - request_serializer=logging_config.CreateSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['create_sink'] - - @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: - r"""Return a callable for the update sink method over gRPC. - - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, - and ``filter``. - - The updated sink might also have a new ``writer_identity``; see - the ``unique_writer_identity`` field. - - Returns: - Callable[[~.UpdateSinkRequest], - ~.LogSink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
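
Each property hands back the raw callable, so the transport can be driven without the GAPIC client layer; note this bypasses the retry and timeout policies that ``_prep_wrapped_messages`` attaches. A sketch with a placeholder project, reusing the ``transport`` built earlier:

    from google.cloud.logging_v2.types import logging_config

    request = logging_config.ListSinksRequest(parent="projects/my-project")
    response = transport.list_sinks(request)   # synchronous unary-unary call
    for sink in response.sinks:
        print(sink.name, "->", sink.destination)
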
- if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', - request_serializer=logging_config.UpdateSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['update_sink'] - - @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete sink method over gRPC. - - Deletes a sink. If the sink has a unique ``writer_identity``, - then that service account is also deleted. - - Returns: - Callable[[~.DeleteSinkRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', - request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_sink'] - - @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: - r"""Return a callable for the list exclusions method over gRPC. - - Lists all the exclusions in a parent resource. - - Returns: - Callable[[~.ListExclusionsRequest], - ~.ListExclusionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', - request_serializer=logging_config.ListExclusionsRequest.serialize, - response_deserializer=logging_config.ListExclusionsResponse.deserialize, - ) - return self._stubs['list_exclusions'] - - @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: - r"""Return a callable for the get exclusion method over gRPC. - - Gets the description of an exclusion. - - Returns: - Callable[[~.GetExclusionRequest], - ~.LogExclusion]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', - request_serializer=logging_config.GetExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['get_exclusion'] - - @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: - r"""Return a callable for the create exclusion method over gRPC. - - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. 
- - Returns: - Callable[[~.CreateExclusionRequest], - ~.LogExclusion]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', - request_serializer=logging_config.CreateExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['create_exclusion'] - - @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: - r"""Return a callable for the update exclusion method over gRPC. - - Changes one or more properties of an existing - exclusion. - - Returns: - Callable[[~.UpdateExclusionRequest], - ~.LogExclusion]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', - request_serializer=logging_config.UpdateExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['update_exclusion'] - - @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete exclusion method over gRPC. - - Deletes an exclusion. - - Returns: - Callable[[~.DeleteExclusionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', - request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_exclusion'] - - @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: - r"""Return a callable for the get cmek settings method over gRPC. - - Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable[[~.GetCmekSettingsRequest], - ~.CmekSettings]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
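
To make the exclusion stubs above concrete: creating an exclusion pairs a ``LogExclusion`` body with the parent resource whose entries it filters out, subject to the 10-per-resource limit noted earlier. All field values below are illustrative placeholders:

    from google.cloud.logging_v2.types import logging_config

    exclusion = logging_config.LogExclusion(
        name="drop-debug-entries",          # illustrative name
        filter="severity<=DEBUG",           # matching entries are excluded
        disabled=False,
    )
    created = transport.create_exclusion(
        logging_config.CreateExclusionRequest(
            parent="projects/my-project",   # placeholder parent resource
            exclusion=exclusion,
        )
    )
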
- if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', - request_serializer=logging_config.GetCmekSettingsRequest.serialize, - response_deserializer=logging_config.CmekSettings.deserialize, - ) - return self._stubs['get_cmek_settings'] - - @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: - r"""Return a callable for the update cmek settings method over gRPC. - - Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable[[~.UpdateCmekSettingsRequest], - ~.CmekSettings]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', - request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, - response_deserializer=logging_config.CmekSettings.deserialize, - ) - return self._stubs['update_cmek_settings'] - - -__all__ = ( - 'ConfigServiceV2GrpcTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py deleted file mode 100644 index 78e442d8a4e3..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ /dev/null @@ -1,882 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
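
The file that follows defines the AsyncIO twin of the transport just removed: same constructor shape, same stub cache, but every callable returns an awaitable because the stubs come from a ``grpc.experimental.aio`` channel. A minimal usage sketch with placeholder names:

    import asyncio

    from google.cloud.logging_v2.services.config_service_v2.transports.grpc_asyncio import (
        ConfigServiceV2GrpcAsyncIOTransport,
    )
    from google.cloud.logging_v2.types import logging_config

    async def main() -> None:
        transport = ConfigServiceV2GrpcAsyncIOTransport()
        request = logging_config.ListBucketsRequest(
            parent="projects/my-project/locations/global"  # placeholder
        )
        response = await transport.list_buckets(request)  # awaitable stub
        for bucket in response.buckets:
            print(bucket.name)

    asyncio.run(main())
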
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO -from .grpc import ConfigServiceV2GrpcTransport - - -class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): - """gRPC AsyncIO backend transport for ConfigServiceV2. - - Service for configuring sinks used to route log entries. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
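
For mutual TLS the non-deprecated route is ``client_cert_source_for_mtls``; as the branch below shows, it only takes effect when no explicit ``ssl_channel_credentials`` or ``channel`` is supplied. A sketch with an obviously fake certificate loader:

    from typing import Tuple

    def load_client_cert() -> Tuple[bytes, bytes]:
        # Hypothetical loader; real code would fetch PEM-encoded bytes
        # from a secure store or a platform certificate provider.
        with open("client_cert.pem", "rb") as cert_file:
            cert = cert_file.read()
        with open("client_key.pem", "rb") as key_file:
            key = key_file.read()
        return cert, key

    transport = ConfigServiceV2GrpcAsyncIOTransport(
        client_cert_source_for_mtls=load_client_cert,
    )
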
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Awaitable[logging_config.ListBucketsResponse]]: - r"""Return a callable for the list buckets method over gRPC. - - Lists buckets. - - Returns: - Callable[[~.ListBucketsRequest], - Awaitable[~.ListBucketsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', - request_serializer=logging_config.ListBucketsRequest.serialize, - response_deserializer=logging_config.ListBucketsResponse.deserialize, - ) - return self._stubs['list_buckets'] - - @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Awaitable[logging_config.LogBucket]]: - r"""Return a callable for the get bucket method over gRPC. - - Gets a bucket. - - Returns: - Callable[[~.GetBucketRequest], - Awaitable[~.LogBucket]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', - request_serializer=logging_config.GetBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['get_bucket'] - - @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[logging_config.LogBucket]]: - r"""Return a callable for the create bucket method over gRPC. 
- - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. - - Returns: - Callable[[~.CreateBucketRequest], - Awaitable[~.LogBucket]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', - request_serializer=logging_config.CreateBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['create_bucket'] - - @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[logging_config.LogBucket]]: - r"""Return a callable for the update bucket method over gRPC. - - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. - - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. - - A buckets region may not be modified after it is created. - - Returns: - Callable[[~.UpdateBucketRequest], - Awaitable[~.LogBucket]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', - request_serializer=logging_config.UpdateBucketRequest.serialize, - response_deserializer=logging_config.LogBucket.deserialize, - ) - return self._stubs['update_bucket'] - - @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete bucket method over gRPC. - - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. - - Returns: - Callable[[~.DeleteBucketRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', - request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_bucket'] - - @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the undelete bucket method over gRPC. - - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. 
- - Returns: - Callable[[~.UndeleteBucketRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', - request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['undelete_bucket'] - - @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Awaitable[logging_config.ListViewsResponse]]: - r"""Return a callable for the list views method over gRPC. - - Lists views on a bucket. - - Returns: - Callable[[~.ListViewsRequest], - Awaitable[~.ListViewsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', - request_serializer=logging_config.ListViewsRequest.serialize, - response_deserializer=logging_config.ListViewsResponse.deserialize, - ) - return self._stubs['list_views'] - - @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Awaitable[logging_config.LogView]]: - r"""Return a callable for the get view method over gRPC. - - Gets a view. - - Returns: - Callable[[~.GetViewRequest], - Awaitable[~.LogView]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', - request_serializer=logging_config.GetViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['get_view'] - - @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Awaitable[logging_config.LogView]]: - r"""Return a callable for the create view method over gRPC. - - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. - - Returns: - Callable[[~.CreateViewRequest], - Awaitable[~.LogView]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', - request_serializer=logging_config.CreateViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['create_view'] - - @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Awaitable[logging_config.LogView]]: - r"""Return a callable for the update view method over gRPC. 
- - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. - - Returns: - Callable[[~.UpdateViewRequest], - Awaitable[~.LogView]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', - request_serializer=logging_config.UpdateViewRequest.serialize, - response_deserializer=logging_config.LogView.deserialize, - ) - return self._stubs['update_view'] - - @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete view method over gRPC. - - Deletes a view from a bucket. - - Returns: - Callable[[~.DeleteViewRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', - request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_view'] - - @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Awaitable[logging_config.ListSinksResponse]]: - r"""Return a callable for the list sinks method over gRPC. - - Lists sinks. - - Returns: - Callable[[~.ListSinksRequest], - Awaitable[~.ListSinksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', - request_serializer=logging_config.ListSinksRequest.serialize, - response_deserializer=logging_config.ListSinksResponse.deserialize, - ) - return self._stubs['list_sinks'] - - @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Awaitable[logging_config.LogSink]]: - r"""Return a callable for the get sink method over gRPC. - - Gets a sink. - - Returns: - Callable[[~.GetSinkRequest], - Awaitable[~.LogSink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', - request_serializer=logging_config.GetSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['get_sink'] - - @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Awaitable[logging_config.LogSink]]: - r"""Return a callable for the create sink method over gRPC. - - Creates a sink that exports specified log entries to a - destination. The export of newly-ingested log entries begins - immediately, unless the sink's ``writer_identity`` is not - permitted to write to the destination. A sink can export log - entries only from the resource owning the sink. - - Returns: - Callable[[~.CreateSinkRequest], - Awaitable[~.LogSink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', - request_serializer=logging_config.CreateSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['create_sink'] - - @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Awaitable[logging_config.LogSink]]: - r"""Return a callable for the update sink method over gRPC. - - Updates a sink. This method replaces the following fields in the - existing sink with values from the new sink: ``destination``, - and ``filter``. - - The updated sink might also have a new ``writer_identity``; see - the ``unique_writer_identity`` field. - - Returns: - Callable[[~.UpdateSinkRequest], - Awaitable[~.LogSink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', - request_serializer=logging_config.UpdateSinkRequest.serialize, - response_deserializer=logging_config.LogSink.deserialize, - ) - return self._stubs['update_sink'] - - @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete sink method over gRPC. - - Deletes a sink. If the sink has a unique ``writer_identity``, - then that service account is also deleted. - - Returns: - Callable[[~.DeleteSinkRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
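
As the ``update_sink`` docstring notes, the update replaces ``destination`` and ``filter`` wholesale, and ``unique_writer_identity`` can rotate the sink's service account. A hedged sketch meant to run inside an async function like the one earlier; the sink and bucket names are placeholders:

    from google.cloud.logging_v2.types import logging_config

    updated = await transport.update_sink(
        logging_config.UpdateSinkRequest(
            sink_name="projects/my-project/sinks/my-sink",        # placeholder
            sink=logging_config.LogSink(
                name="my-sink",
                destination="storage.googleapis.com/my-log-bucket",  # placeholder
                filter="severity>=ERROR",
            ),
            unique_writer_identity=True,  # may yield a new writer_identity
        )
    )
    print(updated.writer_identity)
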
- if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', - request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_sink'] - - @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Awaitable[logging_config.ListExclusionsResponse]]: - r"""Return a callable for the list exclusions method over gRPC. - - Lists all the exclusions in a parent resource. - - Returns: - Callable[[~.ListExclusionsRequest], - Awaitable[~.ListExclusionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', - request_serializer=logging_config.ListExclusionsRequest.serialize, - response_deserializer=logging_config.ListExclusionsResponse.deserialize, - ) - return self._stubs['list_exclusions'] - - @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Awaitable[logging_config.LogExclusion]]: - r"""Return a callable for the get exclusion method over gRPC. - - Gets the description of an exclusion. - - Returns: - Callable[[~.GetExclusionRequest], - Awaitable[~.LogExclusion]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', - request_serializer=logging_config.GetExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['get_exclusion'] - - @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: - r"""Return a callable for the create exclusion method over gRPC. - - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. - - Returns: - Callable[[~.CreateExclusionRequest], - Awaitable[~.LogExclusion]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', - request_serializer=logging_config.CreateExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['create_exclusion'] - - @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: - r"""Return a callable for the update exclusion method over gRPC. 
- - Changes one or more properties of an existing - exclusion. - - Returns: - Callable[[~.UpdateExclusionRequest], - Awaitable[~.LogExclusion]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', - request_serializer=logging_config.UpdateExclusionRequest.serialize, - response_deserializer=logging_config.LogExclusion.deserialize, - ) - return self._stubs['update_exclusion'] - - @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete exclusion method over gRPC. - - Deletes an exclusion. - - Returns: - Callable[[~.DeleteExclusionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', - request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_exclusion'] - - @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: - r"""Return a callable for the get cmek settings method over gRPC. - - Gets the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable[[~.GetCmekSettingsRequest], - Awaitable[~.CmekSettings]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', - request_serializer=logging_config.GetCmekSettingsRequest.serialize, - response_deserializer=logging_config.CmekSettings.deserialize, - ) - return self._stubs['get_cmek_settings'] - - @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: - r"""Return a callable for the update cmek settings method over gRPC. - - Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. 
- - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Returns: - Callable[[~.UpdateCmekSettingsRequest], - Awaitable[~.CmekSettings]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', - request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, - response_deserializer=logging_config.CmekSettings.deserialize, - ) - return self._stubs['update_cmek_settings'] - - -__all__ = ( - 'ConfigServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py deleted file mode 100644 index ed08d1888503..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import LoggingServiceV2Client -from .async_client import LoggingServiceV2AsyncClient - -__all__ = ( - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py deleted file mode 100644 index dd9cbb78dd9a..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ /dev/null @@ -1,781 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
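The properties in the transport deleted above all share one lazy stub-caching idiom; a minimal standalone sketch of it, reusing the same serializer hooks the generated code passes to gRPC (channel construction is elided)::

    from typing import Callable

    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import empty_pb2

    class _StubCache:
        # Mirrors the generated transports: build each gRPC stub on first
        # access, then reuse the cached callable for every later call.
        def __init__(self, grpc_channel):
            self.grpc_channel = grpc_channel  # a grpc.aio.Channel in this file
            self._stubs = {}

        @property
        def delete_sink(self) -> Callable:
            if "delete_sink" not in self._stubs:
                self._stubs["delete_sink"] = self.grpc_channel.unary_unary(
                    "/google.logging.v2.ConfigServiceV2/DeleteSink",
                    # gRPC handles serialization; we only pass the functions.
                    request_serializer=logging_config.DeleteSinkRequest.serialize,
                    response_deserializer=empty_pb2.Empty.FromString,
                )
            return self._stubs["delete_sink"]
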
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport -from .client import LoggingServiceV2Client - - -class LoggingServiceV2AsyncClient: - """Service for ingesting and querying logs.""" - - _client: LoggingServiceV2Client - - DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - - log_path = staticmethod(LoggingServiceV2Client.log_path) - parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) - common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) - common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) - common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2AsyncClient: The constructed client. - """ - return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2AsyncClient: The constructed client. 
- """ - return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LoggingServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - LoggingServiceV2Transport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the logging service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = LoggingServiceV2Client( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def delete_log(self, - request: logging.DeleteLogRequest = None, - *, - log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. - - Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): - The request object. The parameters to DeleteLog. - log_name (:class:`str`): - Required. The resource name of the log to delete: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. 
For example, - ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - For more information about log names, see - [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``log_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging.DeleteLogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if log_name is not None: - request.log_name = log_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def write_log_entries(self, - request: logging.WriteLogEntriesRequest = None, - *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, - entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging.WriteLogEntriesResponse: - r"""Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method is - used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use - Logging. A single request may contain log entries for a - maximum of 1000 different resources (projects, - organizations, billing accounts or folders) - - Args: - request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): - The request object. The parameters to WriteLogEntries. - log_name (:class:`str`): - Optional. A default log resource name that is assigned - to all log entries in ``entries`` that do not specify a - value for ``log_name``: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. 
For example: - - :: - - "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - - The permission ``logging.logEntries.create`` is needed - on each project, organization, billing account, or - folder that is receiving new log entries, whether the - resource is specified in ``logName`` or in an individual - log entry. - - This corresponds to the ``log_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): - Optional. A default monitored resource object that is - assigned to all log entries in ``entries`` that do not - specify a value for ``resource``. Example: - - :: - - { "type": "gce_instance", - "labels": { - "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - - See [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): - Optional. Default labels that are added to the - ``labels`` field of all log entries in ``entries``. If a - log entry already has a label with the same key as a - label in this parameter, then the log entry's label is - not changed. See [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): - Required. The log entries to send to Logging. The order - of log entries in this list does not matter. Values - supplied in this method's ``log_name``, ``resource``, - and ``labels`` fields are copied into those log entries - in this list that do not include values for their - corresponding fields. For more information, see the - [LogEntry][google.logging.v2.LogEntry] type. - - If the ``timestamp`` or ``insert_id`` fields are missing - in log entries, then this method supplies the current - time or a unique identifier, respectively. The supplied - values are chosen so that, among the log entries that - did not supply their own values, the entries earlier in - the list will sort before the entries later in the list. - See the ``entries.list`` method. - - Log entries with timestamps that are more than the `logs - retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with - LogSinks `__. - - To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. - - This corresponds to the ``entries`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.WriteLogEntriesResponse: - Result returned from WriteLogEntries. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
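A hedged usage sketch of the batching guidance above, against the async client this file defines (the project ID and payloads are placeholders)::

    import asyncio

    from google.api import monitored_resource_pb2
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import LogEntry

    async def write_batch():
        client = LoggingServiceV2AsyncClient()
        # Several entries in one call, per the quota guidance above.
        await client.write_log_entries(
            log_name="projects/my-project-id/logs/syslog",  # placeholder
            resource=monitored_resource_pb2.MonitoredResource(type="global"),
            entries=[
                LogEntry(text_payload="first event"),
                LogEntry(text_payload="second event"),
            ],
        )

    asyncio.run(write_batch())
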
- has_flattened_params = any([log_name, resource, labels, entries]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging.WriteLogEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if log_name is not None: - request.log_name = log_name - if resource is not None: - request.resource = resource - - if labels: - request.labels.update(labels) - if entries: - request.entries.extend(entries) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write_log_entries, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_log_entries(self, - request: logging.ListLogEntriesRequest = None, - *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogEntriesAsyncPager: - r"""Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. - For ways to export log entries, see `Exporting - Logs `__. - - Args: - request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): - The request object. The parameters to `ListLogEntries`. - resource_names (:class:`Sequence[str]`): - Required. Names of one or more parent resources from - which to retrieve log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - Projects listed in the ``project_ids`` field are added - to this list. - - This corresponds to the ``resource_names`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - order_by (:class:`str`): - Optional. How the results should be sorted. 
Presently, - the only permitted values are ``"timestamp asc"`` - (default) and ``"timestamp desc"``. The first option - returns entries in order of increasing values of - ``LogEntry.timestamp`` (oldest first), and the second - option returns entries in order of decreasing timestamps - (newest first). Entries with equal timestamps are - returned in order of their ``insert_id`` values. - - This corresponds to the ``order_by`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: - Result returned from ListLogEntries. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging.ListLogEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if filter is not None: - request.filter = filter - if order_by is not None: - request.order_by = order_by - if resource_names: - request.resource_names.extend(resource_names) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_entries, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLogEntriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_monitored_resource_descriptors(self, - request: logging.ListMonitoredResourceDescriptorsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: - r"""Lists the descriptors for monitored resource types - used by Logging. - - Args: - request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): - The request object. The parameters to - ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
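A hedged sketch of paging through ListLogEntries with the async pager returned above (the resource name and filter are placeholders)::

    import asyncio

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )

    async def show_recent_errors():
        client = LoggingServiceV2AsyncClient()
        pager = await client.list_log_entries(
            resource_names=["projects/my-project-id"],  # placeholder
            filter="severity>=ERROR",
            order_by="timestamp desc",
        )
        # The async pager resolves additional pages transparently.
        async for entry in pager:
            print(entry.log_name, entry.timestamp)

    asyncio.run(show_recent_errors())
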
- - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: - Result returned from - ListMonitoredResourceDescriptors. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMonitoredResourceDescriptorsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_logs(self, - request: logging.ListLogsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogsAsyncPager: - r"""Lists the logs in projects, organizations, folders, - or billing accounts. Only logs that have entries are - listed. - - Args: - request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): - The request object. The parameters to ListLogs. - parent (:class:`str`): - Required. The resource name that owns the logs: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: - Result returned from ListLogs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging.ListLogsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_logs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLogsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def tail_log_entries(self, - requests: AsyncIterator[logging.TailLogEntriesRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: - r"""Streaming read of log entries as they are ingested. - Until the stream is terminated, it will continue reading - logs. - - Args: - requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): - The request object AsyncIterator. The parameters to `TailLogEntries`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: - Result returned from TailLogEntries. - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.tail_log_entries, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "LoggingServiceV2AsyncClient", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py deleted file mode 100644 index 354945976630..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/client.py +++ /dev/null @@ -1,920 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import LoggingServiceV2GrpcTransport -from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport - - -class LoggingServiceV2ClientMeta(type): - """Metaclass for the LoggingServiceV2 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] - _transport_registry["grpc"] = LoggingServiceV2GrpcTransport - _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[LoggingServiceV2Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
- if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta): - """Service for ingesting and querying logs.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LoggingServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LoggingServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - LoggingServiceV2Transport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def log_path(project: str,log: str,) -> str: - """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log, ) - - @staticmethod - def parse_log_path(path: str) -> Dict[str,str]: - """Parses a log path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the logging service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, LoggingServiceV2Transport): - # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def delete_log(self, - request: Union[logging.DeleteLogRequest, dict] = None, - *, - log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): - The request object. The parameters to DeleteLog. - log_name (str): - Required. The resource name of the log to delete: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example, - ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - For more information about log names, see - [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``log_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging.DeleteLogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging.DeleteLogRequest): - request = logging.DeleteLogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if log_name is not None: - request.log_name = log_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_log] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def write_log_entries(self, - request: Union[logging.WriteLogEntriesRequest, dict] = None, - *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, - entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging.WriteLogEntriesResponse: - r"""Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method is - used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use - Logging. A single request may contain log entries for a - maximum of 1000 different resources (projects, - organizations, billing accounts or folders) - - Args: - request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): - The request object. The parameters to WriteLogEntries. - log_name (str): - Optional. A default log resource name that is assigned - to all log entries in ``entries`` that do not specify a - value for ``log_name``: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example: - - :: - - "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - - The permission ``logging.logEntries.create`` is needed - on each project, organization, billing account, or - folder that is receiving new log entries, whether the - resource is specified in ``logName`` or in an individual - log entry. - - This corresponds to the ``log_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource (google.api.monitored_resource_pb2.MonitoredResource): - Optional. A default monitored resource object that is - assigned to all log entries in ``entries`` that do not - specify a value for ``resource``. Example: - - :: - - { "type": "gce_instance", - "labels": { - "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - - See [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): - Optional. Default labels that are added to the - ``labels`` field of all log entries in ``entries``. If a - log entry already has a label with the same key as a - label in this parameter, then the log entry's label is - not changed. See [LogEntry][google.logging.v2.LogEntry]. - - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): - Required. The log entries to send to Logging. The order - of log entries in this list does not matter. Values - supplied in this method's ``log_name``, ``resource``, - and ``labels`` fields are copied into those log entries - in this list that do not include values for their - corresponding fields. For more information, see the - [LogEntry][google.logging.v2.LogEntry] type. 
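A hedged sync-client counterpart for DeleteLog as documented above (the log name is a placeholder; as noted, the log reappears if it receives new entries)::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )

    client = LoggingServiceV2Client()
    # [LOG_ID] must be URL-encoded; "syslog" needs no escaping here.
    client.delete_log(log_name="projects/my-project-id/logs/syslog")
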
- - If the ``timestamp`` or ``insert_id`` fields are missing - in log entries, then this method supplies the current - time or a unique identifier, respectively. The supplied - values are chosen so that, among the log entries that - did not supply their own values, the entries earlier in - the list will sort before the entries later in the list. - See the ``entries.list`` method. - - Log entries with timestamps that are more than the `logs - retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with - LogSinks `__. - - To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. - - This corresponds to the ``entries`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.WriteLogEntriesResponse: - Result returned from WriteLogEntries. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name, resource, labels, entries]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging.WriteLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging.WriteLogEntriesRequest): - request = logging.WriteLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if log_name is not None: - request.log_name = log_name - if resource is not None: - request.resource = resource - if labels is not None: - request.labels = labels - if entries is not None: - request.entries = entries - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.write_log_entries] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_log_entries(self, - request: Union[logging.ListLogEntriesRequest, dict] = None, - *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogEntriesPager: - r"""Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. - For ways to export log entries, see `Exporting - Logs `__. - - Args: - request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): - The request object. The parameters to `ListLogEntries`. - resource_names (Sequence[str]): - Required. 
Names of one or more parent resources from - which to retrieve log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - Projects listed in the ``project_ids`` field are added - to this list. - - This corresponds to the ``resource_names`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - order_by (str): - Optional. How the results should be sorted. Presently, - the only permitted values are ``"timestamp asc"`` - (default) and ``"timestamp desc"``. The first option - returns entries in order of increasing values of - ``LogEntry.timestamp`` (oldest first), and the second - option returns entries in order of decreasing timestamps - (newest first). Entries with equal timestamps are - returned in order of their ``insert_id`` values. - - This corresponds to the ``order_by`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: - Result returned from ListLogEntries. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging.ListLogEntriesRequest): - request = logging.ListLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if resource_names is not None: - request.resource_names = resource_names - if filter is not None: - request.filter = filter - if order_by is not None: - request.order_by = order_by - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_log_entries] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLogEntriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_monitored_resource_descriptors(self, - request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: - r"""Lists the descriptors for monitored resource types - used by Logging. - - Args: - request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): - The request object. The parameters to - ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: - Result returned from - ListMonitoredResourceDescriptors. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): - request = logging.ListMonitoredResourceDescriptorsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMonitoredResourceDescriptorsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_logs(self, - request: Union[logging.ListLogsRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogsPager: - r"""Lists the logs in projects, organizations, folders, - or billing accounts. Only logs that have entries are - listed. - - Args: - request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): - The request object. The parameters to ListLogs. - parent (str): - Required. 
The resource name that owns the logs: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: - Result returned from ListLogs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging.ListLogsRequest): - request = logging.ListLogsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_logs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLogsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def tail_log_entries(self, - requests: Iterator[logging.TailLogEntriesRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[logging.TailLogEntriesResponse]: - r"""Streaming read of log entries as they are ingested. - Until the stream is terminated, it will continue reading - logs. - - Args: - requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): - The request object iterator. The parameters to `TailLogEntries`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: - Result returned from TailLogEntries. - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] - - # Send the request. 
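# A hedged usage sketch for the streaming method above: unlike the unary
# calls, ``tail_log_entries`` consumes an *iterator* of requests and yields
# responses as entries are ingested. "projects/my-project" is a placeholder.
from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
from google.cloud.logging_v2.types import TailLogEntriesRequest

client = LoggingServiceV2Client()

def request_stream():
    # The opening request names the resources to tail; the iterator form
    # mirrors the request-streaming half of the bidirectional RPC.
    yield TailLogEntriesRequest(resource_names=["projects/my-project"])

for response in client.tail_log_entries(request_stream()):
    for entry in response.entries:
        print(entry.log_name)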
- response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "LoggingServiceV2Client", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py deleted file mode 100644 index 95adb7e912c9..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ /dev/null @@ -1,386 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging - - -class ListLogEntriesPager: - """A pager for iterating through ``list_log_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLogEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging.ListLogEntriesResponse], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogEntriesRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging.ListLogEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging.ListLogEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[log_entry.LogEntry]: - for page in self.pages: - yield from page.entries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLogEntriesAsyncPager: - """A pager for iterating through ``list_log_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLogEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogEntriesRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging.ListLogEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: - async def async_generator(): - async for page in self.pages: - for response in page.entries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMonitoredResourceDescriptorsPager: - """A pager for iterating through ``list_monitored_resource_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``resource_descriptors`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMonitoredResourceDescriptors`` requests and continue to iterate - through the ``resource_descriptors`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging.ListMonitoredResourceDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: - for page in self.pages: - yield from page.resource_descriptors - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMonitoredResourceDescriptorsAsyncPager: - """A pager for iterating through ``list_monitored_resource_descriptors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``resource_descriptors`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMonitoredResourceDescriptors`` requests and continue to iterate - through the ``resource_descriptors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging.ListMonitoredResourceDescriptorsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: - async def async_generator(): - async for page in self.pages: - for response in page.resource_descriptors: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLogsPager: - """A pager for iterating through ``list_logs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``log_names`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLogs`` requests and continue to iterate - through the ``log_names`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging.ListLogsResponse], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging.ListLogsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging.ListLogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.log_names - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLogsAsyncPager: - """A pager for iterating through ``list_logs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``log_names`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLogs`` requests and continue to iterate - through the ``log_names`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogsResponse]], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging.ListLogsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.log_names: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py deleted file mode 100644 index 46e9a1fcbf4c..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LoggingServiceV2Transport -from .grpc import LoggingServiceV2GrpcTransport -from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport - - -# Compile a registry of transports. 
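# A simplified, self-contained stand-in for how a registry like the one built
# below is typically consumed by the client factory: a transport label selects
# the class, with the first registered entry used as the default. The class
# and function names here are illustrative, not the generated API.
from collections import OrderedDict

class _GrpcTransport:
    pass

class _GrpcAsyncIOTransport:
    pass

_example_registry = OrderedDict()
_example_registry["grpc"] = _GrpcTransport
_example_registry["grpc_asyncio"] = _GrpcAsyncIOTransport

def get_transport_class(label=None):
    """Return the transport class for ``label``, defaulting to the first entry."""
    if label:
        return _example_registry[label]
    return next(iter(_example_registry.values()))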
-_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] -_transport_registry['grpc'] = LoggingServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport - -__all__ = ( - 'LoggingServiceV2Transport', - 'LoggingServiceV2GrpcTransport', - 'LoggingServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py deleted file mode 100644 index 222ed3c1f99c..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ /dev/null @@ -1,291 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class LoggingServiceV2Transport(abc.ABC): - """Abstract transport class for LoggingServiceV2.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ) - - DEFAULT_HOST: str = 'logging.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
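# The wrap_method calls below attach an exponential-backoff retry policy to
# each RPC: a 0.1 s initial delay growing by a factor of 1.3, capped at 60 s
# per sleep, with a 60 s overall deadline (3600 s for the streaming
# tail_log_entries). A small sketch of the nominal schedule; in practice
# google.api_core also applies random jitter, so real sleeps vary per attempt.
def nominal_backoff(initial=0.1, multiplier=1.3, maximum=60.0, deadline=60.0):
    elapsed, delay = 0.0, initial
    while elapsed < deadline:
        yield delay
        elapsed += delay
        delay = min(delay * multiplier, maximum)

print([round(d, 2) for d in nominal_backoff()][:5])
# -> [0.1, 0.13, 0.17, 0.22, 0.29]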
- self._wrapped_methods = { - self.delete_log: gapic_v1.method.wrap_method( - self.delete_log, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.write_log_entries: gapic_v1.method.wrap_method( - self.write_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_log_entries: gapic_v1.method.wrap_method( - self.list_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( - self.list_monitored_resource_descriptors, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_logs: gapic_v1.method.wrap_method( - self.list_logs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.tail_log_entries: gapic_v1.method.wrap_method( - self.tail_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - } - - @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Union[ - logging.WriteLogEntriesResponse, - Awaitable[logging.WriteLogEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Union[ - logging.ListLogEntriesResponse, - Awaitable[logging.ListLogEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Union[ - logging.ListMonitoredResourceDescriptorsResponse, - Awaitable[logging.ListMonitoredResourceDescriptorsResponse] - ]]: - raise NotImplementedError() - - @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Union[ - logging.ListLogsResponse, - Awaitable[logging.ListLogsResponse] - ]]: - raise NotImplementedError() - - @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Union[ -
logging.TailLogEntriesResponse, - Awaitable[logging.TailLogEntriesResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'LoggingServiceV2Transport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py deleted file mode 100644 index f66cb54a21aa..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ /dev/null @@ -1,402 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO - - -class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): - """gRPC backend transport for LoggingServiceV2. - - Service for ingesting and querying logs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
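# A hedged sketch of the mTLS branch that follows: the callback returns the
# client certificate and private key as PEM bytes, which are turned into gRPC
# channel credentials. The PEM file paths here are placeholders; deployments
# often source these from a secure store rather than loose files on disk.
import grpc

def example_cert_source():
    with open("client_cert.pem", "rb") as cert_file:
        cert = cert_file.read()
    with open("client_key.pem", "rb") as key_file:
        key = key_file.read()
    return cert, key

cert, key = example_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key
)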
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete log method over gRPC. - - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted.
- - Returns: - Callable[[~.DeleteLogRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', - request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_log'] - - @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: - r"""Return a callable for the write log entries method over gRPC. - - Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method is - used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use - Logging. A single request may contain log entries for a - maximum of 1000 different resources (projects, - organizations, billing accounts or folders) - - Returns: - Callable[[~.WriteLogEntriesRequest], - ~.WriteLogEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', - request_serializer=logging.WriteLogEntriesRequest.serialize, - response_deserializer=logging.WriteLogEntriesResponse.deserialize, - ) - return self._stubs['write_log_entries'] - - @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: - r"""Return a callable for the list log entries method over gRPC. - - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. - For ways to export log entries, see `Exporting - Logs `__. - - Returns: - Callable[[~.ListLogEntriesRequest], - ~.ListLogEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', - request_serializer=logging.ListLogEntriesRequest.serialize, - response_deserializer=logging.ListLogEntriesResponse.deserialize, - ) - return self._stubs['list_log_entries'] - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: - r"""Return a callable for the list monitored resource - descriptors method over gRPC. - - Lists the descriptors for monitored resource types - used by Logging. - - Returns: - Callable[[~.ListMonitoredResourceDescriptorsRequest], - ~.ListMonitoredResourceDescriptorsResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, - ) - return self._stubs['list_monitored_resource_descriptors'] - - @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: - r"""Return a callable for the list logs method over gRPC. - - Lists the logs in projects, organizations, folders, - or billing accounts. Only logs that have entries are - listed. - - Returns: - Callable[[~.ListLogsRequest], - ~.ListLogsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', - request_serializer=logging.ListLogsRequest.serialize, - response_deserializer=logging.ListLogsResponse.deserialize, - ) - return self._stubs['list_logs'] - - @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: - r"""Return a callable for the tail log entries method over gRPC. - - Streaming read of log entries as they are ingested. - Until the stream is terminated, it will continue reading - logs. - - Returns: - Callable[[~.TailLogEntriesRequest], - ~.TailLogEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', - request_serializer=logging.TailLogEntriesRequest.serialize, - response_deserializer=logging.TailLogEntriesResponse.deserialize, - ) - return self._stubs['tail_log_entries'] - - -__all__ = ( - 'LoggingServiceV2GrpcTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py deleted file mode 100644 index a19007ab65c9..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ /dev/null @@ -1,406 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO -from .grpc import LoggingServiceV2GrpcTransport - - -class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): - """gRPC AsyncIO backend transport for LoggingServiceV2. - - Service for ingesting and querying logs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete log method over gRPC. - - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. - - Returns: - Callable[[~.DeleteLogRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', - request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_log'] - - @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Awaitable[logging.WriteLogEntriesResponse]]: - r"""Return a callable for the write log entries method over gRPC. - - Writes log entries to Logging. 
This API method is the - only way to send log entries to Logging. This method is - used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use - Logging. A single request may contain log entries for a - maximum of 1000 different resources (projects, - organizations, billing accounts or folders) - - Returns: - Callable[[~.WriteLogEntriesRequest], - Awaitable[~.WriteLogEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', - request_serializer=logging.WriteLogEntriesRequest.serialize, - response_deserializer=logging.WriteLogEntriesResponse.deserialize, - ) - return self._stubs['write_log_entries'] - - @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Awaitable[logging.ListLogEntriesResponse]]: - r"""Return a callable for the list log entries method over gRPC. - - Lists log entries. Use this method to retrieve log entries that - originated from a project/folder/organization/billing account. - For ways to export log entries, see `Exporting - Logs `__. - - Returns: - Callable[[~.ListLogEntriesRequest], - Awaitable[~.ListLogEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', - request_serializer=logging.ListLogEntriesRequest.serialize, - response_deserializer=logging.ListLogEntriesResponse.deserialize, - ) - return self._stubs['list_log_entries'] - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: - r"""Return a callable for the list monitored resource - descriptors method over gRPC. - - Lists the descriptors for monitored resource types - used by Logging. - - Returns: - Callable[[~.ListMonitoredResourceDescriptorsRequest], - Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, - ) - return self._stubs['list_monitored_resource_descriptors'] - - @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Awaitable[logging.ListLogsResponse]]: - r"""Return a callable for the list logs method over gRPC. 
- - Lists the logs in projects, organizations, folders, - or billing accounts. Only logs that have entries are - listed. - - Returns: - Callable[[~.ListLogsRequest], - Awaitable[~.ListLogsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', - request_serializer=logging.ListLogsRequest.serialize, - response_deserializer=logging.ListLogsResponse.deserialize, - ) - return self._stubs['list_logs'] - - @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Awaitable[logging.TailLogEntriesResponse]]: - r"""Return a callable for the tail log entries method over gRPC. - - Streaming read of log entries as they are ingested. - Until the stream is terminated, it will continue reading - logs. - - Returns: - Callable[[~.TailLogEntriesRequest], - Awaitable[~.TailLogEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', - request_serializer=logging.TailLogEntriesRequest.serialize, - response_deserializer=logging.TailLogEntriesResponse.deserialize, - ) - return self._stubs['tail_log_entries'] - - -__all__ = ( - 'LoggingServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py deleted file mode 100644 index 1b5d1805cdcd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
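The transport deleted above gives each RPC a lazily-created stub: every property checks self._stubs, wires grpc_channel.unary_unary (or stream_stream for TailLogEntries) on first access, and returns the cached callable thereafter. A minimal sketch of exercising that surface through the public async client — assuming the released google-cloud-logging package rather than this staging copy; the project and log names are placeholders:

import asyncio

from google.cloud.logging_v2 import types
from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2AsyncClient,
)


async def main():
    # Each awaited call below travels through one of the cached stubs
    # created by the transport shown in the hunk above.
    client = LoggingServiceV2AsyncClient()

    entry = types.LogEntry(
        log_name="projects/my-project/logs/my-log",  # placeholder
        resource={"type": "global"},
        text_payload="hello world",
    )

    # Per the write_log_entries docstring, a single request may span
    # at most 1000 distinct resources.
    await client.write_log_entries(entries=[entry])


asyncio.run(main())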
-# -from .client import MetricsServiceV2Client -from .async_client import MetricsServiceV2AsyncClient - -__all__ = ( - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py deleted file mode 100644 index 764f44f66698..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ /dev/null @@ -1,640 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import distribution_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport -from .client import MetricsServiceV2Client - - -class MetricsServiceV2AsyncClient: - """Service for configuring logs-based metrics.""" - - _client: MetricsServiceV2Client - - DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - - log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) - parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod(MetricsServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetricsServiceV2Client.parse_common_billing_account_path) - common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(MetricsServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(MetricsServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(MetricsServiceV2Client.parse_common_organization_path) - common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(MetricsServiceV2Client.parse_common_project_path) - common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) - 
parse_common_location_path = staticmethod(MetricsServiceV2Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2AsyncClient: The constructed client. - """ - return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2AsyncClient: The constructed client. - """ - return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetricsServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - MetricsServiceV2Transport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metrics service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = MetricsServiceV2Client( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_log_metrics(self, - request: logging_metrics.ListLogMetricsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogMetricsAsyncPager: - r"""Lists logs-based metrics. - - Args: - request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`): - The request object. The parameters to ListLogMetrics. - parent (:class:`str`): - Required. The name of the project containing the - metrics: - - :: - - "projects/[PROJECT_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: - Result returned from ListLogMetrics. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_metrics.ListLogMetricsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_metrics, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLogMetricsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_log_metric(self, - request: logging_metrics.GetLogMetricRequest = None, - *, - metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Gets a logs-based metric. - - Args: - request (:class:`google.cloud.logging_v2.types.GetLogMetricRequest`): - The request object. The parameters to GetLogMetric. - metric_name (:class:`str`): - Required. 
The resource name of the desired metric: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_metrics.GetLogMetricRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if metric_name is not None: - request.metric_name = metric_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_log_metric, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_log_metric(self, - request: logging_metrics.CreateLogMetricRequest = None, - *, - parent: str = None, - metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Creates a logs-based metric. - - Args: - request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`): - The request object. The parameters to CreateLogMetric. - parent (:class:`str`): - Required. The resource name of the project in which to - create the metric: - - :: - - "projects/[PROJECT_ID]" - - The new metric must be provided in the request. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric (:class:`google.cloud.logging_v2.types.LogMetric`): - Required. The new logs-based metric, - which must not have an identifier that - already exists. - - This corresponds to the ``metric`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_metrics.CreateLogMetricRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metric is not None: - request.metric = metric - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_log_metric, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_log_metric(self, - request: logging_metrics.UpdateLogMetricRequest = None, - *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Creates or updates a logs-based metric. - - Args: - request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`): - The request object. The parameters to UpdateLogMetric. - metric_name (:class:`str`): - Required. The resource name of the metric to update: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - The updated metric must be provided in the request and - it's ``name`` field must be the same as ``[METRIC_ID]`` - If the metric does not exist in ``[PROJECT_ID]``, then a - new metric is created. - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric (:class:`google.cloud.logging_v2.types.LogMetric`): - Required. The updated metric. - This corresponds to the ``metric`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. 
The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_metrics.UpdateLogMetricRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if metric_name is not None: - request.metric_name = metric_name - if metric is not None: - request.metric = metric - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_log_metric, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_log_metric(self, - request: logging_metrics.DeleteLogMetricRequest = None, - *, - metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a logs-based metric. - - Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`): - The request object. The parameters to DeleteLogMetric. - metric_name (:class:`str`): - Required. The resource name of the metric to delete: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = logging_metrics.DeleteLogMetricRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if metric_name is not None: - request.metric_name = metric_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log_metric, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "MetricsServiceV2AsyncClient", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py deleted file mode 100644 index af554cf6d6fd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ /dev/null @@ -1,799 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
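The async metrics client deleted above is a thin delegating wrapper: its constructor builds a MetricsServiceV2Client for configuration, and each coroutine wraps the transport method with the retry/timeout defaults shown, attaches the routing-header metadata, and awaits the RPC. A short usage sketch under the same assumptions as before (released package; the project, metric name, and filter are placeholders):

import asyncio

from google.cloud.logging_v2 import types
from google.cloud.logging_v2.services.metrics_service_v2 import (
    MetricsServiceV2AsyncClient,
)


async def main():
    client = MetricsServiceV2AsyncClient()

    # create_log_metric takes the parent project plus the metric body.
    metric = types.LogMetric(
        name="error_count",        # placeholder
        filter="severity>=ERROR",  # placeholder
    )
    await client.create_log_metric(
        parent="projects/my-project", metric=metric,
    )

    # list_log_metrics returns an async pager; iterating it resolves
    # additional pages behind the scenes.
    pager = await client.list_log_metrics(parent="projects/my-project")
    async for m in pager:
        print(m.name)


asyncio.run(main())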
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.api import distribution_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetricsServiceV2GrpcTransport -from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport - - -class MetricsServiceV2ClientMeta(type): - """Metaclass for the MetricsServiceV2 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] - _transport_registry["grpc"] = MetricsServiceV2GrpcTransport - _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[MetricsServiceV2Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta): - """Service for configuring logs-based metrics.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsServiceV2Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetricsServiceV2Transport: - """Returns the transport used by the client instance. - - Returns: - MetricsServiceV2Transport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def log_metric_path(project: str,metric: str,) -> str: - """Returns a fully-qualified log_metric string.""" - return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) - - @staticmethod - def parse_log_metric_path(path: str) -> Dict[str,str]: - """Parses a log_metric path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metrics service v2 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, MetricsServiceV2Transport): - # transport is a MetricsServiceV2Transport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def list_log_metrics(self, - request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLogMetricsPager: - r"""Lists logs-based metrics. - - Args: - request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): - The request object. The parameters to ListLogMetrics. - parent (str): - Required. The name of the project containing the - metrics: - - :: - - "projects/[PROJECT_ID]" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: - Result returned from ListLogMetrics. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.ListLogMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_metrics.ListLogMetricsRequest): - request = logging_metrics.ListLogMetricsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLogMetricsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_log_metric(self, - request: Union[logging_metrics.GetLogMetricRequest, dict] = None, - *, - metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Gets a logs-based metric. - - Args: - request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): - The request object. The parameters to GetLogMetric. - metric_name (str): - Required. The resource name of the desired metric: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.GetLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_metrics.GetLogMetricRequest): - request = logging_metrics.GetLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if metric_name is not None: - request.metric_name = metric_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_log_metric] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_log_metric(self, - request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, - *, - parent: str = None, - metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Creates a logs-based metric. - - Args: - request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): - The request object. The parameters to CreateLogMetric. - parent (str): - Required. 
The resource name of the project in which to - create the metric: - - :: - - "projects/[PROJECT_ID]" - - The new metric must be provided in the request. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric (google.cloud.logging_v2.types.LogMetric): - Required. The new logs-based metric, - which must not have an identifier that - already exists. - - This corresponds to the ``metric`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.CreateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_metrics.CreateLogMetricRequest): - request = logging_metrics.CreateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metric is not None: - request.metric = metric - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_log_metric] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_log_metric(self, - request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, - *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_metrics.LogMetric: - r"""Creates or updates a logs-based metric. - - Args: - request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): - The request object. The parameters to UpdateLogMetric. - metric_name (str): - Required. 
The resource name of the metric to update: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - The updated metric must be provided in the request and - it's ``name`` field must be the same as ``[METRIC_ID]`` - If the metric does not exist in ``[PROJECT_ID]``, then a - new metric is created. - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metric (google.cloud.logging_v2.types.LogMetric): - Required. The updated metric. - This corresponds to the ``metric`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.logging_v2.types.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.UpdateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_metrics.UpdateLogMetricRequest): - request = logging_metrics.UpdateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if metric_name is not None: - request.metric_name = metric_name - if metric is not None: - request.metric = metric - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_log_metric] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_log_metric(self, - request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, - *, - metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a logs-based metric. - - Args: - request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): - The request object. The parameters to DeleteLogMetric. - metric_name (str): - Required. 
The resource name of the metric to delete: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - This corresponds to the ``metric_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.DeleteLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_metrics.DeleteLogMetricRequest): - request = logging_metrics.DeleteLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if metric_name is not None: - request.metric_name = metric_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "MetricsServiceV2Client", -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py deleted file mode 100644 index a3faa77a20f7..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.logging_v2.types import logging_metrics - - -class ListLogMetricsPager: - """A pager for iterating through ``list_log_metrics`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``metrics`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLogMetrics`` requests and continue to iterate - through the ``metrics`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., logging_metrics.ListLogMetricsResponse], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogMetricsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogMetricsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = logging_metrics.ListLogMetricsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[logging_metrics.LogMetric]: - for page in self.pages: - yield from page.metrics - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLogMetricsAsyncPager: - """A pager for iterating through ``list_log_metrics`` requests. - - This class thinly wraps an initial - :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``metrics`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLogMetrics`` requests and continue to iterate - through the ``metrics`` field on the - corresponding responses. - - All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.logging_v2.types.ListLogMetricsRequest): - The initial request object. - response (google.cloud.logging_v2.types.ListLogMetricsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = logging_metrics.ListLogMetricsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: - async def async_generator(): - async for page in self.pages: - for response in page.metrics: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py deleted file mode 100644 index 28e9b710ec84..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetricsServiceV2Transport -from .grpc import MetricsServiceV2GrpcTransport -from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] -_transport_registry['grpc'] = MetricsServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport - -__all__ = ( - 'MetricsServiceV2Transport', - 'MetricsServiceV2GrpcTransport', - 'MetricsServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py deleted file mode 100644 index b9170bf568f9..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class MetricsServiceV2Transport(abc.ABC): - """Abstract transport class for MetricsServiceV2.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ) - - DEFAULT_HOST: str = 'logging.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. 
- self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self-signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required version of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
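The _get_scopes_kwargs gate above exists because google-auth only learned the default_scopes keyword in 1.25.0. A distilled, standalone rendering of that decision, with the scope list abbreviated:

import packaging.version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)  # abbreviated

def scopes_kwargs(auth_version, scopes=None):
    # Newer google-auth separates user-supplied scopes from library
    # defaults; older releases accept only a single scopes argument.
    if auth_version and (
        packaging.version.parse(auth_version)
        >= packaging.version.parse("1.25.0")
    ):
        return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
    return {"scopes": scopes or AUTH_SCOPES}

print(scopes_kwargs("1.26.0"))  # {'scopes': None, 'default_scopes': (...)}
print(scopes_kwargs("1.20.0"))  # {'scopes': ('https://...cloud-platform',)}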
- self._wrapped_methods = { - self.list_log_metrics: gapic_v1.method.wrap_method( - self.list_log_metrics, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_log_metric: gapic_v1.method.wrap_method( - self.get_log_metric, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_log_metric: gapic_v1.method.wrap_method( - self.create_log_metric, - default_timeout=60.0, - client_info=client_info, - ), - self.update_log_metric: gapic_v1.method.wrap_method( - self.update_log_metric, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_log_metric: gapic_v1.method.wrap_method( - self.delete_log_metric, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Union[ - logging_metrics.ListLogMetricsResponse, - Awaitable[logging_metrics.ListLogMetricsResponse] - ]]: - raise NotImplementedError() - - @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: - raise NotImplementedError() - - @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: - raise NotImplementedError() - - @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: - raise NotImplementedError() - - @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'MetricsServiceV2Transport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py deleted file mode 100644 index e300d9f5320e..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ /dev/null @@ -1,357 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
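The Retry(initial=0.1, maximum=60.0, multiplier=1.3, deadline=60.0) blocks above describe exponential backoff for the idempotent methods. Ignoring the jitter google.api_core applies in practice, the nominal schedule can be sketched as:

def nominal_delays(initial=0.1, maximum=60.0, multiplier=1.3, deadline=60.0):
    """Yield nominal backoff delays until the overall deadline is exhausted."""
    delay, elapsed = initial, 0.0
    while elapsed < deadline:
        step = min(delay, maximum)  # each delay is capped at `maximum`
        yield step
        elapsed += step
        delay *= multiplier

print([round(d, 3) for d in nominal_delays()][:5])
# [0.1, 0.13, 0.169, 0.22, 0.286] -- and only DeadlineExceeded,
# InternalServerError, and ServiceUnavailable trigger a retry at all,
# per the predicate above.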
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 # type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO - - -class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): - """gRPC backend transport for MetricsServiceV2. - - Service for configuring logs-based metrics. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: - r"""Return a callable for the list log metrics method over gRPC. - - Lists logs-based metrics. - - Returns: - Callable[[~.ListLogMetricsRequest], - ~.ListLogMetricsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', - request_serializer=logging_metrics.ListLogMetricsRequest.serialize, - response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, - ) - return self._stubs['list_log_metrics'] - - @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: - r"""Return a callable for the get log metric method over gRPC. - - Gets a logs-based metric. - - Returns: - Callable[[~.GetLogMetricRequest], - ~.LogMetric]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', - request_serializer=logging_metrics.GetLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['get_log_metric'] - - @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: - r"""Return a callable for the create log metric method over gRPC. - - Creates a logs-based metric. - - Returns: - Callable[[~.CreateLogMetricRequest], - ~.LogMetric]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', - request_serializer=logging_metrics.CreateLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['create_log_metric'] - - @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: - r"""Return a callable for the update log metric method over gRPC. - - Creates or updates a logs-based metric. - - Returns: - Callable[[~.UpdateLogMetricRequest], - ~.LogMetric]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', - request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['update_log_metric'] - - @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete log metric method over gRPC. - - Deletes a logs-based metric. - - Returns: - Callable[[~.DeleteLogMetricRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', - request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_log_metric'] - - -__all__ = ( - 'MetricsServiceV2GrpcTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py deleted file mode 100644 index 7da832822ebd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 # type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO -from .grpc import MetricsServiceV2GrpcTransport - - -class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): - """gRPC AsyncIO backend transport for MetricsServiceV2. - - Service for configuring logs-based metrics. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Awaitable[logging_metrics.ListLogMetricsResponse]]: - r"""Return a callable for the list log metrics method over gRPC. - - Lists logs-based metrics. - - Returns: - Callable[[~.ListLogMetricsRequest], - Awaitable[~.ListLogMetricsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', - request_serializer=logging_metrics.ListLogMetricsRequest.serialize, - response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, - ) - return self._stubs['list_log_metrics'] - - @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: - r"""Return a callable for the get log metric method over gRPC. - - Gets a logs-based metric. - - Returns: - Callable[[~.GetLogMetricRequest], - Awaitable[~.LogMetric]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', - request_serializer=logging_metrics.GetLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['get_log_metric'] - - @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: - r"""Return a callable for the create log metric method over gRPC. - - Creates a logs-based metric. - - Returns: - Callable[[~.CreateLogMetricRequest], - Awaitable[~.LogMetric]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', - request_serializer=logging_metrics.CreateLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['create_log_metric'] - - @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: - r"""Return a callable for the update log metric method over gRPC. - - Creates or updates a logs-based metric. - - Returns: - Callable[[~.UpdateLogMetricRequest], - Awaitable[~.LogMetric]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', - request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, - response_deserializer=logging_metrics.LogMetric.deserialize, - ) - return self._stubs['update_log_metric'] - - @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete log metric method over gRPC. - - Deletes a logs-based metric. - - Returns: - Callable[[~.DeleteLogMetricRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', - request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_log_metric'] - - -__all__ = ( - 'MetricsServiceV2GrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py deleted file mode 100644 index 38c93c541801..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/__init__.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .log_entry import ( - LogEntry, - LogEntryOperation, - LogEntrySourceLocation, -) -from .logging import ( - DeleteLogRequest, - ListLogEntriesRequest, - ListLogEntriesResponse, - ListLogsRequest, - ListLogsResponse, - ListMonitoredResourceDescriptorsRequest, - ListMonitoredResourceDescriptorsResponse, - TailLogEntriesRequest, - TailLogEntriesResponse, - WriteLogEntriesPartialErrors, - WriteLogEntriesRequest, - WriteLogEntriesResponse, -) -from .logging_config import ( - BigQueryOptions, - CmekSettings, - CreateBucketRequest, - CreateExclusionRequest, - CreateSinkRequest, - CreateViewRequest, - DeleteBucketRequest, - DeleteExclusionRequest, - DeleteSinkRequest, - DeleteViewRequest, - GetBucketRequest, - GetCmekSettingsRequest, - GetExclusionRequest, - GetSinkRequest, - GetViewRequest, - ListBucketsRequest, - ListBucketsResponse, - ListExclusionsRequest, - ListExclusionsResponse, - ListSinksRequest, - ListSinksResponse, - ListViewsRequest, - ListViewsResponse, - LogBucket, - LogExclusion, - LogSink, - LogView, - UndeleteBucketRequest, - UpdateBucketRequest, - UpdateCmekSettingsRequest, - UpdateExclusionRequest, - UpdateSinkRequest, - UpdateViewRequest, - LifecycleState, -) -from .logging_metrics import ( - CreateLogMetricRequest, - DeleteLogMetricRequest, - GetLogMetricRequest, - ListLogMetricsRequest, - ListLogMetricsResponse, - LogMetric, - UpdateLogMetricRequest, -) - -__all__ = ( - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryOptions', - 'CmekSettings', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 
'GetSinkRequest', - 'GetViewRequest', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'LifecycleState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py deleted file mode 100644 index 45b1c8858763..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/log_entry.py +++ /dev/null @@ -1,321 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.logging.type import http_request_pb2 # type: ignore -from google.logging.type import log_severity_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.logging.v2', - manifest={ - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - }, -) - - -class LogEntry(proto.Message): - r"""An individual entry in a log. - Attributes: - log_name (str): - Required. The resource name of the log to which this log - entry belongs: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - A project number may be used in place of PROJECT_ID. The - project number is translated to its corresponding PROJECT_ID - internally and the ``log_name`` field will contain - PROJECT_ID in queries and exports. - - ``[LOG_ID]`` must be URL-encoded within ``log_name``. - Example: - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - ``[LOG_ID]`` must be less than 512 characters long and can - only include the following characters: upper and lower case - alphanumeric characters, forward-slash, underscore, hyphen, - and period. - - For backward compatibility, if ``log_name`` begins with a - forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. - Listing the log entry will not show the leading slash and - filtering for a log name with a leading slash will never - return any results. - resource (google.api.monitored_resource_pb2.MonitoredResource): - Required. 
The monitored resource that - produced this log entry. - Example: a log entry that reports a database - error would be associated with the monitored - resource designating the particular database - that reported the error. - proto_payload (google.protobuf.any_pb2.Any): - The log entry payload, represented as a - protocol buffer. Some Google Cloud Platform - services use this field for their log entry - payloads. - The following protocol buffer types are - supported; user-defined types are not supported: - - "type.googleapis.com/google.cloud.audit.AuditLog" - "type.googleapis.com/google.appengine.logging.v1.RequestLog". - text_payload (str): - The log entry payload, represented as a - Unicode string (UTF-8). - json_payload (google.protobuf.struct_pb2.Struct): - The log entry payload, represented as a - structure that is expressed as a JSON object. - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time the event described by the log entry - occurred. This time is used to compute the log entry's age - and to enforce the logs retention period. If this field is - omitted in a new log entry, then Logging assigns it the - current time. Timestamps have nanosecond accuracy, but - trailing zeros in the fractional seconds might be omitted - when the timestamp is displayed. - - Incoming log entries must have timestamps that don't exceed - the `logs retention - period `__ - in the past, and that don't exceed 24 hours in the future. - Log entries outside those time boundaries aren't ingested by - Logging. - receive_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the log entry was - received by Logging. - severity (google.logging.type.log_severity_pb2.LogSeverity): - Optional. The severity of the log entry. The default value - is ``LogSeverity.DEFAULT``. - insert_id (str): - Optional. A unique identifier for the log entry. If you - provide a value, then Logging considers other log entries in - the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which are removed in a - single query result. However, there are no guarantees of - de-duplication in the export of logs. - - If the ``insert_id`` is omitted when writing a log entry, - the Logging API assigns its own unique identifier in this - field. - - In queries, the ``insert_id`` is also used to order log - entries that have the same ``log_name`` and ``timestamp`` - values. - http_request (google.logging.type.http_request_pb2.HttpRequest): - Optional. Information about the HTTP request - associated with this log entry, if applicable. - labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): - Optional. A set of user-defined (key, value) - data that provides additional information about - the log entry. - operation (google.cloud.logging_v2.types.LogEntryOperation): - Optional. Information about an operation - associated with the log entry, if applicable. - trace (str): - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: - ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` - span_id (str): - Optional. The span ID within the trace associated with the - log entry. - - For Trace spans, this is the same format that the Trace API - v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as ``000000000000004a``. - trace_sampled (bool): - Optional. 
The sampling decision of the trace associated with - the log entry. - - True means that the trace resource name in the ``trace`` - field was sampled for storage in a trace backend. False - means that the trace was not sampled for storage when this - log entry was written, or the sampling decision was unknown - at the time. A non-sampled ``trace`` value is still useful - as a request correlation identifier. The default is False. - source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): - Optional. Source code location information - associated with the log entry, if any. - """ - - log_name = proto.Field( - proto.STRING, - number=12, - ) - resource = proto.Field( - proto.MESSAGE, - number=8, - message=monitored_resource_pb2.MonitoredResource, - ) - proto_payload = proto.Field( - proto.MESSAGE, - number=2, - oneof='payload', - message=any_pb2.Any, - ) - text_payload = proto.Field( - proto.STRING, - number=3, - oneof='payload', - ) - json_payload = proto.Field( - proto.MESSAGE, - number=6, - oneof='payload', - message=struct_pb2.Struct, - ) - timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - receive_timestamp = proto.Field( - proto.MESSAGE, - number=24, - message=timestamp_pb2.Timestamp, - ) - severity = proto.Field( - proto.ENUM, - number=10, - enum=log_severity_pb2.LogSeverity, - ) - insert_id = proto.Field( - proto.STRING, - number=4, - ) - http_request = proto.Field( - proto.MESSAGE, - number=7, - message=http_request_pb2.HttpRequest, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=11, - ) - operation = proto.Field( - proto.MESSAGE, - number=15, - message='LogEntryOperation', - ) - trace = proto.Field( - proto.STRING, - number=22, - ) - span_id = proto.Field( - proto.STRING, - number=27, - ) - trace_sampled = proto.Field( - proto.BOOL, - number=30, - ) - source_location = proto.Field( - proto.MESSAGE, - number=23, - message='LogEntrySourceLocation', - ) - - -class LogEntryOperation(proto.Message): - r"""Additional information about a potentially long-running - operation with which a log entry is associated. - - Attributes: - id (str): - Optional. An arbitrary operation identifier. - Log entries with the same identifier are assumed - to be part of the same operation. - producer (str): - Optional. An arbitrary producer identifier. The combination - of ``id`` and ``producer`` must be globally unique. Examples - for ``producer``: ``"MyDivision.MyBigCompany.com"``, - ``"github.com/MyProject/MyApplication"``. - first (bool): - Optional. Set this to True if this is the - first log entry in the operation. - last (bool): - Optional. Set this to True if this is the - last log entry in the operation. - """ - - id = proto.Field( - proto.STRING, - number=1, - ) - producer = proto.Field( - proto.STRING, - number=2, - ) - first = proto.Field( - proto.BOOL, - number=3, - ) - last = proto.Field( - proto.BOOL, - number=4, - ) - - -class LogEntrySourceLocation(proto.Message): - r"""Additional information about the source code location that - produced the log entry. - - Attributes: - file (str): - Optional. Source file name. Depending on the - runtime environment, this might be a simple name - or a fully-qualified name. - line (int): - Optional. Line within the source file. - 1-based; 0 indicates no line number available. - function (str): - Optional. Human-readable name of the function or method - being invoked, with optional context such as the class or - package name. 
This information may be used in contexts such - as the logs viewer, where a file and line number are less - meaningful. The format can vary by language. For example: - ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` - (Go), ``function`` (Python). - """ - - file = proto.Field( - proto.STRING, - number=1, - ) - line = proto.Field( - proto.INT64, - number=2, - ) - function = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py deleted file mode 100644 index cfae1781a75d..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging.py +++ /dev/null @@ -1,573 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import monitored_resource_pb2 # type: ignore -from google.cloud.logging_v2.types import log_entry -from google.protobuf import duration_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.logging.v2', - manifest={ - 'DeleteLogRequest', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - }, -) - - -class DeleteLogRequest(proto.Message): - r"""The parameters to DeleteLog. - Attributes: - log_name (str): - Required. The resource name of the log to delete: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. For example, - ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - For more information about log names, see - [LogEntry][google.logging.v2.LogEntry]. - """ - - log_name = proto.Field( - proto.STRING, - number=1, - ) - - -class WriteLogEntriesRequest(proto.Message): - r"""The parameters to WriteLogEntries. - Attributes: - log_name (str): - Optional. A default log resource name that is assigned to - all log entries in ``entries`` that do not specify a value - for ``log_name``: - - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" - - ``[LOG_ID]`` must be URL-encoded. 
For example: - - :: - - "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - - The permission ``logging.logEntries.create`` is needed on - each project, organization, billing account, or folder that - is receiving new log entries, whether the resource is - specified in ``logName`` or in an individual log entry. - resource (google.api.monitored_resource_pb2.MonitoredResource): - Optional. A default monitored resource object that is - assigned to all log entries in ``entries`` that do not - specify a value for ``resource``. Example: - - :: - - { "type": "gce_instance", - "labels": { - "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - - See [LogEntry][google.logging.v2.LogEntry]. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): - Optional. Default labels that are added to the ``labels`` - field of all log entries in ``entries``. If a log entry - already has a label with the same key as a label in this - parameter, then the log entry's label is not changed. See - [LogEntry][google.logging.v2.LogEntry]. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): - Required. The log entries to send to Logging. The order of - log entries in this list does not matter. Values supplied in - this method's ``log_name``, ``resource``, and ``labels`` - fields are copied into those log entries in this list that - do not include values for their corresponding fields. For - more information, see the - [LogEntry][google.logging.v2.LogEntry] type. - - If the ``timestamp`` or ``insert_id`` fields are missing in - log entries, then this method supplies the current time or a - unique identifier, respectively. The supplied values are - chosen so that, among the log entries that did not supply - their own values, the entries earlier in the list will sort - before the entries later in the list. See the - ``entries.list`` method. - - Log entries with timestamps that are more than the `logs - retention - period `__ in - the past or more than 24 hours in the future will not be - available when calling ``entries.list``. However, those log - entries can still be `exported with - LogSinks `__. - - To improve throughput and to avoid exceeding the `quota - limit `__ for - calls to ``entries.write``, you should try to include - several log entries in this list, rather than calling this - method for each individual log entry. - partial_success (bool): - Optional. Whether valid entries should be written even if - some other entries fail due to INVALID_ARGUMENT or - PERMISSION_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. - dry_run (bool): - Optional. If true, the request should expect - normal response, but the entries won't be - persisted nor exported. Useful for checking - whether the logging API endpoints are working - properly before sending valuable data. 
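Putting the WriteLogEntriesRequest fields just described together, a construction sketch (project and log names are hypothetical; dry_run exercises the endpoint without persisting anything):

from google.cloud.logging_v2.types import LogEntry, WriteLogEntriesRequest

request = WriteLogEntriesRequest(
    log_name="projects/my-project/logs/my-log",
    entries=[
        LogEntry(text_payload="first entry"),
        LogEntry(text_payload="second entry"),
    ],
    partial_success=True,  # write valid entries even if siblings fail
    dry_run=True,          # validate only; nothing is persisted
)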
- """ - - log_name = proto.Field( - proto.STRING, - number=1, - ) - resource = proto.Field( - proto.MESSAGE, - number=2, - message=monitored_resource_pb2.MonitoredResource, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - entries = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=log_entry.LogEntry, - ) - partial_success = proto.Field( - proto.BOOL, - number=5, - ) - dry_run = proto.Field( - proto.BOOL, - number=6, - ) - - -class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. """ - - -class WriteLogEntriesPartialErrors(proto.Message): - r"""Error details for WriteLogEntries with partial success. - Attributes: - log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): - When ``WriteLogEntriesRequest.partial_success`` is true, - records the error status for entries that were not written - due to a permanent error, keyed by the entry's zero-based - index in ``WriteLogEntriesRequest.entries``. - - Failed requests for which no entries are written will not - include per-entry errors. - """ - - log_entry_errors = proto.MapField( - proto.INT32, - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - - -class ListLogEntriesRequest(proto.Message): - r"""The parameters to ``ListLogEntries``. - Attributes: - resource_names (Sequence[str]): - Required. Names of one or more parent resources from which - to retrieve log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - Projects listed in the ``project_ids`` field are added to - this list. - filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not listed in ``resource_names`` will cause the filter to - return no results. The maximum length of the filter is 20000 - characters. - order_by (str): - Optional. How the results should be sorted. Presently, the - only permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in - order of increasing values of ``LogEntry.timestamp`` (oldest - first), and the second option returns entries in order of - decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` - values. - page_size (int): - Optional. The maximum number of results to return from this - request. Default is 50. If the value is negative or exceeds - 1000, the request is rejected. The presence of - ``next_page_token`` in the response indicates that more - results might be available. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``page_token`` must be the value of ``next_page_token`` from - the previous response. 
The values of other method parameters - should be identical to those in the previous call. - """ - - resource_names = proto.RepeatedField( - proto.STRING, - number=8, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - order_by = proto.Field( - proto.STRING, - number=3, - ) - page_size = proto.Field( - proto.INT32, - number=4, - ) - page_token = proto.Field( - proto.STRING, - number=5, - ) - - -class ListLogEntriesResponse(proto.Message): - r"""Result returned from ``ListLogEntries``. - Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): - A list of log entries. If ``entries`` is empty, - ``nextPageToken`` may still be returned, indicating that - more entries may exist. See ``nextPageToken`` for more - information. - next_page_token (str): - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the - next set of results, call this method again using the value - of ``nextPageToken`` as ``pageToken``. - - If a value for ``next_page_token`` appears and the - ``entries`` field is empty, it means that the search found - no log entries so far but it did not have time to search all - the possible log entries. Retry the method with this value - for ``page_token`` to continue the search. Alternatively, - consider speeding up the search by changing your filter to - specify a single log name or resource type, or to narrow the - time range of the search. - """ - - @property - def raw_page(self): - return self - - entries = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=log_entry.LogEntry, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListMonitoredResourceDescriptorsRequest(proto.Message): - r"""The parameters to ListMonitoredResourceDescriptors - Attributes: - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - """ - - page_size = proto.Field( - proto.INT32, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListMonitoredResourceDescriptorsResponse(proto.Message): - r"""Result returned from ListMonitoredResourceDescriptors. - Attributes: - resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): - A list of resource descriptors. - next_page_token (str): - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the - next set of results, call this method again using the value - of ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - resource_descriptors = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=monitored_resource_pb2.MonitoredResourceDescriptor, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListLogsRequest(proto.Message): - r"""The parameters to ListLogs. - Attributes: - parent (str): - Required. The resource name that owns the logs: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". 
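The ``List*`` requests above all share the same paging convention; a sketch of walking matching entries with the generated client, again under placeholder names — the returned pager follows ``next_page_token`` for you::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.cloud.logging_v2.types import ListLogEntriesRequest

    client = LoggingServiceV2Client()
    request = ListLogEntriesRequest(
        resource_names=["projects/my-project"],  # placeholder
        filter="severity>=ERROR",
        order_by="timestamp desc",  # newest first
        page_size=100,
    )
    # Iterating the pager fetches successive pages transparently.
    for entry in client.list_log_entries(request=request):
        print(entry.log_name, entry.timestamp)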
- page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - resource_names (Sequence[str]): - Optional. The resource name that owns the logs: - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - To support legacy queries, it could also be: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - resource_names = proto.RepeatedField( - proto.STRING, - number=8, - ) - - -class ListLogsResponse(proto.Message): - r"""Result returned from ListLogs. - Attributes: - log_names (Sequence[str]): - A list of log names. For example, - ``"projects/my-project/logs/syslog"`` or - ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. - next_page_token (str): - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the - next set of results, call this method again using the value - of ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - log_names = proto.RepeatedField( - proto.STRING, - number=3, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class TailLogEntriesRequest(proto.Message): - r"""The parameters to ``TailLogEntries``. - Attributes: - resource_names (Sequence[str]): - Required. Name of a parent resource from which to retrieve - log entries: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - May alternatively be one or more views: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". - filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Filters `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not in ``resource_names`` will cause the filter to return no - results. The maximum length of the filter is 20000 - characters. - buffer_window (google.protobuf.duration_pb2.Duration): - Optional. The amount of time to buffer log - entries at the server before being returned to - prevent out of order results due to late - arriving log entries. 
Valid values are between - 0-60000 milliseconds. Defaults to 2000 - milliseconds. - """ - - resource_names = proto.RepeatedField( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - buffer_window = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - -class TailLogEntriesResponse(proto.Message): - r"""Result returned from ``TailLogEntries``. - Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): - A list of log entries. Each response in the stream will - order entries with increasing values of - ``LogEntry.timestamp``. Ordering is not guaranteed between - separate responses. - suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): - If entries that otherwise would have been - included in the session were not sent back to - the client, counts of relevant entries omitted - from the session with the reason that they were - not included. There will be at most one of each - reason per response. The counts represent the - number of suppressed entries since the last - streamed response. - """ - - class SuppressionInfo(proto.Message): - r"""Information about entries that were omitted from the session. - Attributes: - reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): - The reason that entries were omitted from the - session. - suppressed_count (int): - A lower bound on the count of entries omitted due to - ``reason``. - """ - class Reason(proto.Enum): - r"""An indicator of why entries were omitted.""" - REASON_UNSPECIFIED = 0 - RATE_LIMIT = 1 - NOT_CONSUMED = 2 - - reason = proto.Field( - proto.ENUM, - number=1, - enum='TailLogEntriesResponse.SuppressionInfo.Reason', - ) - suppressed_count = proto.Field( - proto.INT32, - number=2, - ) - - entries = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=log_entry.LogEntry, - ) - suppression_info = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=SuppressionInfo, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py deleted file mode 100644 index a4b7b2571d7a..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_config.py +++ /dev/null @@ -1,1457 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
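``TailLogEntries``, defined just above, is a bidirectional streaming call: the client sends a stream of requests and receives a stream of responses. A minimal sketch, assuming the generated client and placeholder resource names::

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.cloud.logging_v2.types import TailLogEntriesRequest

    client = LoggingServiceV2Client()

    def requests():
        # The first request opens the session; later requests could
        # update the filter, but a single request suffices here.
        yield TailLogEntriesRequest(
            resource_names=["projects/my-project"],  # placeholder
            filter="severity>=WARNING",
        )

    for response in client.tail_log_entries(requests=requests()):
        for entry in response.entries:
            print(entry.text_payload)
        for info in response.suppression_info:
            print("omitted:", info.reason, info.suppressed_count)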
-# -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.logging.v2', - manifest={ - 'LifecycleState', - 'LogBucket', - 'LogView', - 'LogSink', - 'BigQueryOptions', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'CreateBucketRequest', - 'UpdateBucketRequest', - 'GetBucketRequest', - 'DeleteBucketRequest', - 'UndeleteBucketRequest', - 'ListViewsRequest', - 'ListViewsResponse', - 'CreateViewRequest', - 'UpdateViewRequest', - 'GetViewRequest', - 'DeleteViewRequest', - 'ListSinksRequest', - 'ListSinksResponse', - 'GetSinkRequest', - 'CreateSinkRequest', - 'UpdateSinkRequest', - 'DeleteSinkRequest', - 'LogExclusion', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'GetExclusionRequest', - 'CreateExclusionRequest', - 'UpdateExclusionRequest', - 'DeleteExclusionRequest', - 'GetCmekSettingsRequest', - 'UpdateCmekSettingsRequest', - 'CmekSettings', - }, -) - - -class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states.""" - LIFECYCLE_STATE_UNSPECIFIED = 0 - ACTIVE = 1 - DELETE_REQUESTED = 2 - - -class LogBucket(proto.Message): - r"""Describes a repository of logs. - Attributes: - name (str): - The resource name of the bucket. For example: - "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" - - For the location of ``global`` it is unspecified where logs - are actually stored. Once a bucket has been created, the - location can not be changed. - description (str): - Describes this bucket. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of the - bucket. This is not set for any of the default - buckets. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of the - bucket. - retention_days (int): - Logs will be retained by default for this - amount of time, after which they will - automatically be deleted. The minimum retention - period is 1 day. If this value is set to zero at - bucket creation time, the default time of 30 - days will be used. - locked (bool): - Whether the bucket has been locked. - The retention period on a locked bucket may not - be changed. Locked buckets may only be deleted - if they are empty. - lifecycle_state (google.cloud.logging_v2.types.LifecycleState): - Output only. The bucket lifecycle state. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - retention_days = proto.Field( - proto.INT32, - number=11, - ) - locked = proto.Field( - proto.BOOL, - number=9, - ) - lifecycle_state = proto.Field( - proto.ENUM, - number=12, - enum='LifecycleState', - ) - - -class LogView(proto.Message): - r"""Describes a view over logs in a bucket. - Attributes: - name (str): - The resource name of the view. - For example - "projects/my-project-id/locations/my- - location/buckets/my-bucket-id/views/my-view - description (str): - Describes this view. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of the - view. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of the - view. 
- filter (str): - Filter that restricts which log entries in a bucket are - visible in this view. Filters are restricted to be a logical - AND of ==/!= of any of the following: originating - project/folder/organization/billing account. resource type - log id Example: SOURCE("projects/myproject") AND - resource.type = "gce_instance" AND LOG_ID("stdout") - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - filter = proto.Field( - proto.STRING, - number=7, - ) - - -class LogSink(proto.Message): - r"""Describes a sink used to export log entries to one of the - following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. The sink must be - created within a project, organization, billing account, or - folder. - - Attributes: - name (str): - Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink - identifiers are limited to 100 characters and can include - only the following characters: upper and lower-case - alphanumeric characters, underscores, hyphens, and periods. - First character has to be alphanumeric. - destination (str): - Required. The export destination: - - :: - - "storage.googleapis.com/[GCS_BUCKET]" - "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" - "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" - - The sink's ``writer_identity``, set when the sink is - created, must have permission to write to the destination or - else the log entries are not exported. For more information, - see `Exporting Logs with - Sinks `__. - filter (str): - Optional. An `advanced logs - filter `__. - The only exported log entries are those that are in the - resource owning the sink and that match the filter. For - example: - - :: - - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR - description (str): - Optional. A description of this sink. - The maximum length of the description is 8000 - characters. - disabled (bool): - Optional. If set to True, then this sink is - disabled and it does not export any log entries. - exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): - Optional. Log entries that match any of the exclusion - filters will not be exported. If a log entry is matched by - both ``filter`` and one of ``exclusion_filters`` it will not - be exported. - output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): - Deprecated. This field is unused. - writer_identity (str): - Output only. An IAM identity—a service account or - group—under which Logging writes the exported log entries to - the sink's destination. This field is set by - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - and - [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - based on the value of ``unique_writer_identity`` in those - methods. - - Until you grant this identity write-access to the - destination, log entry exports from this sink will fail. For - more information, see `Granting Access for a - Resource `__. - Consult the destination service's documentation to determine - the appropriate IAM roles to assign to the identity. - include_children (bool): - Optional. 
This field applies only to sinks owned by - organizations and folders. If the field is false, the - default, only the logs owned by the sink's parent resource - are available for export. If the field is true, then logs - from all the projects, folders, and billing accounts - contained in the sink's parent resource are also available - for export. Whether a particular log entry from the children - is exported depends on the sink's filter expression. For - example, if this field is true, then the filter - ``resource.type=gce_instance`` would export all Compute - Engine VM instance log entries from all projects in the - sink's parent. To only export entries from certain child - projects, filter on the project part of the log name: - - :: - - logName:("projects/test-project1/" OR "projects/test-project2/") AND - resource.type=gce_instance - bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): - Optional. Options that affect sinks exporting - data to BigQuery. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of the - sink. - This field may not be present for older sinks. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of the - sink. - This field may not be present for older sinks. - """ - class VersionFormat(proto.Enum): - r"""Deprecated. This is unused.""" - VERSION_FORMAT_UNSPECIFIED = 0 - V2 = 1 - V1 = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - destination = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - description = proto.Field( - proto.STRING, - number=18, - ) - disabled = proto.Field( - proto.BOOL, - number=19, - ) - exclusions = proto.RepeatedField( - proto.MESSAGE, - number=16, - message='LogExclusion', - ) - output_version_format = proto.Field( - proto.ENUM, - number=6, - enum=VersionFormat, - ) - writer_identity = proto.Field( - proto.STRING, - number=8, - ) - include_children = proto.Field( - proto.BOOL, - number=9, - ) - bigquery_options = proto.Field( - proto.MESSAGE, - number=12, - oneof='options', - message='BigQueryOptions', - ) - create_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - - -class BigQueryOptions(proto.Message): - r"""Options that change functionality of a sink exporting data to - BigQuery. - - Attributes: - use_partitioned_tables (bool): - Optional. Whether to use `BigQuery's partition - tables `__. - By default, Logging creates dated tables based on the log - entries' timestamps, e.g. syslog_20170523. With partitioned - tables the date suffix is no longer present and `special - query - syntax `__ - has to be used instead. In both cases, tables are sharded - based on UTC timezone. - uses_timestamp_column_partitioning (bool): - Output only. True if new timestamp column based partitioning - is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will - use timestamp column based partitioning. If - use_partitioned_tables is false, this value has no meaning - and will be false. Legacy sinks using partitioned tables - will have this field set to false. 
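Putting ``LogSink`` and ``BigQueryOptions`` together, a sketch of a sink message that routes errors to BigQuery with partitioned tables — all resource names are placeholders::

    from google.cloud.logging_v2.types import BigQueryOptions, LogSink

    sink = LogSink(
        name="errors-to-bq",
        destination=(
            "bigquery.googleapis.com/projects/my-project/datasets/error_logs"
        ),
        filter="severity>=ERROR",
        # Partitioned tables drop the dated table suffix described above.
        bigquery_options=BigQueryOptions(use_partitioned_tables=True),
    )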
- """ - - use_partitioned_tables = proto.Field( - proto.BOOL, - number=1, - ) - uses_timestamp_column_partitioning = proto.Field( - proto.BOOL, - number=3, - ) - - -class ListBucketsRequest(proto.Message): - r"""The parameters to ``ListBuckets``. - Attributes: - parent (str): - Required. The parent resource whose buckets are to be - listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]" - - Note: The locations portion of the resource must be - specified, but supplying the character ``-`` in place of - [LOCATION_ID] will return all buckets. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - - -class ListBucketsResponse(proto.Message): - r"""The response from ListBuckets. - Attributes: - buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): - A list of buckets. - next_page_token (str): - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - buckets = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='LogBucket', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateBucketRequest(proto.Message): - r"""The parameters to ``CreateBucket``. - Attributes: - parent (str): - Required. The resource in which to create the bucket: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - - Example: ``"projects/my-logging-project/locations/global"`` - bucket_id (str): - Required. A client-assigned identifier such as - ``"my-bucket"``. Identifiers are limited to 100 characters - and can include only letters, digits, underscores, hyphens, - and periods. - bucket (google.cloud.logging_v2.types.LogBucket): - Required. The new bucket. The region - specified in the new bucket must be compliant - with any Location Restriction Org Policy. The - name field in the bucket is ignored. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - bucket_id = proto.Field( - proto.STRING, - number=2, - ) - bucket = proto.Field( - proto.MESSAGE, - number=3, - message='LogBucket', - ) - - -class UpdateBucketRequest(proto.Message): - r"""The parameters to ``UpdateBucket``. - Attributes: - name (str): - Required. The full resource name of the bucket to update. 
- - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - Also requires permission - "resourcemanager.projects.updateLiens" to set the locked - property - bucket (google.cloud.logging_v2.types.LogBucket): - Required. The updated bucket. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask that specifies the fields in ``bucket`` - that need an update. A bucket field will be overwritten if, - and only if, it is in the update mask. ``name`` and output - only fields cannot be updated. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=retention_days``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - bucket = proto.Field( - proto.MESSAGE, - number=2, - message='LogBucket', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - - -class GetBucketRequest(proto.Message): - r"""The parameters to ``GetBucket``. - Attributes: - name (str): - Required. The resource name of the bucket: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBucketRequest(proto.Message): - r"""The parameters to ``DeleteBucket``. - Attributes: - name (str): - Required. The full resource name of the bucket to delete. - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class UndeleteBucketRequest(proto.Message): - r"""The parameters to ``UndeleteBucket``. - Attributes: - name (str): - Required. The full resource name of the bucket to undelete. - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListViewsRequest(proto.Message): - r"""The parameters to ``ListViews``. - Attributes: - parent (str): - Required. The bucket whose views are to be listed: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". - page_token (str): - Optional. 
If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - - -class ListViewsResponse(proto.Message): - r"""The response from ListViews. - Attributes: - views (Sequence[google.cloud.logging_v2.types.LogView]): - A list of views. - next_page_token (str): - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - views = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='LogView', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateViewRequest(proto.Message): - r"""The parameters to ``CreateView``. - Attributes: - parent (str): - Required. The bucket in which to create the view - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - - Example: - ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` - view_id (str): - Required. The id to use for this view. - view (google.cloud.logging_v2.types.LogView): - Required. The new view. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - view_id = proto.Field( - proto.STRING, - number=2, - ) - view = proto.Field( - proto.MESSAGE, - number=3, - message='LogView', - ) - - -class UpdateViewRequest(proto.Message): - r"""The parameters to ``UpdateView``. - Attributes: - name (str): - Required. The full resource name of the view to update - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. - view (google.cloud.logging_v2.types.LogView): - Required. The updated view. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Field mask that specifies the fields in ``view`` - that need an update. A field will be overwritten if, and - only if, it is in the update mask. ``name`` and output only - fields cannot be updated. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=filter``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - view = proto.Field( - proto.MESSAGE, - number=2, - message='LogView', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - - -class GetViewRequest(proto.Message): - r"""The parameters to ``GetView``. - Attributes: - name (str): - Required. The resource name of the policy: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
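The view requests above compose in the obvious way; a sketch of creating and then listing views in a bucket, with placeholder resource names throughout::

    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.types import (
        CreateViewRequest,
        ListViewsRequest,
        LogView,
    )

    client = ConfigServiceV2Client()
    parent = "projects/my-project/locations/global/buckets/my-bucket"

    view = LogView(
        description="Compute Engine logs only",
        filter='resource.type = "gce_instance"',
    )
    client.create_view(
        request=CreateViewRequest(parent=parent, view_id="gce-only", view=view)
    )
    for existing in client.list_views(request=ListViewsRequest(parent=parent)):
        print(existing.name)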
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteViewRequest(proto.Message): - r"""The parameters to ``DeleteView``. - Attributes: - name (str): - Required. The full resource name of the view to delete: - - :: - - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSinksRequest(proto.Message): - r"""The parameters to ``ListSinks``. - Attributes: - parent (str): - Required. The parent resource whose sinks are to be listed: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - - -class ListSinksResponse(proto.Message): - r"""Result returned from ``ListSinks``. - Attributes: - sinks (Sequence[google.cloud.logging_v2.types.LogSink]): - A list of sinks. - next_page_token (str): - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - sinks = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='LogSink', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetSinkRequest(proto.Message): - r"""The parameters to ``GetSink``. - Attributes: - sink_name (str): - Required. The resource name of the sink: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - """ - - sink_name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateSinkRequest(proto.Message): - r"""The parameters to ``CreateSink``. - Attributes: - parent (str): - Required. The resource in which to create the sink: - - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. - sink (google.cloud.logging_v2.types.LogSink): - Required. The new sink, whose ``name`` parameter is a sink - identifier that is not already in use. - unique_writer_identity (bool): - Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is - omitted or set to false, and if the sink's parent is a - project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. 
The sink's - destination must be in the same project as the sink itself. - - If this field is set to true, or if the sink is owned by a - non-project resource such as an organization, then the value - of ``writer_identity`` will be a unique service account used - only for exports from the new sink. For more information, - see ``writer_identity`` in - [LogSink][google.logging.v2.LogSink]. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - sink = proto.Field( - proto.MESSAGE, - number=2, - message='LogSink', - ) - unique_writer_identity = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateSinkRequest(proto.Message): - r"""The parameters to ``UpdateSink``. - Attributes: - sink_name (str): - Required. The full resource name of the sink to update, - including the parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. - sink (google.cloud.logging_v2.types.LogSink): - Required. The updated sink, whose name is the same - identifier that appears as part of ``sink_name``. - unique_writer_identity (bool): - Optional. See - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - for a description of this field. When updating a sink, the - effect of this field on the value of ``writer_identity`` in - the updated sink depends on both the old and new values of - this field: - - - If the old and new values of this field are both false or - both true, then there is no change to the sink's - ``writer_identity``. - - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service - account. - - It is an error if the old value is true and the new value - is set to false or defaulted to false. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Field mask that specifies the fields in ``sink`` - that need an update. A sink field will be overwritten if, - and only if, it is in the update mask. ``name`` and output - only fields cannot be updated. - - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - - Example: ``updateMask=filter``. - """ - - sink_name = proto.Field( - proto.STRING, - number=1, - ) - sink = proto.Field( - proto.MESSAGE, - number=2, - message='LogSink', - ) - unique_writer_identity = proto.Field( - proto.BOOL, - number=3, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteSinkRequest(proto.Message): - r"""The parameters to ``DeleteSink``. - Attributes: - sink_name (str): - Required. The full resource name of the sink to delete, - including the parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
- """ - - sink_name = proto.Field( - proto.STRING, - number=1, - ) - - -class LogExclusion(proto.Message): - r"""Specifies a set of log entries that are not to be stored in - Logging. If your GCP resource receives a large volume of logs, - you can use exclusions to reduce your chargeable logs. - Exclusions are processed after log sinks, so you can export log - entries before they are excluded. Note that organization-level - and folder-level exclusions don't apply to child resources, and - that you can't exclude audit log entries. - - Attributes: - name (str): - Required. A client-assigned identifier, such as - ``"load-balancer-exclusion"``. Identifiers are limited to - 100 characters and can include only letters, digits, - underscores, hyphens, and periods. First character has to be - alphanumeric. - description (str): - Optional. A description of this exclusion. - filter (str): - Required. An `advanced logs - filter `__ - that matches the log entries to be excluded. By using the - `sample - function `__, - you can exclude less than 100% of the matching log entries. - For example, the following query matches 99% of low-severity - log entries from Google Cloud Storage buckets: - - ``"resource.type=gcs_bucket severity`__ - for more information. - - Attributes: - name (str): - Required. The resource for which to retrieve CMEK settings. - - :: - - "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" - - Example: ``"organizations/12345/cmekSettings"``. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateCmekSettingsRequest(proto.Message): - r"""The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Attributes: - name (str): - Required. The resource name for the CMEK settings to update. - - :: - - "projects/[PROJECT_ID]/cmekSettings" - "organizations/[ORGANIZATION_ID]/cmekSettings" - "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" - "folders/[FOLDER_ID]/cmekSettings" - - Example: ``"organizations/12345/cmekSettings"``. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. - cmek_settings (google.cloud.logging_v2.types.CmekSettings): - Required. The CMEK settings to update. - - See `Enabling CMEK for Logs - Router `__ - for more information. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Field mask identifying which fields from - ``cmek_settings`` should be updated. A field will be - overwritten if and only if it is in the update mask. Output - only fields cannot be updated. - - See [FieldMask][google.protobuf.FieldMask] for more - information. 
- - Example: ``"updateMask=kmsKeyName"`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - cmek_settings = proto.Field( - proto.MESSAGE, - number=2, - message='CmekSettings', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class CmekSettings(proto.Message): - r"""Describes the customer-managed encryption key (CMEK) settings - associated with a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be configured for - GCP organizations. Once configured, it applies to all projects and - folders in the GCP organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. - - Attributes: - name (str): - Output only. The resource name of the CMEK - settings. - kms_key_name (str): - The resource name for the configured Cloud KMS key. - - KMS key name format: - "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" - - For example: - ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` - - To enable CMEK for the Logs Router, set this field to a - valid ``kms_key_name`` for which the associated service - account has the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned - for the key. - - The Cloud KMS key used by the Log Router can be updated by - changing the ``kms_key_name`` to a new valid key name. - Encryption operations that are in progress will be completed - with the key that was in use when they started. Decryption - operations will be completed using the key that was used at - the time of encryption unless access to that key has been - revoked. - - To disable CMEK for the Logs Router, set this field to an - empty string. - - See `Enabling CMEK for Logs - Router `__ - for more information. - service_account_id (str): - Output only. The service account that will be used by the - Logs Router to access your Cloud KMS key. - - Before enabling CMEK for Logs Router, you must first assign - the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to - the service account that the Logs Router will use to access - your Cloud KMS key. Use - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] - to obtain the service account ID. - - See `Enabling CMEK for Logs - Router `__ - for more information. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - kms_key_name = proto.Field( - proto.STRING, - number=2, - ) - service_account_id = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py deleted file mode 100644 index 252e43760b02..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/google/cloud/logging_v2/types/logging_metrics.py +++ /dev/null @@ -1,371 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.api import distribution_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.logging.v2', - manifest={ - 'LogMetric', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'GetLogMetricRequest', - 'CreateLogMetricRequest', - 'UpdateLogMetricRequest', - 'DeleteLogMetricRequest', - }, -) - - -class LogMetric(proto.Message): - r"""Describes a logs-based metric. The value of the metric is the - number of log entries that match a logs filter in a given time - interval. - Logs-based metrics can also be used to extract values from logs - and create a distribution of the values. The distribution - records the statistics of the extracted values along with an - optional histogram of the values as specified by the bucket - options. - - Attributes: - name (str): - Required. The client-assigned metric identifier. Examples: - ``"error_count"``, ``"nginx/requests"``. - - Metric identifiers are limited to 100 characters and can - include only the following characters: ``A-Z``, ``a-z``, - ``0-9``, and the special characters ``_-.,+!*',()%/``. The - forward-slash character (``/``) denotes a hierarchy of name - pieces, and it cannot be the first character of the name. - - The metric identifier in this field must not be - `URL-encoded `__. - However, when the metric identifier appears as the - ``[METRIC_ID]`` part of a ``metric_name`` API parameter, - then the metric identifier must be URL-encoded. Example: - ``"projects/my-project/metrics/nginx%2Frequests"``. - description (str): - Optional. A description of this metric, which - is used in documentation. The maximum length of - the description is 8000 characters. - filter (str): - Required. An `advanced logs - filter `__ - which is used to match log entries. Example: - - :: - - "resource.type=gae_app AND severity>=ERROR" - - The maximum length of the filter is 20000 characters. - metric_descriptor (google.api.metric_pb2.MetricDescriptor): - Optional. The metric descriptor associated with the - logs-based metric. If unspecified, it uses a default metric - descriptor with a DELTA metric kind, INT64 value type, with - no labels and a unit of "1". Such a metric counts the number - of log entries matching the ``filter`` expression. - - The ``name``, ``type``, and ``description`` fields in the - ``metric_descriptor`` are output only, and is constructed - using the ``name`` and ``description`` field in the - LogMetric. - - To create a logs-based metric that records a distribution of - log values, a DELTA metric kind with a DISTRIBUTION value - type must be used along with a ``value_extractor`` - expression in the LogMetric. - - Each label in the metric descriptor must have a matching - label name as the key and an extractor expression as the - value in the ``label_extractors`` map. - - The ``metric_kind`` and ``value_type`` fields in the - ``metric_descriptor`` cannot be updated once initially - configured. New labels can be added in the - ``metric_descriptor``, but existing labels cannot be - modified except for their description. - value_extractor (str): - Optional. A ``value_extractor`` is required when using a - distribution logs-based metric to extract the values to - record from a log entry. 
Two functions are supported for - value extraction: ``EXTRACT(field)`` or - ``REGEXP_EXTRACT(field, regex)``. The arguments are: - - 1. field: The name of the log entry field from which the - value is to be extracted. - 2. regex: A regular expression using the Google RE2 syntax - (https://github.com/google/re2/wiki/Syntax) with a single - capture group to extract data from the specified log - entry field. The value of the field is converted to a - string before applying the regex. It is an error to - specify a regex that does not include exactly one capture - group. - - The result of the extraction must be convertible to a double - type, as the distribution always records double values. If - either the extraction or the conversion to double fails, - then those values are not recorded in the distribution. - - Example: - ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): - Optional. A map from a label key string to an extractor - expression which is used to extract data from a log entry - field and assign it as the label value. Each label key - specified in the LabelDescriptor must have an associated - extractor expression in this map. The syntax of the - extractor expression is the same as for the - ``value_extractor`` field. - - The extracted value is converted to the type defined in the - label descriptor. If either the extraction or the type - conversion fails, the label will have a default value. The - default value for a string label is an empty string, for an - integer label it is 0, and for a boolean label it is ``false``. - - Note that there are upper bounds on the maximum number of - labels and the number of active time series that are allowed - in a project. - bucket_options (google.api.distribution_pb2.BucketOptions): - Optional. The ``bucket_options`` are required when the - logs-based metric is using a DISTRIBUTION value type and it - describes the bucket boundaries used to create a histogram - of the extracted values. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of the - metric. - This field may not be present for older metrics. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of the - metric. - This field may not be present for older metrics. - version (google.cloud.logging_v2.types.LogMetric.ApiVersion): - Deprecated. The API version that created or - updated this metric. The v2 format is used by - default and cannot be changed.
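A sketch of creating the simplest kind of ``LogMetric`` — a counter with no extractors — under the same placeholder assumptions as the earlier examples::

    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )
    from google.cloud.logging_v2.types import LogMetric

    client = MetricsServiceV2Client()
    metric = LogMetric(
        name="error_count",  # placeholder identifier
        description="Number of entries at severity ERROR or above",
        filter="severity>=ERROR",
    )
    # With no metric_descriptor, this defaults to a DELTA/INT64
    # counter of matching entries, as documented above.
    created = client.create_log_metric(
        parent="projects/my-project",  # placeholder
        metric=metric,
    )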
- """ - class ApiVersion(proto.Enum): - r"""Logging API version.""" - V2 = 0 - V1 = 1 - - name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=2, - ) - filter = proto.Field( - proto.STRING, - number=3, - ) - metric_descriptor = proto.Field( - proto.MESSAGE, - number=5, - message=metric_pb2.MetricDescriptor, - ) - value_extractor = proto.Field( - proto.STRING, - number=6, - ) - label_extractors = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - bucket_options = proto.Field( - proto.MESSAGE, - number=8, - message=distribution_pb2.Distribution.BucketOptions, - ) - create_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - version = proto.Field( - proto.ENUM, - number=4, - enum=ApiVersion, - ) - - -class ListLogMetricsRequest(proto.Message): - r"""The parameters to ListLogMetrics. - Attributes: - parent (str): - Required. The name of the project containing the metrics: - - :: - - "projects/[PROJECT_ID]". - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - - -class ListLogMetricsResponse(proto.Message): - r"""Result returned from ListLogMetrics. - Attributes: - metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): - A list of logs-based metrics. - next_page_token (str): - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """ - - @property - def raw_page(self): - return self - - metrics = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='LogMetric', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetLogMetricRequest(proto.Message): - r"""The parameters to GetLogMetric. - Attributes: - metric_name (str): - Required. The resource name of the desired metric: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". - """ - - metric_name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateLogMetricRequest(proto.Message): - r"""The parameters to CreateLogMetric. - Attributes: - parent (str): - Required. The resource name of the project in which to - create the metric: - - :: - - "projects/[PROJECT_ID]" - - The new metric must be provided in the request. - metric (google.cloud.logging_v2.types.LogMetric): - Required. The new logs-based metric, which - must not have an identifier that already exists. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - metric = proto.Field( - proto.MESSAGE, - number=2, - message='LogMetric', - ) - - -class UpdateLogMetricRequest(proto.Message): - r"""The parameters to UpdateLogMetric. - Attributes: - metric_name (str): - Required. 
The resource name of the metric to update: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - - The updated metric must be provided in the request and its - ``name`` field must be the same as ``[METRIC_ID]``. If the - metric does not exist in ``[PROJECT_ID]``, then a new metric - is created. - metric (google.cloud.logging_v2.types.LogMetric): - Required. The updated metric. - """ - - metric_name = proto.Field( - proto.STRING, - number=1, - ) - metric = proto.Field( - proto.MESSAGE, - number=2, - message='LogMetric', - ) - - -class DeleteLogMetricRequest(proto.Message): - r"""The parameters to DeleteLogMetric. - Attributes: - metric_name (str): - Required. The resource name of the metric to delete: - - :: - - "projects/[PROJECT_ID]/metrics/[METRIC_ID]". - """ - - metric_name = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini b/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 4505b485436b..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py b/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 10ed0a998e1e..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox  # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds",  # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/logging_v2/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py b/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py deleted file mode 100644 index 2a368fb9ccea..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/scripts/fixup_logging_v2_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class loggingCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_bucket': ('parent', 'bucket_id', 'bucket', ), - 'create_exclusion': ('parent', 'exclusion', ), - 'create_log_metric': ('parent', 'metric', ), - 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), - 'create_view': ('parent', 'view_id', 'view', ), - 'delete_bucket': ('name', ), - 'delete_exclusion': ('name', ), - 'delete_log': ('log_name', ), - 'delete_log_metric': ('metric_name', ), - 'delete_sink': ('sink_name', ), - 'delete_view': ('name', ), - 'get_bucket': ('name', ), - 'get_cmek_settings': ('name', ), - 'get_exclusion': ('name', ), - 'get_log_metric': ('metric_name', ), - 'get_sink': ('sink_name', ), - 'get_view': ('name', ), - 'list_buckets': ('parent', 'page_token', 'page_size', ), - 'list_exclusions': ('parent', 'page_token', 'page_size', ), - 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), - 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), - 'list_sinks': ('parent', 'page_token', 'page_size', ), - 'list_views': ('parent', 'page_token', 'page_size', ), - 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), - 'undelete_bucket': ('name', ), - 'update_bucket': ('name', 'bucket', 'update_mask', ), - 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), - 'update_exclusion': ('name', 'exclusion', 'update_mask', ), - 'update_log_metric': ('metric_name', 'metric', ), - 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), - 'update_view': ('name', 'view', 'update_mask', ), - 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=loggingCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the logging client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates on a best-effort basis when converting positional - parameters in client method calls to keyword-based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls). - C) Indirect or dispatched calls (e.g. the method is looked up dynamically). - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method.
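To make the transformation described above concrete, a hypothetical before/after pair (the method and argument values are illustrative, not taken from this diff)::

    # Before: flattened positional call; 'timeout' is a control parameter.
    client.get_log_metric("projects/my-project/metrics/my-metric", timeout=30.0)

    # After: request fields collapse into a single dict keyed per
    # METHOD_TO_PARAMS; control parameters remain ordinary keywords.
    client.get_log_metric(
        request={'metric_name': "projects/my-project/metrics/my-metric"},
        timeout=30.0,
    )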
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/setup.py b/packages/google-cloud-logging/owl-bot-staging/v2/setup.py deleted file mode 100644 index 4b98728b93f3..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-logging', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index b54a5fcc42cd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc42cd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc42cd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py deleted file mode 100644 index b54a5fcc42cd..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py deleted file mode 100644 index 979cbd360592..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ /dev/null @@ -1,6447 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.services.config_service_v2 import transports -from google.cloud.logging_v2.services.config_service_v2.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.logging_v2.types import logging_config -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ConfigServiceV2Client._get_default_mtls_endpoint(None) is None - assert ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, -]) -def test_config_service_v2_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ConfigServiceV2GrpcTransport, "grpc"), - (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, -]) -def test_config_service_v2_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -def test_config_service_v2_client_get_transport_class(): - transport = ConfigServiceV2Client.get_transport_class() - available_transports = [ - transports.ConfigServiceV2GrpcTransport, - ] - assert transport in available_transports - - transport = ConfigServiceV2Client.get_transport_class("grpc") - assert transport == transports.ConfigServiceV2GrpcTransport 
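As a usage sketch of the constructors these tests exercise (the endpoint and key-file path are placeholder assumptions; anonymous credentials avoid any ADC lookup, and no network call happens at construction time)::

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(api_endpoint="logging.googleapis.com"),
    )
    # Or, with a service account key on disk (hypothetical path):
    # client = ConfigServiceV2Client.from_service_account_file("sa.json")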
- - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) -def test_config_service_v2_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_config_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ConfigServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_buckets(transport: str = 'grpc', request_type=logging_config.ListBucketsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_buckets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_buckets_from_dict(): - test_list_buckets(request_type=dict) - - -def test_list_buckets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - client.list_buckets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - -@pytest.mark.asyncio -async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_buckets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_buckets_async_from_dict(): - await test_list_buckets_async(request_type=dict) - - -def test_list_buckets_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.ListBucketsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value = logging_config.ListBucketsResponse() - client.list_buckets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_buckets_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.ListBucketsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) - await client.list_buckets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_buckets_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListBucketsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_buckets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_buckets( - logging_config.ListBucketsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_buckets_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListBucketsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_buckets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_buckets_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_buckets( - logging_config.ListBucketsRequest(), - parent='parent_value', - ) - - -def test_list_buckets_pager(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_buckets(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in results) - -def test_list_buckets_pages(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - RuntimeError, - ) - pages = list(client.list_buckets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_buckets_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_buckets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in responses) - -@pytest.mark.asyncio -async def test_list_buckets_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_buckets(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_bucket(transport: str = 'grpc', request_type=logging_config.GetBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - response = client.get_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) - - -def test_get_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - client.get_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - -@pytest.mark.asyncio -async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) - response = await client.get_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -@pytest.mark.asyncio -async def test_get_bucket_async_from_dict(): - await test_get_bucket_async(request_type=dict) - - -def test_get_bucket_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = logging_config.GetBucketRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value = logging_config.LogBucket() - client.get_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetBucketRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) - await client.get_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_bucket(transport: str = 'grpc', request_type=logging_config.CreateBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - response = client.create_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_create_bucket_from_dict(): - test_create_bucket(request_type=dict) - - -def test_create_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - client.create_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - -@pytest.mark.asyncio -async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) - response = await client.create_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -@pytest.mark.asyncio -async def test_create_bucket_async_from_dict(): - await test_create_bucket_async(request_type=dict) - - -def test_create_bucket_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateBucketRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value = logging_config.LogBucket() - client.create_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateBucketRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) - await client.create_bucket(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_update_bucket(transport: str = 'grpc', request_type=logging_config.UpdateBucketRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
-            retention_days=1512,
-            locked=True,
-            lifecycle_state=logging_config.LifecycleState.ACTIVE,
-        )
-        response = client.update_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogBucket)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.retention_days == 1512
-    assert response.locked is True
-    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
-
-
-def test_update_bucket_from_dict():
-    test_update_bucket(request_type=dict)
-
-
-def test_update_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bucket),
-            '__call__') as call:
-        client.update_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-
-@pytest.mark.asyncio
-async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
-            retention_days=1512,
-            locked=True,
-            lifecycle_state=logging_config.LifecycleState.ACTIVE,
-        ))
-        response = await client.update_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogBucket)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.retention_days == 1512
-    assert response.locked is True
-    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_update_bucket_async_from_dict():
-    await test_update_bucket_async(request_type=dict)
-
-
-def test_update_bucket_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bucket),
-            '__call__') as call:
-        call.return_value = logging_config.LogBucket()
-        client.update_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_bucket_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bucket),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
-        await client.update_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_delete_bucket(transport: str = 'grpc', request_type=logging_config.DeleteBucketRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_bucket_from_dict():
-    test_delete_bucket(request_type=dict)
-
-
-def test_delete_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_bucket),
-            '__call__') as call:
-        client.delete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteBucketRequest()
-
-
-@pytest.mark.asyncio
-async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_bucket_async_from_dict():
-    await test_delete_bucket_async(request_type=dict)
-
-
-def test_delete_bucket_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.DeleteBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_bucket),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_bucket_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.DeleteBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_bucket),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_undelete_bucket(transport: str = 'grpc', request_type=logging_config.UndeleteBucketRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.undelete_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.undelete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UndeleteBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_undelete_bucket_from_dict():
-    test_undelete_bucket(request_type=dict)
-
-
-def test_undelete_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.undelete_bucket),
-            '__call__') as call:
-        client.undelete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UndeleteBucketRequest()
-
-
-@pytest.mark.asyncio
-async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.undelete_bucket),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.undelete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UndeleteBucketRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_undelete_bucket_async_from_dict():
-    await test_undelete_bucket_async(request_type=dict)
-
-
-def test_undelete_bucket_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UndeleteBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.undelete_bucket),
-            '__call__') as call:
-        call.return_value = None
-        client.undelete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_undelete_bucket_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UndeleteBucketRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.undelete_bucket),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.undelete_bucket(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_list_views(transport: str = 'grpc', request_type=logging_config.ListViewsRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListViewsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_views(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListViewsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListViewsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_views_from_dict():
-    test_list_views(request_type=dict)
-
-
-def test_list_views_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        client.list_views()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListViewsRequest()
-
-
-@pytest.mark.asyncio
-async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_views(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListViewsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListViewsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_views_async_from_dict():
-    await test_list_views_async(request_type=dict)
-
-
-def test_list_views_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.ListViewsRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        call.return_value = logging_config.ListViewsResponse()
-        client.list_views(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_views_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.ListViewsRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse())
-        await client.list_views(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_list_views_flattened():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListViewsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_views(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-def test_list_views_flattened_error():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_views(
-            logging_config.ListViewsRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_views_flattened_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_views(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_views_flattened_error_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_views(
-            logging_config.ListViewsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_views_pager():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListViewsResponse(
-                views=[],
-                next_page_token='def',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_views(request={})
-
-        assert pager._metadata == metadata
-
-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, logging_config.LogView)
-                   for i in results)
-
-def test_list_views_pages():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListViewsResponse(
-                views=[],
-                next_page_token='def',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_views(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_views_async_pager():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListViewsResponse(
-                views=[],
-                next_page_token='def',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_views(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, logging_config.LogView)
-                   for i in responses)
-
-@pytest.mark.asyncio
-async def test_list_views_async_pages():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_views),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListViewsResponse(
-                views=[],
-                next_page_token='def',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListViewsResponse(
-                views=[
-                    logging_config.LogView(),
-                    logging_config.LogView(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        async for page_ in (await client.list_views(request={})).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-def test_get_view(transport: str = 'grpc', request_type=logging_config.GetViewRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        )
-        response = client.get_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-def test_get_view_from_dict():
-    test_get_view(request_type=dict)
-
-
-def test_get_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_view),
-            '__call__') as call:
-        client.get_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetViewRequest()
-
-
-@pytest.mark.asyncio
-async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        ))
-        response = await client.get_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-@pytest.mark.asyncio
-async def test_get_view_async_from_dict():
-    await test_get_view_async(request_type=dict)
-
-
-def test_get_view_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.GetViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_view),
-            '__call__') as call:
-        call.return_value = logging_config.LogView()
-        client.get_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_view_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.GetViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_view),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView())
-        await client.get_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_create_view(transport: str = 'grpc', request_type=logging_config.CreateViewRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        )
-        response = client.create_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-def test_create_view_from_dict():
-    test_create_view(request_type=dict)
-
-
-def test_create_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_view),
-            '__call__') as call:
-        client.create_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateViewRequest()
-
-
-@pytest.mark.asyncio
-async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        ))
-        response = await client.create_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-@pytest.mark.asyncio
-async def test_create_view_async_from_dict():
-    await test_create_view_async(request_type=dict)
-
-
-def test_create_view_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.CreateViewRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_view),
-            '__call__') as call:
-        call.return_value = logging_config.LogView()
-        client.create_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_view_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.CreateViewRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_view),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView())
-        await client.create_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_update_view(transport: str = 'grpc', request_type=logging_config.UpdateViewRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        )
-        response = client.update_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-def test_update_view_from_dict():
-    test_update_view(request_type=dict)
-
-
-def test_update_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_view),
-            '__call__') as call:
-        client.update_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateViewRequest()
-
-
-@pytest.mark.asyncio
-async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-        ))
-        response = await client.update_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogView)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-
-
-@pytest.mark.asyncio
-async def test_update_view_async_from_dict():
-    await test_update_view_async(request_type=dict)
-
-
-def test_update_view_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_view),
-            '__call__') as call:
-        call.return_value = logging_config.LogView()
-        client.update_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_view_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_view),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView())
-        await client.update_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_delete_view(transport: str = 'grpc', request_type=logging_config.DeleteViewRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_view_from_dict():
-    test_delete_view(request_type=dict)
-
-
-def test_delete_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_view),
-            '__call__') as call:
-        client.delete_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteViewRequest()
-
-
-@pytest.mark.asyncio
-async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_view),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteViewRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_view_async_from_dict():
-    await test_delete_view_async(request_type=dict)
-
-
-def test_delete_view_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.DeleteViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_view),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_view_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.DeleteViewRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_view),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_view(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_list_sinks(transport: str = 'grpc', request_type=logging_config.ListSinksRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListSinksResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_sinks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListSinksRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSinksPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_sinks_from_dict():
-    test_list_sinks(request_type=dict)
-
-
-def test_list_sinks_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        client.list_sinks()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListSinksRequest()
-
-
-@pytest.mark.asyncio
-async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_sinks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListSinksRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSinksAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sinks_async_from_dict():
-    await test_list_sinks_async(request_type=dict)
-
-
-def test_list_sinks_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.ListSinksRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        call.return_value = logging_config.ListSinksResponse()
-        client.list_sinks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_sinks_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.ListSinksRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse())
-        await client.list_sinks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_list_sinks_flattened():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListSinksResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_sinks(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-def test_list_sinks_flattened_error():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_sinks(
-            logging_config.ListSinksRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_sinks_flattened_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_sinks(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sinks_flattened_error_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_sinks(
-            logging_config.ListSinksRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_sinks_pager():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[],
-                next_page_token='def',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_sinks(request={})
-
-        assert pager._metadata == metadata
-
-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, logging_config.LogSink)
-                   for i in results)
-
-def test_list_sinks_pages():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                ],
-                next_page_token='abc',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[],
-                next_page_token='def',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                ],
-                next_page_token='ghi',
-            ),
-            logging_config.ListSinksResponse(
-                sinks=[
-                    logging_config.LogSink(),
-                    logging_config.LogSink(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_sinks(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_sinks_async_pager():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sinks),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token='abc', - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token='def', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token='ghi', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sinks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in responses) - -@pytest.mark.asyncio -async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token='abc', - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token='def', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token='ghi', - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_sinks(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_sink(transport: str = 'grpc', request_type=logging_config.GetSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), - ) - response = client.get_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - # Establish that the response is the type that we expect. 
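The pager tests above drive pagination by giving the mocked stub a `side_effect` sequence: one canned response per expected page, with a trailing `RuntimeError` that only fires if the code under test requests a page the test never supplied. Roughly, with invented response shapes:

    from unittest import mock

    fetch = mock.Mock(side_effect=[
        {'items': [1, 2, 3], 'next_page_token': 'abc'},
        {'items': [4, 5, 6], 'next_page_token': ''},
        RuntimeError,  # tripwire: raised only if a third page is requested
    ])

    def iterate(fetch_page):
        token = ''
        while True:
            page = fetch_page(token)
            yield from page['items']
            token = page['next_page_token']
            if not token:
                return

    # Two pages are consumed; the RuntimeError sentinel is never reached.
    assert list(iterate(fetch)) == [1, 2, 3, 4, 5, 6]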
- assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_get_sink_from_dict(): - test_get_sink(request_type=dict) - - -def test_get_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - -@pytest.mark.asyncio -async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - response = await client.get_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -@pytest.mark.asyncio -async def test_get_sink_async_from_dict(): - await test_get_sink_async(request_type=dict) - - -def test_get_sink_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
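In the async variants the mock cannot hand back a bare response, because the client awaits the call; that is what `grpc_helpers_async.FakeUnaryUnaryCall` wraps the canned response for. A from-scratch stand-in (not the actual helper imported in these tests) showing the idea:

    import asyncio

    class CannedCall:
        """Awaitable that resolves to a fixed response, no channel needed."""
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        # The async client can 'await call(...)' without any real gRPC I/O.
        assert await CannedCall('log-sink') == 'log-sink'

    asyncio.run(main())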
- with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value = logging_config.LogSink() - client.get_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - await client.get_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -def test_get_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_sink( - sink_name='sink_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - - -def test_get_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_sink( - logging_config.GetSinkRequest(), - sink_name='sink_name_value', - ) - - -@pytest.mark.asyncio -async def test_get_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_sink( - sink_name='sink_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
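The field-header tests assert that URI-bound request fields are echoed into `x-goog-request-params` gRPC metadata. One plausible way such a header value could be assembled — this helper is hypothetical, not the library's real routing code:

    from urllib.parse import quote

    def routing_params(**fields):
        """Hypothetical helper: serialize request fields into a header value."""
        return '&'.join(
            f'{name}={quote(str(value), safe="/")}'
            for name, value in fields.items()
        )

    metadata = (
        ('x-goog-request-params', routing_params(sink_name='sink_name/value')),
    )
    # Same membership check the tests perform against kw['metadata'].
    assert ('x-goog-request-params', 'sink_name=sink_name/value') in metadata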
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - - -@pytest.mark.asyncio -async def test_get_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_sink( - logging_config.GetSinkRequest(), - sink_name='sink_name_value', - ) - - -def test_create_sink(transport: str = 'grpc', request_type=logging_config.CreateSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), - ) - response = client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_create_sink_from_dict(): - test_create_sink(request_type=dict) - - -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - -@pytest.mark.asyncio -async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
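Each `*_flattened_error` test pins down the same contract: a fully-formed request object and flattened keyword fields are mutually exclusive, and supplying both raises `ValueError`. Sketched with invented names and a dict standing in for the request message:

    def get_sink(request=None, *, sink_name=None):
        """Hypothetical client method showing the mutual-exclusion guard."""
        if request is not None and sink_name is not None:
            raise ValueError(
                'If the `request` argument is set, individual field '
                'arguments must not be set.'
            )
        return request if request is not None else {'sink_name': sink_name}

    assert get_sink(sink_name='my-sink') == {'sink_name': 'my-sink'}
    try:
        get_sink({'sink_name': 'a'}, sink_name='b')  # both -> ValueError
    except ValueError:
        pass  # rejected, as the *_flattened_error tests expect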
- with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - response = await client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -@pytest.mark.asyncio -async def test_create_sink_async_from_dict(): - await test_create_sink_async(request_type=dict) - - -def test_create_sink_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateSinkRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value = logging_config.LogSink() - client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateSinkRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - await client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - - -def test_create_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_sink( - logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - - -@pytest.mark.asyncio -async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_sink( - logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - -def test_update_sink(transport: str = 'grpc', request_type=logging_config.UpdateSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), - ) - response = client.update_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_update_sink_from_dict(): - test_update_sink(request_type=dict) - - -def test_update_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - -@pytest.mark.asyncio -async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - response = await client.update_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -@pytest.mark.asyncio -async def test_update_sink_async_from_dict(): - await test_update_sink_async(request_type=dict) - - -def test_update_sink_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value = logging_config.LogSink() - client.update_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - await client.update_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -def test_update_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_sink(transport: str = 'grpc', request_type=logging_config.DeleteSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - # Establish that the response is the type that we expect. 
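The `update_sink` flattened tests above pass a protobuf `FieldMask`, which names exactly which fields of the sink the update may touch; everything unnamed is left alone. A tiny runnable example (the paths are example values only; requires the protobuf package):

    from google.protobuf import field_mask_pb2

    # Only 'destination' and 'filter' would be updated; other sink fields persist.
    mask = field_mask_pb2.FieldMask(paths=['destination', 'filter'])
    assert list(mask.paths) == ['destination', 'filter']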
- assert response is None - - -def test_delete_sink_from_dict(): - test_delete_sink(request_type=dict) - - -def test_delete_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - -@pytest.mark.asyncio -async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_sink_async_from_dict(): - await test_delete_sink_async(request_type=dict) - - -def test_delete_sink_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value = None - client.delete_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() - - request.sink_name = 'sink_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) - - # Establish that the underlying gRPC stub method was called. 
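The `*_empty_call` tests guard one behavior: calling a method with no request and no flattened fields must still construct a default request rather than forwarding `None` to the transport. A hypothetical slice of a client showing that path:

    from unittest import mock

    class Client:
        """Hypothetical client fragment showing the default-request path."""
        def __init__(self, transport):
            self._transport = transport

        def delete_sink(self, request=None):
            # A missing request becomes an empty one, never None.
            request = request if request is not None else {}
            return self._transport(request)

    transport = mock.Mock(return_value=None)
    Client(transport).delete_sink()        # totally empty call
    transport.assert_called_once_with({})  # default request was built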
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'sink_name=sink_name/value', - ) in kw['metadata'] - - -def test_delete_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_sink( - sink_name='sink_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - - -def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_sink( - sink_name='sink_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - - -@pytest.mark.asyncio -async def test_delete_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', - ) - - -def test_list_exclusions(transport: str = 'grpc', request_type=logging_config.ListExclusionsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_exclusions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_exclusions_from_dict(): - test_list_exclusions(request_type=dict) - - -def test_list_exclusions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - -@pytest.mark.asyncio -async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_exclusions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_exclusions_async_from_dict(): - await test_list_exclusions_async(request_type=dict) - - -def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_exclusions_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
-    # Set these to a non-empty value.
-    request = logging_config.ListExclusionsRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_exclusions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse())
-        await client.list_exclusions(request)
-
-    # Establish that the underlying gRPC stub method was called.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_list_exclusions_flattened():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_exclusions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListExclusionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_exclusions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-def test_list_exclusions_flattened_error():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_exclusions(
-            logging_config.ListExclusionsRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_exclusions_flattened_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_exclusions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.ListExclusionsResponse()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_exclusions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_exclusions_flattened_error_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_exclusions(
-            logging_config.ListExclusionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_exclusions_pager():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_exclusions(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) - -def test_list_exclusions_pages(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_exclusions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) - -@pytest.mark.asyncio -async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_exclusions(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_exclusion(transport: str = 'grpc', request_type=logging_config.GetExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - response = client.get_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - # Establish that the response is the type that we expect. 
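The async pager tests consume results with `async for`; an async generator is the simplest stand-in for that iteration contract (the generated pager class itself is not reproduced here):

    import asyncio

    async def paged(pages):
        """Async generator mimicking ListExclusionsAsyncPager iteration."""
        for page in pages:
            for item in page:
                yield item

    async def main():
        items = [item async for item in paged([['e1', 'e2', 'e3'], [], ['e4']])]
        assert len(items) == 4

    asyncio.run(main())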
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_get_exclusion_from_dict(): - test_get_exclusion(request_type=dict) - - -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - -@pytest.mark.asyncio -async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - response = await client.get_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -@pytest.mark.asyncio -async def test_get_exclusion_async_from_dict(): - await test_get_exclusion_async(request_type=dict) - - -def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
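A note on the `_, args, _ = call.mock_calls[0]` idiom that recurs throughout: each `mock_calls` entry unpacks as a `(name, args, kwargs)` triple, which is what lets these tests reach both the positional request and the `metadata` keyword:

    from unittest import mock

    m = mock.Mock()
    m('request', metadata=(('x-goog-request-params', 'name=name/value'),))

    # Each mock_calls entry is a (name, args, kwargs) triple.
    _, args, kw = m.mock_calls[0]
    assert args[0] == 'request'
    assert ('x-goog-request-params', 'name=name/value') in kw['metadata']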
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.get_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_exclusion_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_exclusion( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_exclusion( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_exclusion( - logging_config.GetExclusionRequest(), - name='name_value', - ) - - -def test_create_exclusion(transport: str = 'grpc', request_type=logging_config.CreateExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - response = client.create_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_create_exclusion_from_dict(): - test_create_exclusion(request_type=dict) - - -def test_create_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - -@pytest.mark.asyncio -async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - response = await client.create_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - # Establish that the response is the type that we expect. 
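The recurring comment that "everything is optional in proto3" is why these tests can send freshly constructed, empty requests: every scalar field defaults to its zero value. A dataclass mirror of that behavior (hypothetical, not the generated message class):

    from dataclasses import dataclass

    @dataclass
    class CreateExclusionRequest:  # hypothetical mirror of the proto message
        parent: str = ''           # proto3 scalars default to zero-values

    request = CreateExclusionRequest()
    assert request.parent == ''    # an "empty" request is always well-formed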
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -@pytest.mark.asyncio -async def test_create_exclusion_async_from_dict(): - await test_create_exclusion_async(request_type=dict) - - -def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.create_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_exclusion_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - - -def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - - -@pytest.mark.asyncio -async def test_create_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - - -def test_update_exclusion(transport: str = 'grpc', request_type=logging_config.UpdateExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - response = client.update_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_update_exclusion_from_dict(): - test_update_exclusion(request_type=dict) - - -def test_update_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.update_exclusion),
-            '__call__') as call:
-        client.update_exclusion()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateExclusionRequest()
-
-
-@pytest.mark.asyncio
-async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_exclusion),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
-            disabled=True,
-        ))
-        response = await client.update_exclusion(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateExclusionRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
-    assert response.disabled is True
-
-
-@pytest.mark.asyncio
-async def test_update_exclusion_async_from_dict():
-    await test_update_exclusion_async(request_type=dict)
-
-
-def test_update_exclusion_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateExclusionRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_exclusion),
-            '__call__') as call:
-        call.return_value = logging_config.LogExclusion()
-        client.update_exclusion(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_exclusion_field_headers_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateExclusionRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_exclusion),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion())
-        await client.update_exclusion(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_update_exclusion_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_exclusion( - logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_exclusion(transport: str = 'grpc', request_type=logging_config.DeleteExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_exclusion_from_dict(): - test_delete_exclusion(request_type=dict) - - -def test_delete_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - -@pytest.mark.asyncio -async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_exclusion_async_from_dict(): - await test_delete_exclusion_async(request_type=dict) - - -def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value = None - client.delete_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_exclusion( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_exclusion( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_exclusion_flattened_error_async():
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_exclusion(
-            logging_config.DeleteExclusionRequest(),
-            name='name_value',
-        )
-
-
-def test_get_cmek_settings(transport: str = 'grpc', request_type=logging_config.GetCmekSettingsRequest):
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_cmek_settings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            service_account_id='service_account_id_value',
-        )
-        response = client.get_cmek_settings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetCmekSettingsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.service_account_id == 'service_account_id_value'
-
-
-def test_get_cmek_settings_from_dict():
-    test_get_cmek_settings(request_type=dict)
-
-
-def test_get_cmek_settings_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_cmek_settings),
-            '__call__') as call:
-        client.get_cmek_settings()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetCmekSettingsRequest()
-
-
-@pytest.mark.asyncio
-async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_cmek_settings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            service_account_id='service_account_id_value',
-        ))
-        response = await client.get_cmek_settings(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' - - -@pytest.mark.asyncio -async def test_get_cmek_settings_async_from_dict(): - await test_get_cmek_settings_async(request_type=dict) - - -def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) - await client.get_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_update_cmek_settings(transport: str = 'grpc', request_type=logging_config.UpdateCmekSettingsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', - ) - response = client.update_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateCmekSettingsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.service_account_id == 'service_account_id_value'
-
-
-def test_update_cmek_settings_from_dict():
-    test_update_cmek_settings(request_type=dict)
-
-
-def test_update_cmek_settings_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_cmek_settings),
-            '__call__') as call:
-        client.update_cmek_settings()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateCmekSettingsRequest()
-
-
-@pytest.mark.asyncio
-async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest):
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_cmek_settings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            service_account_id='service_account_id_value',
-        ))
-        response = await client.update_cmek_settings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateCmekSettingsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.service_account_id == 'service_account_id_value'
-
-
-@pytest.mark.asyncio
-async def test_update_cmek_settings_async_from_dict():
-    await test_update_cmek_settings_async(request_type=dict)
-
-
-def test_update_cmek_settings_field_headers():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = logging_config.UpdateCmekSettingsRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_cmek_settings),
-            '__call__') as call:
-        call.return_value = logging_config.CmekSettings()
-        client.update_cmek_settings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) - await client.update_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConfigServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ConfigServiceV2GrpcTransport, - ) - -def test_config_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_config_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_buckets', - 'get_bucket', - 'create_bucket', - 'update_bucket', - 'delete_bucket', - 'undelete_bucket', - 'list_views', - 'get_view', - 'create_view', - 'update_view', - 'delete_view', - 'list_sinks', - 'get_sink', - 'create_sink', - 'update_sink', - 'delete_sink', - 'list_exclusions', - 'get_exclusion', - 'create_exclusion', - 'update_exclusion', - 'delete_exclusion', - 'get_cmek_settings', - 'update_cmek_settings', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_config_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ), - quota_project_id="octopus", - ) - - -def test_config_service_v2_base_transport_with_adc(): - # Test the default 
credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_config_service_v2_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConfigServiceV2Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConfigServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_config_service_v2_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - scopes=["1", "2"], - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-def test_config_service_v2_host_no_port():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'),
-    )
-    assert client.transport._host == 'logging.googleapis.com:443'
-
-
-def test_config_service_v2_host_with_port():
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'),
-    )
-    assert client.transport._host == 'logging.googleapis.com:8000'
-
-def test_config_service_v2_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ConfigServiceV2GrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_config_service_v2_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ConfigServiceV2GrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cmek_settings_path(): - project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project, ) - actual = ConfigServiceV2Client.cmek_settings_path(project) - assert expected == actual - - -def test_parse_cmek_settings_path(): - expected = { - "project": "clam", - } - path = ConfigServiceV2Client.cmek_settings_path(**expected) - - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_cmek_settings_path(path) - assert expected == actual - -def test_log_bucket_path(): - project = "whelk" - location = "octopus" - bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) - actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) - assert expected == actual - - -def test_parse_log_bucket_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "bucket": "mussel", - } - path = ConfigServiceV2Client.log_bucket_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_bucket_path(path) - assert expected == actual - -def test_log_exclusion_path(): - project = "winkle" - exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) - actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) - assert expected == actual - - -def test_parse_log_exclusion_path(): - expected = { - "project": "scallop", - "exclusion": "abalone", - } - path = ConfigServiceV2Client.log_exclusion_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_exclusion_path(path) - assert expected == actual - -def test_log_sink_path(): - project = "squid" - sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) - actual = ConfigServiceV2Client.log_sink_path(project, sink) - assert expected == actual - - -def test_parse_log_sink_path(): - expected = { - "project": "whelk", - "sink": "octopus", - } - path = ConfigServiceV2Client.log_sink_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_sink_path(path) - assert expected == actual - -def test_log_view_path(): - project = "oyster" - location = "nudibranch" - bucket = "cuttlefish" - view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) - actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) - assert expected == actual - - -def test_parse_log_view_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "bucket": "scallop", - "view": "abalone", - } - path = ConfigServiceV2Client.log_view_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_view_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ConfigServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ConfigServiceV2Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = ConfigServiceV2Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ConfigServiceV2Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ConfigServiceV2Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ConfigServiceV2Client.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = ConfigServiceV2Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ConfigServiceV2Client.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ConfigServiceV2Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ConfigServiceV2Client.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = ConfigServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py deleted file mode 100644 index b95281460984..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ /dev/null @@ -1,2494 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
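The *_path / parse_*_path round-trip tests above all reduce to one pattern: the builder fills a fixed path template with str.format, and the parser inverts it with a regex whose named groups become the returned dict (an empty dict when the path does not match). A minimal standalone sketch of that pattern; the helper names and regex below are illustrative, not the generated client's actual internals:

import re

def log_sink_path(project, sink):
    # Build the resource name by filling the path template.
    return "projects/{project}/sinks/{sink}".format(project=project, sink=sink)

def parse_log_sink_path(path):
    # Invert the template; the regex's named groups become the dict keys.
    m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path)
    return m.groupdict() if m else {}

# Round-trip, mirroring what the tests assert:
assert parse_log_sink_path(log_sink_path("p1", "s1")) == {"project": "p1", "sink": "s1"}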
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import monitored_resource_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.services.logging_service_v2.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.logging.type import http_request_pb2 # type: ignore -from google.logging.type import log_severity_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, -]) -def test_logging_service_v2_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LoggingServiceV2GrpcTransport, "grpc"), - (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, -]) -def test_logging_service_v2_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -def test_logging_service_v2_client_get_transport_class(): - transport = LoggingServiceV2Client.get_transport_class() - available_transports = [ - transports.LoggingServiceV2GrpcTransport, - ] - assert transport in available_transports - - transport = LoggingServiceV2Client.get_transport_class("grpc") - assert transport == 
transports.LoggingServiceV2GrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) -def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
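- # (Editorial sketch of the env-var resolution exercised here; a paraphrase, - # not the client's actual source.) Roughly: - # use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - # if use_client_cert not in ("true", "false"): raise ValueError(...) - # use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - # if use_mtls_endpoint not in ("never", "auto", "always"): - #     raise MutualTLSChannelError(...)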
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
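- # (Editorial note.) "ADC client cert" means a default certificate discovered - # via google.auth.transport.mtls.has_default_client_cert_source(), as opposed - # to one passed explicitly through client_options; the mocks below fake that - # discovery.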
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
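- # (Editorial note.) client_options accepts either a ClientOptions instance, - # as here, or a plain dict with the same keys (the from_dict test below uses - # the dict form); both resolve to identical transport __init__ arguments.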
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_logging_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = LoggingServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_delete_log(transport: str = 'grpc', request_type=logging.DeleteLogRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_log(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_log_from_dict(): - test_delete_log(request_type=dict) - - -def test_delete_log_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - -@pytest.mark.asyncio -async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log(request) - - # Establish that the underlying gRPC stub method was called. 
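- # (Editorial note.) Each item of call.mock_calls unpacks to a - # (name, args, kwargs) triple, e.g.: - #     _, args, _ = call.mock_calls[0]   # args == (logging.DeleteLogRequest(),) - # so args[0] is the request object the client actually handed to the stub.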
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_log_async_from_dict(): - await test_delete_log_async(request_type=dict) - - -def test_delete_log_field_headers(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging.DeleteLogRequest() - - request.log_name = 'log_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value = None - client.delete_log(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'log_name=log_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_log_field_headers_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging.DeleteLogRequest() - - request.log_name = 'log_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'log_name=log_name/value', - ) in kw['metadata'] - - -def test_delete_log_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_log( - log_name='log_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - - -def test_delete_log_flattened_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_log( - logging.DeleteLogRequest(), - log_name='log_name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_log_flattened_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
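- # (Editorial note.) __call__ is patched on type(...) because Python looks up - # special methods on the type, not the instance, so patching the attribute on - # the multicallable object itself would not intercept the invocation.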
- with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_log( - log_name='log_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - - -@pytest.mark.asyncio -async def test_delete_log_flattened_error_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_log( - logging.DeleteLogRequest(), - log_name='log_name_value', - ) - - -def test_write_log_entries(transport: str = 'grpc', request_type=logging.WriteLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.WriteLogEntriesResponse( - ) - response = client.write_log_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) - - -def test_write_log_entries_from_dict(): - test_write_log_entries(request_type=dict) - - -def test_write_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - -@pytest.mark.asyncio -async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
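- # (Editorial note.) FakeUnaryUnaryCall wraps a plain response message in an - # awaitable standing in for the grpc.aio call object, so the awaited client - # method below returns the wrapped WriteLogEntriesResponse.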
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) - response = await client.write_log_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) - - -@pytest.mark.asyncio -async def test_write_log_entries_async_from_dict(): - await test_write_log_entries_async(request_type=dict) - - -def test_write_log_entries_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.WriteLogEntriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') - assert args[0].labels == {'key_value': 'value_value'} - assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] - - -def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.write_log_entries( - logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - - -@pytest.mark.asyncio -async def test_write_log_entries_flattened_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.WriteLogEntriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') - assert args[0].labels == {'key_value': 'value_value'} - assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] - - -@pytest.mark.asyncio -async def test_write_log_entries_flattened_error_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.write_log_entries( - logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - - -def test_list_log_entries(transport: str = 'grpc', request_type=logging.ListLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_log_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_entries_from_dict(): - test_list_log_entries(request_type=dict) - - -def test_list_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - -@pytest.mark.asyncio -async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_log_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_log_entries_async_from_dict(): - await test_list_log_entries_async(request_type=dict) - - -def test_list_log_entries_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogEntriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ['resource_names_value'] - assert args[0].filter == 'filter_value' - assert args[0].order_by == 'order_by_value' - - -def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_log_entries( - logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - -@pytest.mark.asyncio -async def test_list_log_entries_flattened_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogEntriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ['resource_names_value'] - assert args[0].filter == 'filter_value' - assert args[0].order_by == 'order_by_value' - - -@pytest.mark.asyncio -async def test_list_log_entries_flattened_error_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
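- # (Editorial paraphrase of the generated guard, hedged; wording approximate.) - # has_flattened_params = any([resource_names, filter, order_by]) - # if request is not None and has_flattened_params: - #     raise ValueError('If the `request` argument is set, then none of ' - #                      'the individual field arguments should be set.')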
- with pytest.raises(ValueError): - await client.list_log_entries( - logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - -def test_list_log_entries_pager(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_log_entries(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in results) - -def test_list_log_entries_pages(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - RuntimeError, - ) - pages = list(client.list_log_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_log_entries_async_pager(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
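- # (Editorial note.) With new_callable=mock.AsyncMock, each awaited call - # consumes one queued side_effect item, i.e. one response per page; the - # trailing RuntimeError would only surface if the pager requested a page - # beyond the final, empty-token response.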
- call.side_effect = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_log_entries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in responses) - -@pytest.mark.asyncio -async def test_list_log_entries_async_pages(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_log_entries(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=logging.ListMonitoredResourceDescriptorsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_monitored_resource_descriptors_from_dict(): - test_list_monitored_resource_descriptors(request_type=dict) - - -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
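- # (Editorial note.) With neither a request nor flattened fields supplied, the - # client builds a default ListMonitoredResourceDescriptorsRequest() - # internally, which is exactly what the assertion on args[0] below verifies.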
- client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_monitored_resource_descriptors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_from_dict(): - await test_list_monitored_resource_descriptors_async(request_type=dict) - - -def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Set the response to a series of pages. 
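- # (Editorial note.) Iterating the sync pager issues one mocked RPC per page - # and flattens the per-page resource_descriptors lists into a single stream - # of MonitoredResourceDescriptor messages, hence the expected count of 6.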
- call.side_effect = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_monitored_resource_descriptors(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) - -def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - pages = list(client.list_monitored_resource_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_pager(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_monitored_resource_descriptors(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in responses) - -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_pages(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_list_logs(transport: str = 'grpc', request_type=logging.ListLogsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - ) - response = client.list_logs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_logs_from_dict(): - test_list_logs(request_type=dict) - - -def test_list_logs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - -@pytest.mark.asyncio -async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) - response = await client.list_logs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_logs_async_from_dict(): - await test_list_logs_async(request_type=dict) - - -def test_list_logs_field_headers(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging.ListLogsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value = logging.ListLogsResponse() - client.list_logs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_logs_field_headers_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
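- # (Editorial note.) The transport serializes routing fields into a single - # 'x-goog-request-params' metadata entry of URL-style key=value pairs, e.g. - # 'parent=parent/value' for this request, which is what the assertion on - # kw['metadata'] checks.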
- request = logging.ListLogsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) - await client.list_logs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_logs_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_logs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_logs_flattened_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_logs( - logging.ListLogsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_logs_flattened_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging.ListLogsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_logs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_logs_flattened_error_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_logs( - logging.ListLogsRequest(), - parent='parent_value', - ) - - -def test_list_logs_pager(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Set the response to a series of pages. 
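- # (Editorial note.) ListLogsResponse.log_names is a repeated string field, so - # this pager yields plain str values; str() is simply an empty placeholder - # entry.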
- call.side_effect = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_logs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) - -def test_list_logs_pages(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.list_logs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_logs_async_pager(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_logs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) - -@pytest.mark.asyncio -async def test_list_logs_async_pages(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
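- # (Editorial note.) The .pages view yields whole ListLogsResponse objects, - # one per mocked RPC, rather than individual log names; raw_page exposes the - # underlying response so the loop below can read each next_page_token.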
- call.side_effect = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_logs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_tail_log_entries(transport: str = 'grpc', request_type=logging.TailLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([logging.TailLogEntriesResponse()]) - response = client.tail_log_entries(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, logging.TailLogEntriesResponse) - - -def test_tail_log_entries_from_dict(): - test_tail_log_entries(request_type=dict) - - -@pytest.mark.asyncio -async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) - response = await client.tail_log_entries(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, logging.TailLogEntriesResponse) - - -@pytest.mark.asyncio -async def test_tail_log_entries_async_from_dict(): - await test_tail_log_entries_async(request_type=dict) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
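# A minimal sketch of the streaming-stub idiom in the tail_log_entries tests
# above: the sync stub is mocked to return a plain iterator, so "the stream"
# is anything a for-loop can consume and no gRPC machinery is needed.
from unittest import mock

call = mock.Mock(return_value=iter(['resp-1', 'resp-2']))
responses = [message for message in call(iter(['request']))]
assert responses == ['resp-1', 'resp-2']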
- transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LoggingServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LoggingServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LoggingServiceV2GrpcTransport, - ) - -def test_logging_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_logging_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
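# A toy version of the mutual-exclusion rule the ValueError tests above pin
# down: a ready-made transport already carries its own credentials, so
# credentials, a credentials file, or scopes passed alongside it must be
# rejected. ToyClient is invented for illustration only.
class ToyClient:
    def __init__(self, transport=None, credentials=None, credentials_file=None):
        if transport is not None and (credentials or credentials_file):
            raise ValueError('transport and credential options are mutually exclusive')
        self.transport = transport

try:
    ToyClient(transport=object(), credentials=object())
except ValueError:
    pass
else:
    raise AssertionError('expected ValueError')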
- methods = ( - 'delete_log', - 'write_log_entries', - 'list_log_entries', - 'list_monitored_resource_descriptors', - 'list_logs', - 'tail_log_entries', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_logging_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), - quota_project_id="octopus", - ) - - -def test_logging_service_v2_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LoggingServiceV2Transport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_logging_service_v2_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
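# The loop above enforces an abstract-base contract without abc.ABC: every
# RPC method on the base transport must raise NotImplementedError until a
# concrete transport overrides it. The same contract in miniature:
class BaseTransport:
    def delete_log(self, request):
        raise NotImplementedError()
    def list_logs(self, request):
        raise NotImplementedError()

transport = BaseTransport()
for method in ('delete_log', 'list_logs'):
    try:
        getattr(transport, method)(request=object())
    except NotImplementedError:
        pass
    else:
        raise AssertionError(method + ' should raise NotImplementedError')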
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LoggingServiceV2Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LoggingServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_logging_service_v2_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LoggingServiceV2GrpcTransport, grpc_helpers), - (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
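# The paired gte/lt tests above encode one scopes contract in two API shapes:
# newer google-auth receives both the user-supplied `scopes` and the service
# `default_scopes` and applies the fallback itself, while older google-auth
# only accepts a single `scopes` value. A toy fallback mirroring the net
# effect (invented helper, illustrative only):
def effective_scopes(user_scopes, default_scopes):
    return tuple(user_scopes) if user_scopes else tuple(default_scopes)

assert effective_scopes(['1', '2'], ('a', 'b')) == ('1', '2')
assert effective_scopes(None, ('a', 'b')) == ('a', 'b')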
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - scopes=["1", "2"], - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_logging_service_v2_host_no_port(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - ) - assert client.transport._host == 'logging.googleapis.com:443' - - -def test_logging_service_v2_host_with_port(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), - ) - assert client.transport._host == 'logging.googleapis.com:8000' - -def test_logging_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
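# A toy reduction of the host-normalization behavior asserted in the
# host_no_port/host_with_port tests above: a bare api_endpoint gets the
# default gRPC port 443 appended, while an explicit port is preserved.
# normalize_host is an invented helper name.
def normalize_host(host, default_port=443):
    return host if ':' in host else '%s:%d' % (host, default_port)

assert normalize_host('logging.googleapis.com') == 'logging.googleapis.com:443'
assert normalize_host('logging.googleapis.com:8000') == 'logging.googleapis.com:8000'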
- transport = transports.LoggingServiceV2GrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_logging_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
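# A sketch of the client_cert_source protocol exercised by the mTLS tests
# above: the callback yields a (certificate_chain, private_key) byte pair,
# which is the shape grpc.ssl_channel_credentials expects. A mock stands in
# for grpc so this sketch carries no gRPC dependency.
from unittest import mock

def client_cert_source_callback():
    return b'cert bytes', b'key bytes'

ssl_channel_credentials = mock.Mock()
cert, key = client_cert_source_callback()
ssl_channel_credentials(certificate_chain=cert, private_key=key)
ssl_channel_credentials.assert_called_once_with(
    certificate_chain=b'cert bytes', private_key=b'key bytes')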
-@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_log_path(): - project = "squid" - log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) - actual = LoggingServiceV2Client.log_path(project, log) - assert expected == actual - - -def test_parse_log_path(): - expected = { - "project": "whelk", - "log": "octopus", - } - path = LoggingServiceV2Client.log_path(**expected) - - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_log_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LoggingServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = LoggingServiceV2Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = LoggingServiceV2Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = LoggingServiceV2Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LoggingServiceV2Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = LoggingServiceV2Client.common_organization_path(**expected) - - # Check that the path construction is reversible. 
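# The *_path/parse_*_path pairs above all check one invariant: resource-path
# construction is reversible. A dependency-free toy of the same round-trip
# (invented functions, mirroring the log_path shape):
import re

def log_path(project, log):
    return 'projects/{project}/logs/{log}'.format(project=project, log=log)

def parse_log_path(path):
    match = re.match(r'^projects/(?P<project>.+?)/logs/(?P<log>.+?)$', path)
    return match.groupdict() if match else {}

assert parse_log_path(log_path('squid', 'clam')) == {'project': 'squid', 'log': 'clam'}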
- actual = LoggingServiceV2Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = LoggingServiceV2Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = LoggingServiceV2Client.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LoggingServiceV2Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = LoggingServiceV2Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LoggingServiceV2Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = LoggingServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py deleted file mode 100644 index 5ce85b428459..000000000000 --- a/packages/google-cloud-logging/owl-bot-staging/v2/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ /dev/null @@ -1,2359 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
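# The DEFAULT_CLIENT_INFO test above verifies that a caller-supplied
# ClientInfo is threaded through to the transport's _prep_wrapped_messages
# hook. The observable contract, with an invented ToyTransport:
from unittest import mock

class ToyTransport:
    def __init__(self, client_info):
        self._prep_wrapped_messages(client_info)
    def _prep_wrapped_messages(self, client_info):
        pass

with mock.patch.object(ToyTransport, '_prep_wrapped_messages') as prep:
    info = object()
    ToyTransport(client_info=info)
    prep.assert_called_once_with(info)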
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api import distribution_pb2 # type: ignore -from google.api import label_pb2 # type: ignore -from google.api import launch_stage_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient -from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.services.metrics_service_v2.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.logging_v2.types import logging_metrics -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
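# A sketch of the version-gated skip idiom defined above: parse the installed
# dependency version once and build reusable pytest markers from it. The
# version string here is a stand-in for the detected google-auth version.
import packaging.version
import pytest

_FAKE_AUTH_VERSION = '1.24.0'

requires_lt_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_FAKE_AUTH_VERSION) >= packaging.version.parse('1.25.0'),
    reason='This test requires google-auth < 1.25.0',
)

@requires_lt_1_25_0
def test_old_auth_code_path():
    assert True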
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetricsServiceV2Client._get_default_mtls_endpoint(None) is None - assert MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, -]) -def test_metrics_service_v2_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetricsServiceV2GrpcTransport, "grpc"), - (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, -]) -def test_metrics_service_v2_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'logging.googleapis.com:443' - - -def test_metrics_service_v2_client_get_transport_class(): - transport = MetricsServiceV2Client.get_transport_class() - available_transports = [ - transports.MetricsServiceV2GrpcTransport, - ] - assert transport in available_transports - - transport = MetricsServiceV2Client.get_transport_class("grpc") - assert transport == 
transports.MetricsServiceV2GrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) -def test_metrics_service_v2_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
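# A toy decision table for the GOOGLE_API_USE_MTLS_ENDPOINT handling walked
# through above ("never", "always", "auto", anything else rejected). The real
# client raises MutualTLSChannelError rather than ValueError; pick_endpoint
# is invented for this sketch.
def pick_endpoint(mode, default, mtls_default, have_client_cert):
    if mode == 'never':
        return default
    if mode == 'always':
        return mtls_default
    if mode == 'auto':
        return mtls_default if have_client_cert else default
    raise ValueError('unsupported GOOGLE_API_USE_MTLS_ENDPOINT: ' + mode)

assert pick_endpoint('auto', 'x.googleapis.com', 'x.mtls.googleapis.com',
                     True) == 'x.mtls.googleapis.com'
assert pick_endpoint('never', 'x.googleapis.com', 'x.mtls.googleapis.com',
                     True) == 'x.googleapis.com'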
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
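# A toy reduction of the GOOGLE_API_USE_CLIENT_CERTIFICATE branching above:
# a client certificate is used only when the variable is "true" AND a
# certificate source is actually available; otherwise the default endpoint
# and no cert source are used. Invented helper, illustrative only.
def should_use_client_cert(env_value, cert_source_available):
    return env_value == 'true' and cert_source_available

assert should_use_client_cert('false', True) is False
assert should_use_client_cert('true', False) is False
assert should_use_client_cert('true', True) is True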
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_metrics_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetricsServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_log_metrics(transport: str = 'grpc', request_type=logging_metrics.ListLogMetricsRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_log_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_metrics_from_dict(): - test_list_log_metrics(request_type=dict) - - -def test_list_log_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - -@pytest.mark.asyncio -async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
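# A self-contained sketch of the core mocking idiom repeated throughout this
# file: patching __call__ on the *type* of a callable stub intercepts
# invocation, and because the patched attribute is not a bound method,
# mock_calls records only the request, not self.
from unittest import mock

class FakeStub:
    def __call__(self, request):
        raise RuntimeError('unit tests must never hit the network')

stub = FakeStub()
with mock.patch.object(type(stub), '__call__') as call:
    call.return_value = 'response'
    assert stub('request') == 'response'
    _, args, _ = call.mock_calls[0]
    assert args[0] == 'request'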
- with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_log_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_log_metrics_async_from_dict(): - await test_list_log_metrics_async(request_type=dict) - - -def test_list_log_metrics_field_headers(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.ListLogMetricsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value = logging_metrics.ListLogMetricsResponse() - client.list_log_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_log_metrics_field_headers_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.ListLogMetricsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) - await client.list_log_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_log_metrics_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.ListLogMetricsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_log_metrics( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_log_metrics_flattened_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_log_metrics_flattened_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.ListLogMetricsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_log_metrics( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_log_metrics_flattened_error_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), - parent='parent_value', - ) - - -def test_list_log_metrics_pager(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_log_metrics(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in results) - -def test_list_log_metrics_pages(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - RuntimeError, - ) - pages = list(client.list_log_metrics(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_log_metrics_async_pager(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_log_metrics(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in responses) - -@pytest.mark.asyncio -async def test_list_log_metrics_async_pages(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_log_metrics(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_log_metric(transport: str = 'grpc', request_type=logging_metrics.GetLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - response = client.get_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_get_log_metric_from_dict(): - test_get_log_metric(request_type=dict) - - -def test_get_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - -@pytest.mark.asyncio -async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - response = await client.get_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - # Establish that the response is the type that we expect. 
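# grpc_helpers_async.FakeUnaryUnaryCall (from google.api_core), used in the
# async tests above, wraps a canned response in an awaitable so the async
# client can `await` the mocked RPC. A dependency-free stand-in showing the
# same shape:
import asyncio

class FakeCall:
    def __init__(self, response):
        self._response = response
    def __await__(self):
        if False:
            yield  # generator form makes the object awaitable
        return self._response

async def invoke():
    return await FakeCall('log-metric')

assert asyncio.run(invoke()) == 'log-metric'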
- assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -@pytest.mark.asyncio -async def test_get_log_metric_async_from_dict(): - await test_get_log_metric_async(request_type=dict) - - -def test_get_log_metric_field_headers(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.GetLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value = logging_metrics.LogMetric() - client.get_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_log_metric_field_headers_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.GetLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - await client.get_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -def test_get_log_metric_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_log_metric( - metric_name='metric_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - - -def test_get_log_metric_flattened_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
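# A sketch of the request-vs-flattened guard the *_flattened_error tests
# assert: a method must refuse to merge an explicit request object with
# individual keyword fields. call_api is invented for illustration.
def call_api(request=None, *, metric_name=None):
    if request is not None and metric_name is not None:
        raise ValueError('pass a request object or flattened fields, not both')
    return request or {'metric_name': metric_name}

assert call_api(metric_name='metric_name_value') == {'metric_name': 'metric_name_value'}
try:
    call_api({'metric_name': 'a'}, metric_name='b')
except ValueError:
    pass
else:
    raise AssertionError('expected ValueError')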
- with pytest.raises(ValueError): - client.get_log_metric( - logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', - ) - - -@pytest.mark.asyncio -async def test_get_log_metric_flattened_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_log_metric( - metric_name='metric_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - - -@pytest.mark.asyncio -async def test_get_log_metric_flattened_error_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_log_metric( - logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_create_log_metric(transport: str = 'grpc', request_type=logging_metrics.CreateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - response = client.create_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_create_log_metric_from_dict(): - test_create_log_metric(request_type=dict) - - -def test_create_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
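# Patching __call__ on type(...) rather than on the instance is deliberate:
# Python resolves dunder methods on the class, so this intercepts the invocation
# of the underlying gRPC multicallable while leaving the client-side wrappers
# (retry, timeout, metadata injection) in place.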
- with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - -@pytest.mark.asyncio -async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - response = await client.create_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -@pytest.mark.asyncio -async def test_create_log_metric_async_from_dict(): - await test_create_log_metric_async(request_type=dict) - - -def test_create_log_metric_field_headers(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.CreateLogMetricRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value = logging_metrics.LogMetric() - client.create_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_log_metric_field_headers_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.CreateLogMetricRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - await client.create_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_log_metric_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') - - -def test_create_log_metric_flattened_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_log_metric( - logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_log_metric_flattened_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') - - -@pytest.mark.asyncio -async def test_create_log_metric_flattened_error_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_log_metric( - logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_update_log_metric(transport: str = 'grpc', request_type=logging_metrics.UpdateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - response = client.update_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_update_log_metric_from_dict(): - test_update_log_metric(request_type=dict) - - -def test_update_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - -@pytest.mark.asyncio -async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - response = await client.update_log_metric(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -@pytest.mark.asyncio -async def test_update_log_metric_async_from_dict(): - await test_update_log_metric_async(request_type=dict) - - -def test_update_log_metric_field_headers(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.UpdateLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value = logging_metrics.LogMetric() - client.update_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_log_metric_field_headers_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.UpdateLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - await client.update_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -def test_update_log_metric_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') - - -def test_update_log_metric_flattened_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_log_metric( - logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_update_log_metric_flattened_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_metrics.LogMetric() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') - - -@pytest.mark.asyncio -async def test_update_log_metric_flattened_error_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_log_metric( - logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_delete_log_metric(transport: str = 'grpc', request_type=logging_metrics.DeleteLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_log_metric_from_dict(): - test_delete_log_metric(request_type=dict) - - -def test_delete_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - -@pytest.mark.asyncio -async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_log_metric_async_from_dict(): - await test_delete_log_metric_async(request_type=dict) - - -def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.DeleteLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value = None - client.delete_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_log_metric_field_headers_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_metrics.DeleteLogMetricRequest() - - request.metric_name = 'metric_name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
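# The x-goog-request-params metadata echoes the resource name taken from the
# request body, so Google's frontends can route the RPC without having to parse
# the serialized protobuf payload.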
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'metric_name=metric_name/value', - ) in kw['metadata'] - - -def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_log_metric( - metric_name='metric_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - - -def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_log_metric_flattened_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_log_metric( - metric_name='metric_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - - -@pytest.mark.asyncio -async def test_delete_log_metric_flattened_error_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. 
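# A pre-built transport instance already owns its channel and credentials, so
# credential-related options cannot be applied to it after the fact; the client
# raises rather than silently ignoring them.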
- transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetricsServiceV2Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetricsServiceV2GrpcTransport, - ) - -def test_metrics_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metrics_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
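# The base transport is an abstract interface: the concrete gRPC and
# gRPC-asyncio subclasses override each method property with a real stub, so
# invoking one on the base class must fail loudly.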
- methods = ( - 'list_log_metrics', - 'get_log_metric', - 'create_log_metric', - 'update_log_metric', - 'delete_log_metric', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_metrics_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), - quota_project_id="octopus", - ) - - -def test_metrics_service_v2_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_metrics_service_v2_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
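# ADC (Application Default Credentials): google.auth.default() resolves
# credentials from the environment, e.g. GOOGLE_APPLICATION_CREDENTIALS, the
# gcloud user credential file, or the GCE/GKE metadata server.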
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricsServiceV2Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricsServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_metrics_service_v2_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetricsServiceV2GrpcTransport, grpc_helpers), - (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - scopes=["1", "2"], - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_metrics_service_v2_host_no_port(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - ) - assert client.transport._host == 'logging.googleapis.com:443' - - -def test_metrics_service_v2_host_with_port(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), - ) - assert client.transport._host == 'logging.googleapis.com:8000' - -def test_metrics_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_metrics_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_log_metric_path(): - project = "squid" - metric = "clam" - expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) - actual = MetricsServiceV2Client.log_metric_path(project, metric) - assert expected == actual - - -def test_parse_log_metric_path(): - expected = { - "project": "whelk", - "metric": "octopus", - } - path = MetricsServiceV2Client.log_metric_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_log_metric_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetricsServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = MetricsServiceV2Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetricsServiceV2Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = MetricsServiceV2Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetricsServiceV2Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = MetricsServiceV2Client.common_organization_path(**expected) - - # Check that the path construction is reversible. 
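# Under the hood, each parse_* helper matches the path against a regex with
# named groups derived from the template and returns the captured segments as a
# dict (typically an empty dict when the path does not match).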
- actual = MetricsServiceV2Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = MetricsServiceV2Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = MetricsServiceV2Client.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetricsServiceV2Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = MetricsServiceV2Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsServiceV2Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: - transport_class = MetricsServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index 9d2f2512aa5b..bcd0e005a2c3 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -1,6 +1,9 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 125bb619cc49..93a9122cc457 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # @@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8c84..275d649890d7 100644 --- a/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5b882211a3c7..a54a0f87bce6 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -40,7 +40,7 @@ # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.1, <3.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - "proto-plus >= 1.11.0", + "proto-plus >= 1.15.0", "packaging >= 14.3", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index 7e089b8b45a4..8d3ae3901f6b 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -7,5 +7,5 @@ # Then this file should have foo==1.14.0 google-api-core==1.26.0 google-cloud-core==1.4.1 -proto-plus==1.11.0 +proto-plus==1.15.0 packaging==14.3 From b036d6252a4129d1e57e49ca6d1f78c450a9b027 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 12 Oct 2021 19:50:36 +0200 Subject: [PATCH 548/855] chore(deps): update all dependencies (#418) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index dbc2f3fd6bcd..c3c03d97043d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.6.0 -google-cloud-bigquery==2.26.0 -google-cloud-storage==1.42.2 +google-cloud-bigquery==2.28.1 +google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 85d237c8510df6bd6d4218abc47b2d463546a3a6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Oct 2021 14:20:05 -0400 Subject: [PATCH 549/855] feat: add context manager support in client (#415) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add context manager support in client chore: fix docstring for first attribute of protos committer: @busunkim96 PiperOrigin-RevId: 401271153 Source-Link: https://github.com/googleapis/googleapis/commit/787f8c9a731f44e74a90b9847d48659ca9462d10 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/81decffe9fc72396a8153e756d1d67a6eecfd620 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODFkZWNmZmU5ZmM3MjM5NmE4MTUzZTc1NmQxZDY3YTZlZWNmZDYyMCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../config_service_v2/async_client.py | 6 +++ .../services/config_service_v2/client.py | 18 +++++-- .../config_service_v2/transports/base.py | 9 ++++ .../config_service_v2/transports/grpc.py | 3 ++ .../transports/grpc_asyncio.py | 3 ++ .../logging_service_v2/async_client.py | 6 +++ .../services/logging_service_v2/client.py | 18 +++++-- .../logging_service_v2/transports/base.py | 9 ++++ .../logging_service_v2/transports/grpc.py | 3 ++ .../transports/grpc_asyncio.py | 3 ++ .../metrics_service_v2/async_client.py | 6 +++ .../services/metrics_service_v2/client.py | 18 +++++-- .../metrics_service_v2/transports/base.py | 9 ++++ .../metrics_service_v2/transports/grpc.py | 3 ++ .../transports/grpc_asyncio.py | 3 ++ .../cloud/logging_v2/types/log_entry.py | 1 + .../google/cloud/logging_v2/types/logging.py | 15 +++++- .../cloud/logging_v2/types/logging_config.py | 27 ++++++++++ .../cloud/logging_v2/types/logging_metrics.py | 6 +++ .../logging_v2/test_config_service_v2.py | 50 +++++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 50 +++++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 50 +++++++++++++++++++ 22 files changed, 303 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 634c106b6787..f6dfc32885d2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1929,6 +1929,12 @@ async def update_cmek_settings( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index d14827a1e285..f24b6caf9144 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -397,10 +397,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_buckets( @@ -2091,6 +2088,19 @@ def update_cmek_settings( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 1ffcb227014a..cf32b364a2b3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -322,6 +322,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def list_buckets( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 7fb2560b9be7..cd06eac4151c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -883,5 +883,8 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + def close(self): + self.grpc_channel.close() + __all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 9a844e7c295c..4fd932950aa6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -915,5 +915,8 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + def close(self): + return self.grpc_channel.close() + __all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 6a11e96cb018..01b369de3ed4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -779,6 +779,12 @@ def tail_log_entries( # Done; return the response. 
return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 73909c7fff95..75410c30c93f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -341,10 +341,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def delete_log( @@ -886,6 +883,19 @@ def tail_log_entries( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 05c273d6d16c..8b3ff755e57b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -257,6 +257,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete_log( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a1031e93e2e6..146f97cbb39e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -404,5 +404,8 @@ def tail_log_entries( ) return self._stubs["tail_log_entries"] + def close(self): + self.grpc_channel.close() + __all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index a71fb28f9b84..cc2e13685f86 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -415,5 +415,8 @@ def tail_log_entries( ) return self._stubs["tail_log_entries"] + def close(self): + return self.grpc_channel.close() + __all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index defd64a13efa..8c719a64c322 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -635,6 +635,12 @@ async def delete_log_metric( request, retry=retry, timeout=timeout, metadata=metadata, ) + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 7d1e2a21d743..f03e3cb038ec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -344,10 +344,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_log_metrics( @@ -777,6 +774,19 @@ def delete_log_metric( request, retry=retry, timeout=timeout, metadata=metadata, ) + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 1ce8b3a6bd57..f8855cc3db9f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -228,6 +228,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def list_log_metrics( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 49e7263d42b8..4dc00d79df14 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -358,5 +358,8 @@ def delete_log_metric( ) return self._stubs["delete_log_metric"] + def close(self): + self.grpc_channel.close() + __all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 9ddc1975ab44..2623e7d0e69d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -368,5 +368,8 @@ def delete_log_metric( ) return self._stubs["delete_log_metric"] + def close(self): + return self.grpc_channel.close() + __all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 6c57b22d5725..2b7aed28e005 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -31,6 +31,7 @@ class LogEntry(proto.Message): r"""An individual entry in a log. + Attributes: log_name (str): Required. The resource name of the log to which this log diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 6d64b9a9164b..8477c2a49ad2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -42,6 +42,7 @@ class DeleteLogRequest(proto.Message): r"""The parameters to DeleteLog. + Attributes: log_name (str): Required. The resource name of the log to delete: @@ -65,6 +66,7 @@ class DeleteLogRequest(proto.Message): class WriteLogEntriesRequest(proto.Message): r"""The parameters to WriteLogEntries. + Attributes: log_name (str): Optional. 
A default log resource name that is assigned to @@ -164,11 +166,13 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. """ + r"""Result returned from WriteLogEntries. + """ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. + Attributes: log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, @@ -187,6 +191,7 @@ class WriteLogEntriesPartialErrors(proto.Message): class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. + Attributes: resource_names (Sequence[str]): Required. Names of one or more parent resources from which @@ -249,6 +254,7 @@ class ListLogEntriesRequest(proto.Message): class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. + Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, @@ -281,6 +287,7 @@ def raw_page(self): class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The parameters to ListMonitoredResourceDescriptors + Attributes: page_size (int): Optional. The maximum number of results to return from this @@ -301,6 +308,7 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. + Attributes: resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. @@ -325,6 +333,7 @@ def raw_page(self): class ListLogsRequest(proto.Message): r"""The parameters to ListLogs. + Attributes: parent (str): Required. The resource name that owns the logs: @@ -366,6 +375,7 @@ class ListLogsRequest(proto.Message): class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. + Attributes: log_names (Sequence[str]): A list of log names. For example, @@ -388,6 +398,7 @@ def raw_page(self): class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. + Attributes: resource_names (Sequence[str]): Required. Name of a parent resource from which to retrieve @@ -431,6 +442,7 @@ class TailLogEntriesRequest(proto.Message): class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. + Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will @@ -450,6 +462,7 @@ class TailLogEntriesResponse(proto.Message): class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. + Attributes: reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 9b62807311da..795efbf950a9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -69,6 +69,7 @@ class LifecycleState(proto.Enum): class LogBucket(proto.Message): r"""Describes a repository of logs. + Attributes: name (str): The resource name of the bucket. 
For example: @@ -114,6 +115,7 @@ class LogBucket(proto.Message): class LogView(proto.Message): r"""Describes a view over logs in a bucket. + Attributes: name (str): The resource name of the view. @@ -303,6 +305,7 @@ class BigQueryOptions(proto.Message): class ListBucketsRequest(proto.Message): r"""The parameters to ``ListBuckets``. + Attributes: parent (str): Required. The parent resource whose buckets are to be @@ -338,6 +341,7 @@ class ListBucketsRequest(proto.Message): class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. + Attributes: buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. @@ -358,6 +362,7 @@ def raw_page(self): class CreateBucketRequest(proto.Message): r"""The parameters to ``CreateBucket``. + Attributes: parent (str): Required. The resource in which to create the bucket: @@ -386,6 +391,7 @@ class CreateBucketRequest(proto.Message): class UpdateBucketRequest(proto.Message): r"""The parameters to ``UpdateBucket``. + Attributes: name (str): Required. The full resource name of the bucket to update. @@ -425,6 +431,7 @@ class UpdateBucketRequest(proto.Message): class GetBucketRequest(proto.Message): r"""The parameters to ``GetBucket``. + Attributes: name (str): Required. The resource name of the bucket: @@ -445,6 +452,7 @@ class GetBucketRequest(proto.Message): class DeleteBucketRequest(proto.Message): r"""The parameters to ``DeleteBucket``. + Attributes: name (str): Required. The full resource name of the bucket to delete. @@ -465,6 +473,7 @@ class DeleteBucketRequest(proto.Message): class UndeleteBucketRequest(proto.Message): r"""The parameters to ``UndeleteBucket``. + Attributes: name (str): Required. The full resource name of the bucket to undelete. @@ -485,6 +494,7 @@ class UndeleteBucketRequest(proto.Message): class ListViewsRequest(proto.Message): r"""The parameters to ``ListViews``. + Attributes: parent (str): Required. The bucket whose views are to be listed: @@ -512,6 +522,7 @@ class ListViewsRequest(proto.Message): class ListViewsResponse(proto.Message): r"""The response from ListViews. + Attributes: views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. @@ -532,6 +543,7 @@ def raw_page(self): class CreateViewRequest(proto.Message): r"""The parameters to ``CreateView``. + Attributes: parent (str): Required. The bucket in which to create the view @@ -555,6 +567,7 @@ class CreateViewRequest(proto.Message): class UpdateViewRequest(proto.Message): r"""The parameters to ``UpdateView``. + Attributes: name (str): Required. The full resource name of the view to update @@ -588,6 +601,7 @@ class UpdateViewRequest(proto.Message): class GetViewRequest(proto.Message): r"""The parameters to ``GetView``. + Attributes: name (str): Required. The resource name of the policy: @@ -605,6 +619,7 @@ class GetViewRequest(proto.Message): class DeleteViewRequest(proto.Message): r"""The parameters to ``DeleteView``. + Attributes: name (str): Required. The full resource name of the view to delete: @@ -622,6 +637,7 @@ class DeleteViewRequest(proto.Message): class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. + Attributes: parent (str): Required. The parent resource whose sinks are to be listed: @@ -652,6 +668,7 @@ class ListSinksRequest(proto.Message): class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. + Attributes: sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. 
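Each hunk above inserts a blank line after a message's one-line summary, the "fix docstring for first attribute of protos" noted in the commit message; the Attributes sections they precede document real proto-plus fields. Messages like these are constructed with keyword arguments or an equivalent mapping; a small sketch with hypothetical values:

    from google.cloud.logging_v2.types import logging_config

    request = logging_config.ListSinksRequest(
        parent="projects/my-project",
        page_size=50,
    )

    # proto-plus also accepts a mapping with the same field names:
    same_request = logging_config.ListSinksRequest(
        {"parent": "projects/my-project", "page_size": 50}
    )
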
@@ -672,6 +689,7 @@ def raw_page(self): class GetSinkRequest(proto.Message): r"""The parameters to ``GetSink``. + Attributes: sink_name (str): Required. The resource name of the sink: @@ -691,6 +709,7 @@ class GetSinkRequest(proto.Message): class CreateSinkRequest(proto.Message): r"""The parameters to ``CreateSink``. + Attributes: parent (str): Required. The resource in which to create the sink: @@ -731,6 +750,7 @@ class CreateSinkRequest(proto.Message): class UpdateSinkRequest(proto.Message): r"""The parameters to ``UpdateSink``. + Attributes: sink_name (str): Required. The full resource name of the sink to update, @@ -791,6 +811,7 @@ class UpdateSinkRequest(proto.Message): class DeleteSinkRequest(proto.Message): r"""The parameters to ``DeleteSink``. + Attributes: sink_name (str): Required. The full resource name of the sink to delete, @@ -865,6 +886,7 @@ class LogExclusion(proto.Message): class ListExclusionsRequest(proto.Message): r"""The parameters to ``ListExclusions``. + Attributes: parent (str): Required. The parent resource whose exclusions are to be @@ -896,6 +918,7 @@ class ListExclusionsRequest(proto.Message): class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. + Attributes: exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. @@ -916,6 +939,7 @@ def raw_page(self): class GetExclusionRequest(proto.Message): r"""The parameters to ``GetExclusion``. + Attributes: name (str): Required. The resource name of an existing exclusion: @@ -936,6 +960,7 @@ class GetExclusionRequest(proto.Message): class CreateExclusionRequest(proto.Message): r"""The parameters to ``CreateExclusion``. + Attributes: parent (str): Required. The parent resource in which to create the @@ -962,6 +987,7 @@ class CreateExclusionRequest(proto.Message): class UpdateExclusionRequest(proto.Message): r"""The parameters to ``UpdateExclusion``. + Attributes: name (str): Required. The resource name of the exclusion to update: @@ -1000,6 +1026,7 @@ class UpdateExclusionRequest(proto.Message): class DeleteExclusionRequest(proto.Message): r"""The parameters to ``DeleteExclusion``. + Attributes: name (str): Required. The resource name of an existing exclusion to diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 4b39650f24ec..26d855680694 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -187,6 +187,7 @@ class ApiVersion(proto.Enum): class ListLogMetricsRequest(proto.Message): r"""The parameters to ListLogMetrics. + Attributes: parent (str): Required. The name of the project containing the metrics: @@ -214,6 +215,7 @@ class ListLogMetricsRequest(proto.Message): class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. + Attributes: metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. @@ -234,6 +236,7 @@ def raw_page(self): class GetLogMetricRequest(proto.Message): r"""The parameters to GetLogMetric. + Attributes: metric_name (str): Required. The resource name of the desired metric: @@ -248,6 +251,7 @@ class GetLogMetricRequest(proto.Message): class CreateLogMetricRequest(proto.Message): r"""The parameters to CreateLogMetric. + Attributes: parent (str): Required. 
The resource name of the project in which to @@ -269,6 +273,7 @@ class CreateLogMetricRequest(proto.Message): class UpdateLogMetricRequest(proto.Message): r"""The parameters to UpdateLogMetric. + Attributes: metric_name (str): Required. The resource name of the metric to update: @@ -291,6 +296,7 @@ class UpdateLogMetricRequest(proto.Message): class DeleteLogMetricRequest(proto.Message): r"""The parameters to DeleteLogMetric. + Attributes: metric_name (str): Required. The resource name of the metric to delete: diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 12a5cf896031..c796dc652459 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( @@ -5437,6 +5438,9 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): @@ -6040,3 +6044,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 1bf1ac26fea1..df26166653eb 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -30,6 +30,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( @@ -1969,6 +1970,9 @@ def test_logging_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): @@ -2489,3 +2493,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7455f075e277..4d2e8dba352e 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -33,6 +33,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( @@ -1892,6 +1893,9 @@ def test_metrics_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): @@ -2414,3 +2418,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From 3b0ccb8e82a812a1414a8f1a0136dbc082f8740a Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 12 Oct 2021 15:14:13 -0400 Subject: [PATCH 550/855] chore: revert owlbot main branch templates (#390) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: revert owlbot main branch templates * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/owlbot.py | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 3bffd7c99dd8..ad8e32d4c3b6 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -100,25 +100,3 @@ s.shell.run(["nox", "-s", "blacken"], hide_output=False) -# ---------------------------------------------------------------------------- -# Main Branch migration -# ---------------------------------------------------------------------------- - -s.replace( - "*.rst", - "master", - "main" -) - -s.replace( - "CONTRIBUTING.rst", - "kubernetes/community/blob/main", - "kubernetes/community/blob/master" -) - -s.replace( - ".kokoro/*", - "master", - "main" -) - From ad63d3b7dcab40b816d0f4dd16f32e257074a849 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Oct 2021 04:30:06 -0400 Subject: [PATCH 551/855] chore: add default_version to .repo-metadata.json (#410) * chore: add default_version and codeowner_team to .repo-metadata.json * update default_version --- .../google-cloud-logging/.repo-metadata.json | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 911d58dca90d..fdb0a66c2065 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -1,14 +1,15 @@ { - "name": "logging", - "name_pretty": "Cloud Logging", - "product_documentation": "https://cloud.google.com/logging/docs", - "client_documentation": "https://googleapis.dev/python/logging/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", - "release_level": "ga", - "language": "python", - "library_type": "GAPIC_COMBO", - "repo": "googleapis/python-logging", - "distribution_name": "google-cloud-logging", - "api_id": "logging.googleapis.com", - "codeowner_team": "@googleapis/api-logging" -} \ No newline at end of file + "name": "logging", + "name_pretty": "Cloud Logging", + "product_documentation": "https://cloud.google.com/logging/docs", + "client_documentation": "https://googleapis.dev/python/logging/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", + "release_level": "ga", + "language": "python", + "library_type": "GAPIC_COMBO", + "repo": "googleapis/python-logging", + "distribution_name": "google-cloud-logging", + "api_id": "logging.googleapis.com", + "codeowner_team": "@googleapis/api-logging", + 
"default_version": "v2" +} From d0eb2ae1679c275a80a85f400ebb3201cb057254 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 15 Oct 2021 21:20:16 +0200 Subject: [PATCH 552/855] chore(deps): update dependency google-cloud-appengine-logging to v1 (#419) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index a54a0f87bce6..02eeccea5ec0 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -33,7 +33,7 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.26.0, <3.0.0dev", - "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", + "google-cloud-appengine-logging>=0.1.0, <1.0.1", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed From eaf8173ecd5c99fe0b0ed2010fabc8308581a3ba Mon Sep 17 00:00:00 2001 From: Nicholas Lee Date: Tue, 19 Oct 2021 22:27:21 +0100 Subject: [PATCH 553/855] chore(docs): remove get_default_handler call from setup docs (#421) --- packages/google-cloud-logging/samples/snippets/handler.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/handler.py b/packages/google-cloud-logging/samples/snippets/handler.py index 0a708c1383cc..49d2578984f5 100644 --- a/packages/google-cloud-logging/samples/snippets/handler.py +++ b/packages/google-cloud-logging/samples/snippets/handler.py @@ -28,7 +28,6 @@ def use_logging_handler(): # you're running in and integrates the handler with the # Python logging module. By default this captures all logs # at INFO level and higher - client.get_default_handler() client.setup_logging() # [END logging_handler_setup] From 34edaffa771979f9b0179ac494a245bb0a929099 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 21:32:11 -0400 Subject: [PATCH 554/855] chore(python): Push cloud library docs to Cloud RAD (#431) Source-Link: https://github.com/googleapis/synthtool/commit/694118b039b09551fb5d445fceb361a7dbb06400 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.kokoro/docs/common.cfg | 1 + packages/google-cloud-logging/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 0b76845028a9..cb89b2e326b7 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:424d88d5d08ddd955782a4359559dc536e658db1a77416c9a4fff79df9519ad2 + digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg index 7bb6536d7580..4e013a7f7404 100644 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -30,6 +30,7 @@ env_vars: { env_vars: { 
key: "V2_STAGING_BUCKET" + # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` value: "docs-staging-v2" } diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 63e5d4aa0825..96ef7ee7e4ea 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -104,7 +104,7 @@ def default(session): "py.test", "--quiet", f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", + "--cov=google", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", From d13eb6157c961b3251cda928c582f295fc877733 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 1 Nov 2021 11:32:12 +0000 Subject: [PATCH 555/855] chore: use gapic-generator-python 0.53.4 (#435) - [ ] Regenerate this pull request now. docs: list oneofs in docstring fix(deps): require google-api-core >= 1.28.0 fix(deps): drop packaging dependency committer: busunkim96@ PiperOrigin-RevId: 406468269 Source-Link: https://github.com/googleapis/googleapis/commit/83d81b0c8fc22291a13398d6d77f02dc97a5b6f4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ff001fbacb9e77e71d734de5f955c05fdae8526 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZmMDAxZmJhY2I5ZTc3ZTcxZDczNGRlNWY5NTVjMDVmZGFlODUyNiJ9 --- .../config_service_v2/async_client.py | 142 +++++++++--------- .../services/config_service_v2/client.py | 48 +++--- .../config_service_v2/transports/base.py | 35 +---- .../transports/grpc_asyncio.py | 1 - .../logging_service_v2/async_client.py | 36 ++--- .../services/logging_service_v2/client.py | 14 +- .../logging_service_v2/transports/base.py | 35 +---- .../transports/grpc_asyncio.py | 1 - .../metrics_service_v2/async_client.py | 34 +++-- .../services/metrics_service_v2/client.py | 12 +- .../metrics_service_v2/transports/base.py | 35 +---- .../transports/grpc_asyncio.py | 1 - .../cloud/logging_v2/types/log_entry.py | 10 ++ .../cloud/logging_v2/types/logging_config.py | 4 + packages/google-cloud-logging/setup.py | 3 +- .../testing/constraints-3.6.txt | 3 +- .../logging_v2/test_config_service_v2.py | 106 ++----------- .../logging_v2/test_logging_service_v2.py | 109 ++------------ .../logging_v2/test_metrics_service_v2.py | 109 ++------------ 19 files changed, 195 insertions(+), 543 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f6dfc32885d2..23db3b5c1b9b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import 
logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -177,17 +179,17 @@ def __init__( async def list_buckets( self, - request: logging_config.ListBucketsRequest = None, + request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists buckets. Args: - request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): + request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be @@ -266,16 +268,16 @@ async def list_buckets( async def get_bucket( self, - request: logging_config.GetBucketRequest = None, + request: Union[logging_config.GetBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a bucket. Args: - request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): + request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -312,9 +314,9 @@ async def get_bucket( async def create_bucket( self, - request: logging_config.CreateBucketRequest = None, + request: Union[logging_config.CreateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -323,7 +325,7 @@ async def create_bucket( cannot be changed. Args: - request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -360,9 +362,9 @@ async def create_bucket( async def update_bucket( self, - request: logging_config.UpdateBucketRequest = None, + request: Union[logging_config.UpdateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -379,7 +381,7 @@ async def update_bucket( A buckets region may not be modified after it is created. Args: - request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -416,9 +418,9 @@ async def update_bucket( async def delete_bucket( self, - request: logging_config.DeleteBucketRequest = None, + request: Union[logging_config.DeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -427,7 +429,7 @@ async def delete_bucket( the bucket will be permanently deleted. 
Args: - request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -459,9 +461,9 @@ async def delete_bucket( async def undelete_bucket( self, - request: logging_config.UndeleteBucketRequest = None, + request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -469,7 +471,7 @@ async def undelete_bucket( may be undeleted within the grace period of 7 days. Args: - request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -501,17 +503,17 @@ async def undelete_bucket( async def list_views( self, - request: logging_config.ListViewsRequest = None, + request: Union[logging_config.ListViewsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a bucket. Args: - request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. The parameters to `ListViews`. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -582,16 +584,16 @@ async def list_views( async def get_view( self, - request: logging_config.GetViewRequest = None, + request: Union[logging_config.GetViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view. Args: - request (:class:`google.cloud.logging_v2.types.GetViewRequest`): + request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -630,9 +632,9 @@ async def get_view( async def create_view( self, - request: logging_config.CreateViewRequest = None, + request: Union[logging_config.CreateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -640,7 +642,7 @@ async def create_view( contain a maximum of 50 views. Args: - request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
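The signature change running through this file replaces each concrete request type with Union[<Request>, dict] and the bare retry annotation with OptionalRetry = Union[retries.Retry, object], since the default is the gapic_v1.method.DEFAULT sentinel (a plain object()) rather than a Retry instance. In practice a request may be passed as a dict and a custom retry policy supplied per call; a sketch with hypothetical values:

    from google.api_core import retry as retries
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()

    # The dict is coerced into the corresponding request message, and the
    # custom Retry overrides the generated default for this call only.
    bucket = client.get_bucket(
        request={"name": "projects/my-project/locations/global/buckets/_Default"},
        retry=retries.Retry(initial=0.25, maximum=8.0, multiplier=2.0, deadline=60.0),
    )
    print(bucket.name)
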
@@ -679,9 +681,9 @@ async def create_view( async def update_view( self, - request: logging_config.UpdateViewRequest = None, + request: Union[logging_config.UpdateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -689,7 +691,7 @@ async def update_view( existing view with values from the new view: ``filter``. Args: - request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): + request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -728,16 +730,16 @@ async def update_view( async def delete_view( self, - request: logging_config.DeleteViewRequest = None, + request: Union[logging_config.DeleteViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view from a bucket. Args: - request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -769,17 +771,17 @@ async def delete_view( async def list_sinks( self, - request: logging_config.ListSinksRequest = None, + request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. Args: - request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. parent (:class:`str`): Required. The parent resource whose sinks are to be @@ -865,17 +867,17 @@ async def list_sinks( async def get_sink( self, - request: logging_config.GetSinkRequest = None, + request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. Args: - request (:class:`google.cloud.logging_v2.types.GetSinkRequest`): + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. sink_name (:class:`str`): Required. The resource name of the sink: @@ -963,11 +965,11 @@ async def get_sink( async def create_sink( self, - request: logging_config.CreateSinkRequest = None, + request: Union[logging_config.CreateSinkRequest, dict] = None, *, parent: str = None, sink: logging_config.LogSink = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -978,7 +980,7 @@ async def create_sink( entries only from the resource owning the sink. 
Args: - request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`): + request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. parent (:class:`str`): Required. The resource in which to create the sink: @@ -1063,12 +1065,12 @@ async def create_sink( async def update_sink( self, - request: logging_config.UpdateSinkRequest = None, + request: Union[logging_config.UpdateSinkRequest, dict] = None, *, sink_name: str = None, sink: logging_config.LogSink = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1080,7 +1082,7 @@ async def update_sink( the ``unique_writer_identity`` field. Args: - request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`): + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. sink_name (:class:`str`): Required. The full resource name of the sink to update, @@ -1200,10 +1202,10 @@ async def update_sink( async def delete_sink( self, - request: logging_config.DeleteSinkRequest = None, + request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1211,7 +1213,7 @@ async def delete_sink( then that service account is also deleted. Args: - request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`): + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. sink_name (:class:`str`): Required. The full resource name of the sink to delete, @@ -1286,17 +1288,17 @@ async def delete_sink( async def list_exclusions( self, - request: logging_config.ListExclusionsRequest = None, + request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions in a parent resource. Args: - request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`): + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. parent (:class:`str`): Required. The parent resource whose exclusions are to be @@ -1382,17 +1384,17 @@ async def list_exclusions( async def get_exclusion( self, - request: logging_config.GetExclusionRequest = None, + request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. Args: - request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`): + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. name (:class:`str`): Required. 
The resource name of an existing exclusion: @@ -1481,11 +1483,11 @@ async def get_exclusion( async def create_exclusion( self, - request: logging_config.CreateExclusionRequest = None, + request: Union[logging_config.CreateExclusionRequest, dict] = None, *, parent: str = None, exclusion: logging_config.LogExclusion = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1495,7 +1497,7 @@ async def create_exclusion( resource. Args: - request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`): + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. The parameters to `CreateExclusion`. parent (:class:`str`): Required. The parent resource in which to create the @@ -1584,12 +1586,12 @@ async def create_exclusion( async def update_exclusion( self, - request: logging_config.UpdateExclusionRequest = None, + request: Union[logging_config.UpdateExclusionRequest, dict] = None, *, name: str = None, exclusion: logging_config.LogExclusion = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1597,7 +1599,7 @@ async def update_exclusion( exclusion. Args: - request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. name (:class:`str`): Required. The resource name of the exclusion to update: @@ -1701,17 +1703,17 @@ async def update_exclusion( async def delete_exclusion( self, - request: logging_config.DeleteExclusionRequest = None, + request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion. Args: - request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion to @@ -1785,9 +1787,9 @@ async def delete_exclusion( async def get_cmek_settings( self, - request: logging_config.GetCmekSettingsRequest = None, + request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: @@ -1802,7 +1804,7 @@ async def get_cmek_settings( for more information. Args: - request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`): + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. 
See [Enabling CMEK for Logs @@ -1855,9 +1857,9 @@ async def get_cmek_settings( async def update_cmek_settings( self, - request: logging_config.UpdateCmekSettingsRequest = None, + request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: @@ -1878,7 +1880,7 @@ async def update_cmek_settings( for more information. Args: - request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`): + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index f24b6caf9144..e2c8fe0a5707 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -405,7 +407,7 @@ def list_buckets( request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: @@ -493,7 +495,7 @@ def get_bucket( self, request: Union[logging_config.GetBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -540,7 +542,7 @@ def create_bucket( self, request: Union[logging_config.CreateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -589,7 +591,7 @@ def update_bucket( self, request: Union[logging_config.UpdateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -646,7 +648,7 @@ def delete_bucket( self, request: Union[logging_config.DeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -690,7 +692,7 @@ def undelete_bucket( self, request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -734,7 +736,7 @@ def list_views( request: Union[logging_config.ListViewsRequest, 
dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: @@ -814,7 +816,7 @@ def get_view( self, request: Union[logging_config.GetViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -863,7 +865,7 @@ def create_view( self, request: Union[logging_config.CreateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -913,7 +915,7 @@ def update_view( self, request: Union[logging_config.UpdateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -963,7 +965,7 @@ def delete_view( self, request: Union[logging_config.DeleteViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1006,7 +1008,7 @@ def list_sinks( request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksPager: @@ -1091,7 +1093,7 @@ def get_sink( request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1179,7 +1181,7 @@ def create_sink( *, parent: str = None, sink: logging_config.LogSink = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1280,7 +1282,7 @@ def update_sink( sink_name: str = None, sink: logging_config.LogSink = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1404,7 +1406,7 @@ def delete_sink( request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1479,7 +1481,7 @@ def list_exclusions( request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: @@ -1564,7 +1566,7 @@ def get_exclusion( request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1653,7 +1655,7 @@ def create_exclusion( *, parent: str = None, exclusion: logging_config.LogExclusion = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1757,7 +1759,7 @@ def update_exclusion( name: str = None, exclusion: logging_config.LogExclusion = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1872,7 +1874,7 @@ def delete_exclusion( request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1944,7 +1946,7 @@ def get_cmek_settings( self, request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: @@ -2015,7 +2017,7 @@ def update_cmek_settings( self, request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index cf32b364a2b3..00557f640b00 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -36,15 +35,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -99,7 +89,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -132,29 +122,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
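[Editor's note] For context on the recurring `retry: OptionalRetry` change: `gapic_v1.method.DEFAULT` is a bare `object()` sentinel, not a `retries.Retry` instance, so the old `retry: retries.Retry` annotation was never strictly accurate for the default value. The new `OptionalRetry = Union[retries.Retry, object]` alias admits all three accepted forms. A sketch (the sink name and retry numbers are illustrative only):

    from typing import Union

    from google.api_core import gapic_v1
    from google.api_core import retry as retries
    from google.cloud import logging_v2

    OptionalRetry = Union[retries.Retry, object]  # the alias added in these hunks

    client = logging_v2.ConfigServiceV2Client()
    request = {"sink_name": "projects/my-project/sinks/my-sink"}  # placeholder

    # 1. Leave the sentinel in place: the method's default retry policy applies.
    client.get_sink(request=request, retry=gapic_v1.method.DEFAULT)

    # 2. Supply an explicit policy.
    client.get_sink(
        request=request,
        retry=retries.Retry(initial=0.1, maximum=10.0, deadline=60.0),
    )

    # 3. Disable retries entirely.
    client.get_sink(request=request, retry=None)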
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 4fd932950aa6..54615a12524e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 01b369de3ed4..0b927dea913a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -28,13 +28,15 @@ ) import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -174,10 +176,10 @@ def __init__( async def delete_log( self, - request: logging.DeleteLogRequest = None, + request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -188,7 +190,7 @@ async def delete_log( with a timestamp before the operation will be deleted. Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. log_name (:class:`str`): Required. 
The resource name of the log to delete: @@ -264,13 +266,13 @@ async def delete_log( async def write_log_entries( self, - request: logging.WriteLogEntriesRequest = None, + request: Union[logging.WriteLogEntriesRequest, dict] = None, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: @@ -283,7 +285,7 @@ async def write_log_entries( organizations, billing accounts or folders) Args: - request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. log_name (:class:`str`): Optional. A default log resource name that is assigned @@ -434,12 +436,12 @@ async def write_log_entries( async def list_log_entries( self, - request: logging.ListLogEntriesRequest = None, + request: Union[logging.ListLogEntriesRequest, dict] = None, *, resource_names: Sequence[str] = None, filter: str = None, order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesAsyncPager: @@ -449,7 +451,7 @@ async def list_log_entries( Logs `__. Args: - request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. resource_names (:class:`Sequence[str]`): Required. Names of one or more parent resources from @@ -569,9 +571,9 @@ async def list_log_entries( async def list_monitored_resource_descriptors( self, - request: logging.ListMonitoredResourceDescriptorsRequest = None, + request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -579,7 +581,7 @@ async def list_monitored_resource_descriptors( used by Logging. Args: - request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -633,10 +635,10 @@ async def list_monitored_resource_descriptors( async def list_logs( self, - request: logging.ListLogsRequest = None, + request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsAsyncPager: @@ -645,7 +647,7 @@ async def list_logs( listed. Args: - request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (:class:`str`): Required. 
The resource name that owns the logs: @@ -732,7 +734,7 @@ def tail_log_entries( self, requests: AsyncIterator[logging.TailLogEntriesRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 75410c30c93f..8e64d902478f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -349,7 +351,7 @@ def delete_log( request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -431,7 +433,7 @@ def write_log_entries( resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: @@ -588,7 +590,7 @@ def list_log_entries( resource_names: Sequence[str] = None, filter: str = None, order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesPager: @@ -709,7 +711,7 @@ def list_monitored_resource_descriptors( self, request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: @@ -766,7 +768,7 @@ def list_logs( request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsPager: @@ -851,7 +853,7 @@ def tail_log_entries( self, requests: Iterator[logging.TailLogEntriesRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 8b3ff755e57b..556488467cc7 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -36,15 +35,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
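[Editor's note] The deleted `_get_scopes_kwargs` shim existed only to support google-auth < 1.25.0, which predates the `default_scopes` argument. With setup.py now floored at google-api-core >= 1.28.0 (which in turn requires a new enough google-auth), every transport builds the kwargs inline, as the `scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}` replacement shows. Roughly what the transports now rely on (a simplified sketch, with an abbreviated scope tuple, not the full constructor):

    import google.auth

    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/logging.admin",
    )

    def load_default_credentials(scopes=None, quota_project_id=None):
        # google-auth >= 1.25.0 understands default_scopes: user-supplied
        # scopes win; otherwise the service's defaults are applied.
        scopes_kwargs = {"scopes": scopes, "default_scopes": AUTH_SCOPES}
        return google.auth.default(
            quota_project_id=quota_project_id, **scopes_kwargs
        )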
self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index cc2e13685f86..84e765cf4fdf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 8c719a64c322..1e3213a4c3f1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -166,17 +168,17 @@ def __init__( async def list_log_metrics( self, - request: logging_metrics.ListLogMetricsRequest = None, + request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. Args: - request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`): + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. parent (:class:`str`): Required. The name of the project containing the @@ -259,17 +261,17 @@ async def list_log_metrics( async def get_log_metric( self, - request: logging_metrics.GetLogMetricRequest = None, + request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.GetLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. metric_name (:class:`str`): Required. 
The resource name of the desired metric: @@ -354,18 +356,18 @@ async def get_log_metric( async def create_log_metric( self, - request: logging_metrics.CreateLogMetricRequest = None, + request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, *, parent: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. parent (:class:`str`): Required. The resource name of the project in which to @@ -450,18 +452,18 @@ async def create_log_metric( async def update_log_metric( self, - request: logging_metrics.UpdateLogMetricRequest = None, + request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to update: @@ -558,17 +560,17 @@ async def update_log_metric( async def delete_log_metric( self, - request: logging_metrics.DeleteLogMetricRequest = None, + request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. metric_name (:class:`str`): Required. 
The resource name of the metric to delete: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index f03e3cb038ec..9290d62cc799 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -352,7 +354,7 @@ def list_log_metrics( request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsPager: @@ -434,7 +436,7 @@ def get_log_metric( request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -519,7 +521,7 @@ def create_log_metric( *, parent: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -615,7 +617,7 @@ def update_log_metric( *, metric_name: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -711,7 +713,7 @@ def delete_log_metric( request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f8855cc3db9f..e3105748a9fe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -36,15 +35,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = 
None - class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 2623e7d0e69d..956f53b1d31f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 2b7aed28e005..99331d327ae9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -32,6 +32,13 @@ class LogEntry(proto.Message): r"""An individual entry in a log. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: log_name (str): Required. The resource name of the log to which this log @@ -80,12 +87,15 @@ class LogEntry(proto.Message): "type.googleapis.com/google.cloud.audit.AuditLog" "type.googleapis.com/google.appengine.logging.v1.RequestLog". + This field is a member of `oneof`_ ``payload``. text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). + This field is a member of `oneof`_ ``payload``. json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. 
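[Editor's note] The docstring additions in the `log_entry.py` hunk document proto-plus oneof semantics for `LogEntry.payload`: the three payload fields are mutually exclusive, and assigning one clears the others. A small sketch of that behavior (the log name and payload values are illustrative):

    from google.cloud.logging_v2.types import LogEntry

    entry = LogEntry(log_name="projects/my-project/logs/my-log")  # placeholder

    entry.text_payload = "plain text"           # selects the text_payload member
    entry.json_payload = {"message": "hello"}   # reassigns the oneof; text_payload is cleared

    assert entry.text_payload == ""             # cleared back to its default
    assert LogEntry.pb(entry).WhichOneof("payload") == "json_payload"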
+ This field is a member of `oneof`_ ``payload``. timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry occurred. This time is used to compute the log entry's age diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 795efbf950a9..0724911b90f6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -154,6 +154,9 @@ class LogSink(proto.Message): created within a project, organization, billing account, or folder. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. The client-assigned sink identifier, unique within @@ -238,6 +241,7 @@ class LogSink(proto.Message): bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. + This field is a member of `oneof`_ ``options``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the sink. diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 02eeccea5ec0..65cfc02c1934 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -32,7 +32,7 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "google-cloud-appengine-logging>=0.1.0, <1.0.1", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x @@ -41,7 +41,6 @@ "google-cloud-core >= 1.4.1, <3.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "proto-plus >= 1.15.0", - "packaging >= 14.3", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index 8d3ae3901f6b..250c505ff675 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -5,7 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.26.0 +google-api-core==1.28.0 google-cloud-core==1.4.1 proto-plus==1.15.0 -packaging==14.3 diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index c796dc652459..0d96a08fc83d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -38,9 +37,6 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports -from google.cloud.logging_v2.services.config_service_v2.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.logging_v2.types import logging_config from google.oauth2 import 
service_account from google.protobuf import field_mask_pb2 # type: ignore @@ -48,20 +44,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -218,7 +200,7 @@ def test_config_service_v2_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,7 +217,7 @@ def test_config_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -252,7 +234,7 @@ def test_config_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -281,7 +263,7 @@ def test_config_service_v2_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,7 +332,7 @@ def test_config_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -392,7 +374,7 @@ def test_config_service_v2_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -414,7 +396,7 @@ def test_config_service_v2_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -445,7 +427,7 @@ def test_config_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, 
"__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -476,7 +458,7 @@ def test_config_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -5442,7 +5424,6 @@ def test_config_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -5468,31 +5449,6 @@ def test_config_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), - quota_project_id="octopus", - ) - - def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -5504,7 +5460,6 @@ def test_config_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -5522,23 +5477,6 @@ def test_config_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConfigServiceV2Client() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -5546,7 +5484,6 @@ def test_config_service_v2_auth_adc_old_google_auth(): transports.ConfigServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -5565,31 +5502,6 @@ def test_config_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index df26166653eb..5d6021f9aeb3 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,9 +38,6 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore @@ -54,20 +50,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -225,7 +207,7 @@ def test_logging_service_v2_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -242,7 +224,7 @@ def test_logging_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -259,7 +241,7 @@ def test_logging_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -288,7 +270,7 @@ def test_logging_service_v2_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,7 +339,7 @@ def test_logging_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -399,7 +381,7 @@ def test_logging_service_v2_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,7 +403,7 @@ def test_logging_service_v2_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -452,7 +434,7 @@ def test_logging_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -483,7 +465,7 @@ def test_logging_service_v2_client_client_options_credentials_file( 
options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1974,7 +1956,6 @@ def test_logging_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2001,32 +1982,6 @@ def test_logging_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id="octopus", - ) - - def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2038,7 +1993,6 @@ def test_logging_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2057,24 +2011,6 @@ def test_logging_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LoggingServiceV2Client() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2082,7 +2018,6 @@ def test_logging_service_v2_auth_adc_old_google_auth(): transports.LoggingServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -2102,32 +2037,6 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 4d2e8dba352e..9c14746d08cf 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -42,9 +41,6 @@ from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.logging_v2.types import logging_metrics from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore @@ -52,20 +48,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -223,7 +205,7 @@ def test_metrics_service_v2_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -240,7 +222,7 @@ def test_metrics_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -257,7 +239,7 @@ def test_metrics_service_v2_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -286,7 +268,7 @@ def test_metrics_service_v2_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,7 +337,7 @@ def test_metrics_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -397,7 +379,7 @@ def test_metrics_service_v2_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -419,7 +401,7 @@ def test_metrics_service_v2_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -450,7 +432,7 @@ def test_metrics_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -481,7 +463,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( 
options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1897,7 +1879,6 @@ def test_metrics_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1924,32 +1905,6 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id="octopus", - ) - - def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1961,7 +1916,6 @@ def test_metrics_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1980,24 +1934,6 @@ def test_metrics_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricsServiceV2Client() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2005,7 +1941,6 @@ def test_metrics_service_v2_auth_adc_old_google_auth(): transports.MetricsServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -2025,32 +1960,6 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ From 56e71a06716b8bb8dda2f143250f9a467b39d0b8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 1 Nov 2021 15:58:25 -0700 Subject: [PATCH 556/855] feat: use structured logging on GCF with python 3.7 (#434) --- .../google/cloud/logging_v2/client.py | 10 +-- .../tests/environment/README.md | 2 - .../tests/environment/deployable/go/main.go | 29 +++++- .../environment/deployable/java/.gitignore | 1 + .../environment/deployable/java/Dockerfile | 6 +- .../deployable/DeployableApplication.java | 2 +- .../java/envtest/deployable/Snippets.java | 31 ++++++- .../web/DeployableHttpController.java | 30 +++++++ .../environment/deployable/nodejs/app.js | 12 +-- .../environment/deployable/nodejs/tests.js | 67 +++----------- .../environment/deployable/python/.gitignore | 1 + .../deployable/python/requirements.txt | 2 +- .../envctl/env_scripts/go/cloudrun.sh | 4 +- .../envctl/env_scripts/java/appengine_flex.sh | 83 +++++++++++++++++ .../env_scripts/java/appengine_standard.sh | 88 +++++++++++++++++++ .../envctl/env_scripts/java/cloudrun.sh | 88 +++++++++++++++++++ .../envctl/env_scripts/java/compute.sh | 69 +++++++++++++++ .../envctl/env_scripts/java/kubernetes.sh | 2 +- .../tests/environment/noxfile.py | 4 +- .../tests/environment/tests/common/common.py | 72 ++++++++++----- .../tests/environment/tests/common/stdout.py | 67 -------------- .../tests/java/test_appengine_flex.py | 26 ++++++ .../tests/java/test_appengine_standard.py | 26 ++++++ .../environment/tests/java/test_cloudrun.py | 35 ++++++++ .../environment/tests/java/test_compute.py | 27 ++++++ .../environment/tests/java/test_kubernetes.py | 2 +- .../tests/nodejs/test_appengine_standard.py | 30 +------ .../environment/tests/nodejs/test_cloudrun.py | 31 +------ .../tests/nodejs/test_functions.py | 33 +------ .../tests/nodejs/test_kubernetes.py | 31 +------ tests/environment | 2 +- 31 files changed, 615 insertions(+), 298 deletions(-) create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_flex.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_standard.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/cloudrun.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/compute.sh delete mode 100644 packages/google-cloud-logging/tests/environment/tests/common/stdout.py create mode 100644 
packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_compute.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 51d93355ce9e..e9b432eb22e1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -355,13 +355,9 @@ def get_default_handler(self, **kw): return AppEngineHandler(self, **kw) elif monitored_resource.type == _GKE_RESOURCE_TYPE: return ContainerEngineHandler(**kw) - elif ( - monitored_resource.type == _GCF_RESOURCE_TYPE - and sys.version_info[0] == 3 - and sys.version_info[1] >= 8 - ): - # Cloud Functions with runtimes > 3.8 supports structured logs on standard out - # 3.7 should use the standard CloudLoggingHandler, which sends logs over the network. + elif monitored_resource.type == _GCF_RESOURCE_TYPE: + # __stdout__ stream required to support structured logging on Python 3.7 + kw["stream"] = kw.get("stream", sys.__stdout__) return StructuredLogHandler(**kw, project_id=self.project) elif monitored_resource.type == _RUN_RESOURCE_TYPE: return StructuredLogHandler(**kw, project_id=self.project) diff --git a/packages/google-cloud-logging/tests/environment/README.md b/packages/google-cloud-logging/tests/environment/README.md index 41600ff2f668..df2878ea1e3a 100644 --- a/packages/google-cloud-logging/tests/environment/README.md +++ b/packages/google-cloud-logging/tests/environment/README.md @@ -71,5 +71,3 @@ Test files in `tests/` can inherit from any file in `tests/common` log | Test Name | Optional Input | Description | | -------------- | ---------------- | -------------------------------- | | `simplelog` | `log_name`, `log_text` | Logs a simple text payload | -| `requestlog` | `log_name`, `log_text` | Logs an http request object | -| `stdoutlog` | `log_name`, `log_text` | Logs to standard out | diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go index 37d2bfafe009..00eca7fc73cd 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/main.go +++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go @@ -22,6 +22,7 @@ import ( "log" "net/http" "os" + "strings" "time" "cloud.google.com/go/compute/metadata" @@ -207,7 +208,33 @@ func simplelog(args map[string]string) { logtext = val } - logger := client.Logger(logname).StandardLogger(logging.Info) + logseverity := logging.Info + if val, ok := args["severity"]; ok { + switch strings.ToUpper(val) { + case "DEFAULT": + logseverity = logging.Default + case "DEBUG": + logseverity = logging.Debug + case "INFO": + logseverity = logging.Info + case "NOTICE": + logseverity = logging.Notice + case "WARNING": + logseverity = logging.Warning + case "ERROR": + logseverity = logging.Error + case "CRITICAL": + logseverity = logging.Critical + case "ALERT": + logseverity = logging.Alert + case "EMERGENCY": + logseverity = logging.Emergency + default: + break + } + } + + logger := client.Logger(logname).StandardLogger(logseverity) logger.Println(logtext) } diff --git 
a/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore index 976ef6f11484..cf03bc7df319 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore +++ b/packages/google-cloud-logging/tests/environment/deployable/java/.gitignore @@ -1,5 +1,6 @@ java-logging _library +workspace lib.tar *.sw* diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile index 2a0b9c2340be..86bec8092075 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile @@ -13,14 +13,14 @@ # limitations under the License. # compile local java-logging library -FROM docker.io/maven AS lib-env +FROM maven:3.8.1 AS lib-env WORKDIR /app -COPY _library ./java-logging +COPY _library ./ RUN mvn verify --fail-never RUN mvn -Dmaven.test.skip=true package # Compile the deployable code. -FROM docker.io/maven AS build-env +FROM maven:3.8.1 AS build-env WORKDIR /app COPY pom.xml /app/pom.xml # copy over compiled library diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java index dd591225e235..5d44e04c8d2f 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java +++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java @@ -154,7 +154,7 @@ public static void main(String[] args) throws IOException, RuntimeException { // ****************** GAE, GKE, GCE ****************** // Enable app subscriber for all environments except GCR Boolean enableSubscriber = Boolean.parseBoolean(System.getenv().getOrDefault("ENABLE_SUBSCRIBER", "false")); - System.out.format("ENV: ENABLE_SUBSCRIBER=true\n"); + System.out.format("ENV: ENABLE_SUBSCRIBER=%b\n", enableSubscriber); if (enableSubscriber) { // start a pub/sub server and listen for messages startPubsubSubscription(); diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java index bd6c48a8e7b2..62d14a25c8d3 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java +++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java @@ -22,10 +22,36 @@ import com.google.cloud.logging.LoggingOptions; import com.google.cloud.logging.Payload.StringPayload; import com.google.cloud.logging.Severity; +import com.google.cloud.logging.MonitoredResourceUtil; import com.google.logging.type.LogSeverity; +import com.google.cloud.logging.Synchronicity; public class Snippets { + private Severity getSeverity(String severityString){ + Severity severity; + if (severityString.equals("DEBUG")){ + severity = Severity.DEBUG; + } else if(severityString.equals("INFO")){ + severity = Severity.INFO; + } else if (severityString.equals("NOTICE")){ + severity = Severity.NOTICE; + } else if(severityString.equals("WARNING")){ + severity = 
Severity.WARNING; + } else if(severityString.equals("ERROR")){ + severity = Severity.ERROR; + } else if(severityString.equals("CRITICAL")){ + severity = Severity.CRITICAL; + } else if(severityString.equals("ALERT")){ + severity = Severity.ALERT; + } else if(severityString.equals("EMERGENCY")){ + severity = Severity.EMERGENCY; + } else { + severity = Severity.DEFAULT; + } + return severity; + } + public void simplelog(Map<String, String> args){ System.out.println("Called Simplelog!"); // pull out arguments @@ -33,11 +59,14 @@ public void simplelog(Map<String, String> args){ String logName = args.getOrDefault("log_name", "test"); String severityString = args.getOrDefault("severity", "DEFAULT"); + // Set severity + Severity severity = getSeverity(severityString); + // Instantiates a client Logging logging = LoggingOptions.getDefaultInstance().getService(); LogEntry entry = LogEntry.newBuilder(StringPayload.of(logText)) - .setSeverity(Severity.ERROR) + .setSeverity(severity) .setLogName(logName) .setResource(MonitoredResource.newBuilder("global").build()) .build(); diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java index bf317584f56d..9be5b5e2e68b 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java +++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/web/DeployableHttpController.java @@ -17,6 +17,7 @@ import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; import com.google.cloud.MonitoredResource; import com.google.cloud.logging.LogEntry; @@ -45,6 +46,17 @@ import java.lang.Thread; import java.lang.InterruptedException; +import java.util.Base64; +import org.apache.commons.lang3.StringUtils; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; +import java.util.Map; +import org.eclipse.jetty.util.B64Code; +import envtest.deployable.DeployableApplication; /** * Defines a controller to handle HTTP requests. */ @@ -58,4 +70,22 @@ public String helloWorld() { return message; } + + /** + * This function will be triggered by incoming pub/sub messages from envctl.
+ * It will then find and execute the requested test snippet, based on the + * contents of the pub/sub payload + */ + @RequestMapping(value = "/", method = RequestMethod.POST) + public ResponseEntity pubsub_receive(@RequestBody Map payload) { + Map pubsubMessage = (Map) payload.get("message"); + Map args = Collections.emptyMap(); + if (pubsubMessage.containsKey("attributes")) { + args = (Map) pubsubMessage.get("attributes"); + } + String encodedName = (String) pubsubMessage.get("data"); + String fnName = B64Code.decode(encodedName, "UTF-8"); + DeployableApplication.triggerSnippet(fnName, args); + return new ResponseEntity<>(fnName, HttpStatus.OK); + } } diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js index 7ba97ba79dc4..9d334c2c6e4b 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/app.js @@ -75,7 +75,7 @@ if (process.env.RUNSERVER) { // Start app server const PORT = process.env.PORT || 8080; app.listen(PORT, () => - console.log(`nodejs-pubsub-tutorial listening on port ${PORT}`) + console.log(`listening on port ${PORT}`) ); } @@ -121,12 +121,6 @@ function triggerTest(message) { ? Buffer.from(message.data, 'base64').toString() : console.error("WARNING: no log function was invoked"); - console.log('Fn invoked with attributes, if any: '); - console.log(message.attributes); - - if (message.attributes) { - tests[testName](message.attributes['log_name'], message.attributes['log_text']); - } else { - tests[testName](); - } + args = message.attributes ? message.attributes : {}; + tests[testName](args); } diff --git a/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js index 41731416ccb2..2fbf2579488d 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js +++ b/packages/google-cloud-logging/tests/environment/deployable/nodejs/tests.js @@ -14,74 +14,29 @@ const {Logging} = require('@google-cloud/logging'); const logging = new Logging(); -const defaultRequest = { - method: 'POST', - httpVersion: 'HTTP/1.1', - url: 'https://google.com', - headers: {'x-cloud-trace-context': '1/1;o=1'}, - rawHeaders: ['X-Cloud-Trace-Context'], - statusCode: 200, -} /** * The following are test functions that can be triggered in each service. * envctl nodejs trigger simplelog log_name=foo,log_text=bar */ -var simplelog = function(logname = "my-log", logtext = "hello world" ) { +var simplelog = function(args) { + // set default values + const logname = "logname" in args ? args["logname"] : "my-log"; + const logtext = "log_text" in args ? args["log_text"] : "simplelog"; + const severity = "severity" in args ? 
args["severity"] : "ERROR"; + const log = logging.log(logname); - const text_entry = log.entry(logtext); + const metadata = { + severity: severity, + }; - log.write(text_entry).then(r => console.log(r)); -} - -/** - * envctl nodejs trigger requestlog log_name=foo,log_text=bar - */ -var requestlog = function(logname = 'my-log', logtext = 'hello world', request) { - if (!request) request = defaultRequest; - const log = logging.log(logname); - const entry = log.entry({httpRequest: request}, logtext); - log.write(entry).then(r => console.log(r)); -} + const text_entry = log.entry(metadata, logtext); -/** - * envctl nodejs trigger stdoutlog log_name=foo,log_text=bar - */ -var stdoutlog = function(logname = 'my-log', logtext = 'hello world', request) { - if (!request) request = defaultRequest; - logging.setProjectId().then( res => { - logging.setDetectedResource().then( res => { - const log = logging.logSync(logname); - const meta = { - // Fields all agents lift: - severity: 'WARNING', - httpRequest: request, - labels: {foo: 'bar'}, - // Fields not lifted by all agents, e.g. GCF: - insertId: '42', - timestamp: new Date(2021,1,1,1,1,1,1), - resource: { - type: 'global', - labels: { - region: 'my-backyard', - zone: 'twilight', - } - }, - // Note: explicit trace declarations override httpRequest header context - trace: 'projects/my-projectid/traces/0679686673a', - spanId: '000000000000004a', - traceSampled: false, - } - const entry = log.entry(meta, logtext); - log.write(entry); - }); - }); + log.write(text_entry); } module.exports={ 'simplelog': simplelog, - 'stdoutlog': stdoutlog, - 'requestlog': requestlog, } diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore index fd785511cedb..d555b835059e 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore +++ b/packages/google-cloud-logging/tests/environment/deployable/python/.gitignore @@ -1,3 +1,4 @@ python-logging _library *.tar +_library diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index 26a183914fed..6ad3143ae8ed 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -1,4 +1,4 @@ flask==1.1.2 -google-cloud-pubsub==2.3.0 +google-cloud-pubsub>=2.8.0 click==7.1.2 pytz==2021.1 diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh index 3155c856530b..50d0a221df75 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/cloudrun.sh @@ -18,7 +18,7 @@ set -o pipefail # any step in pipe caused failure set -u # undefined variables cause exit # Note: there is a max character count constraint -SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 8)x" +SERVICE_NAME="log-go-run-$(echo $ENVCTL_ID | head -c 8)" SA_NAME=$SERVICE_NAME-invoker add_service_accounts() { @@ -101,4 +101,4 @@ verify() { filter-string() { echo "resource.type=\"global\"" -} \ No newline at end of file +} diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_flex.sh 
b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_flex.sh new file mode 100755 index 000000000000..612de17396c5 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_flex.sh @@ -0,0 +1,83 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-java-flex-con-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud app services delete $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + + +deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -e + + build_container + + cat <<EOF > $TMP_DIR/app.yaml + runtime: custom + service: $SERVICE_NAME + env: flex + resources: + cpu: 1 + memory_gb: 2 + disk_size_gb: 10 + manual_scaling: + instances: 1 + env_variables: + ENABLE_SUBSCRIBER: "true" + PUBSUB_TOPIC: $SERVICE_NAME +EOF + + # deploy + pushd $TMP_DIR + gcloud app deploy --image-url $GCR_PATH -q + popd +} + +filter-string() { + echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\"" +} + + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_standard.sh new file mode 100755 index 000000000000..8397e422df37 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/appengine_standard.sh @@ -0,0 +1,88 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="logging-java-standard-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete service + gcloud app services delete $SERVICE_NAME -q 2> /dev/null + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud app services describe $SERVICE_NAME -q > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + # create pub/sub topic + set +e + gcloud pubsub topics create $SERVICE_NAME 2>/dev/null + set -e + + build_container nopush + id=$(docker create $GCR_PATH) + docker cp $id:/app/deployable-1.0.0.jar $TMP_DIR/deployable-1.0.0.jar + docker rm -v $id + ls $TMP_DIR + + # build app.yaml + cat <<EOF > $TMP_DIR/app.yaml + runtime: java11 + service: $SERVICE_NAME + manual_scaling: + instances: 1 + env_variables: + ENABLE_SUBSCRIBER: "true" + RUNSERVER: "false" + PUBSUB_TOPIC: $SERVICE_NAME +EOF + # deploy + pushd $TMP_DIR + gcloud app deploy --appyaml $TMP_DIR/app.yaml $TMP_DIR/deployable-1.0.0.jar -q + popd + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done +} + +filter-string() { + echo "resource.type=\"gae_app\" AND resource.labels.module_id=\"$SERVICE_NAME\"" +} + + + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/cloudrun.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/cloudrun.sh new file mode 100755 index 000000000000..23f2111d4160 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/cloudrun.sh @@ -0,0 +1,88 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-java-run-$(echo $ENVCTL_ID | head -c 10)" +SA_NAME=env-test-invoker + +add_service_accounts() { + set +e + local PROJECT_ID=$(gcloud config list --format 'value(core.project)') + local PROJECT_NUMBER=$(gcloud projects list --filter=$PROJECT_ID --format="value(PROJECT_NUMBER)") + gcloud projects add-iam-policy-binding $PROJECT_ID \ + --member=serviceAccount:service-$PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com \ + --role=roles/iam.serviceAccountTokenCreator + gcloud iam service-accounts create $SA_NAME \ + --display-name "Pub/Sub Invoker" + gcloud run services add-iam-policy-binding $SERVICE_NAME \ + --member=serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com \ + --role=roles/run.invoker + RUN_URL=$(gcloud run services list --filter=$SERVICE_NAME --format="value(URL)") + gcloud pubsub subscriptions create $SERVICE_NAME-subscriber --topic $SERVICE_NAME \ + --push-endpoint=$RUN_URL \ + --push-auth-service-account=$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com + set -e +} + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete service account + gcloud iam service-accounts delete $SA_NAME@$PROJECT_ID.iam.gserviceaccount.com -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud run services delete $SERVICE_NAME -q 2> /dev/null + set -e +} + +verify() { + set +e + gcloud run services describe $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + build_container + gcloud config set run/platform managed + gcloud config set run/region us-west1 + gcloud run deploy \ + --image $GCR_PATH \ + --update-env-vars RUNSERVER=true \ + --update-env-vars ENABLE_SUBSCRIBER=false \ + --no-allow-unauthenticated \ + $SERVICE_NAME + ## create pubsub subscription + add_service_accounts +} + +filter-string() { + echo "resource.type=\"global\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/compute.sh new file mode 100755 index 000000000000..1f8767af3a17 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/compute.sh @@ -0,0 +1,69 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + +SERVICE_NAME="logging-java-gce-$(echo $ENVCTL_ID | head -c 10)" + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # delete container images + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null + # delete service + gcloud compute instances delete $SERVICE_NAME -q + set -e +} + +verify() { + set +e + gcloud compute instances describe $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + build_container + gcloud compute instances create-with-container \ + $SERVICE_NAME \ + --container-image $GCR_PATH \ + --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true",RUNSERVER="false" + # wait for the pub/sub subscriber to start + NUM_SUBSCRIBERS=0 + TRIES=0 + while [[ "${NUM_SUBSCRIBERS}" -lt 1 && "${TRIES}" -lt 10 ]]; do + sleep 30 + NUM_SUBSCRIBERS=$(gcloud pubsub topics list-subscriptions $SERVICE_NAME 2> /dev/null | wc -l) + TRIES=$((TRIES + 1)) + done + +} + +filter-string() { + INSTANCE_ID=$(gcloud compute instances list --filter="name~^$SERVICE_NAME$" --format="value(ID)") + echo "resource.type=\"gce_instance\" AND resource.labels.instance_id=\"$INSTANCE_ID\"" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh index a5a012bc5cd5..90d6e8755e97 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/kubernetes.sh @@ -58,7 +58,7 @@ attach_or_create_gke_cluster(){ echo "cluster not found. creating..." 
gcloud container clusters create $SERVICE_NAME \ --zone $ZONE \ - --scopes=gke-default,pubsub \ + --scopes=cloud-platform \ --no-enable-ip-alias fi set -e diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index f47421828e5f..672cff8d5b46 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -124,6 +124,7 @@ def blacken(session: nox.sessions.Session) -> None: "appengine_standard", "appengine_flex_python", "appengine_flex_container", + "appengine_flex", "kubernetes", "cloudrun", "functions", @@ -158,4 +159,5 @@ def tests(session, language, platform): if os.path.exists(test_path): session.run("py.test", "-s", test_path, *session.posargs) else: - session.skip(f"not yet implemented: {test_path}") + print(f"not implemented: {test_path}") + exit(1) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 4d5c79bcccd6..ca07741d1b34 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -14,6 +14,7 @@ import google.cloud.logging from google.cloud._helpers import UTC +from google.cloud.logging_v2 import ProtobufEntry from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import Client @@ -51,29 +52,69 @@ class Common: monitored_resource_labels = None def _add_time_condition_to_filter(self, filter_str, timestamp=None): + """ + Appends a 10 minute limit to an arbitrary filter string + """ time_format = "%Y-%m-%dT%H:%M:%S.%f%z" if not timestamp: timestamp = datetime.now(timezone.utc) - timedelta(minutes=10) return f'"{filter_str}" AND timestamp > "{timestamp.strftime(time_format)}"' - def _get_logs(self, filter_str=None): + def _get_logs(self, filter_str=None, ignore_protos=True): + """ + Helper function to retrieve the text and json logs using an input + filter string. + + Parameters: + filter_str (str): the filter string determining which logs to include + ignore_protos (bool): when disabled, matching protobuf entries will be included. + This may result in false positives from AuditLogs on certain projects + + Returns: + list[LogEntry] + """ if not filter_str: _, filter_str, _ = self._script.run_command(Command.GetFilter) iterator = self._client.list_entries(filter_=filter_str) entries = list(iterator) + if ignore_protos: + # in most cases, we want to ignore AuditLogs in our tests + entries = [e for e in entries if not isinstance(e, ProtobufEntry)] if not entries: raise LogsNotFound return entries def _trigger(self, snippet, **kwargs): + """ + Helper function for triggering a snippet deployed in a cloud environment + """ timestamp = datetime.now(timezone.utc) args_str = ",".join([f'{k}="{v}"' for k, v in kwargs.items()]) self._script.run_command(Command.Trigger, [snippet, args_str]) @RetryErrors(exception=(LogsNotFound, RpcError), delay=2, max_tries=2) def trigger_and_retrieve( - self, log_text, snippet, append_uuid=True, max_tries=6, **kwargs + self, log_text, snippet, append_uuid=True, ignore_protos=True, max_tries=6, **kwargs ): + """ + Trigger a snippet deployed in the cloud by envctl, and return resulting + logs. + + Parameters: + log_text (str): passed as an argument to the snippet function.
+ Typically used for the body of the resulting log, + snippet (str): the name of the snippet to trigger. + append_uuid (bool): when true, appends a unique suffix to log_text, + to ensure old logs aren't picked up in later runs + ignore_protos (bool): when disabled, matching protobuf entries will be included. + This may result in false positives from AuditLogs on certain projects + max_tries (int): number of times to retry if logs haven't been found + **kwargs: additional keyword arguments are passed to the snippet function + + Returns: + list[LogEntry] + """ + if append_uuid: log_text = f"{log_text} {uuid.uuid1()}" self._trigger(snippet, log_text=log_text, **kwargs) @@ -85,9 +126,10 @@ def trigger_and_retrieve( while tries < max_tries: # retrieve resulting logs try: - log_list = self._get_logs(filter_str) + log_list = self._get_logs(filter_str, ignore_protos) return log_list except (LogsNotFound, RpcError) as e: + print("logs not found...") sleep(5) tries += 1 # log not found @@ -158,9 +200,11 @@ def test_receive_unicode_log(self): self.assertIsNotNone(found_log, "expected unicode log not found") def test_monitored_resource(self): - if self.language not in ["nodejs", "go"]: - # TODO: other languages to also support this test + if self.language == 'java' or self.language == 'python': + # TODO: implement in java + # TODO: remove python after v3.0.0 return True + log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "simplelog") found_resource = log_list[-1].resource @@ -173,25 +217,7 @@ def test_monitored_resource(self): self.assertTrue(found_resource.labels[label], f'resource.labels[{label}] is not set') - def test_request_log(self): - if self.language not in ["nodejs"]: - return True - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, "requestlog") - # Note: 2 logs are spawned, only one containing http_request prop. - log_entry = log_list[-1] - if log_entry.http_request is None: - log_entry = log_list[-2] - found_request = log_entry.http_request - if hasattr(self, 'request_props'): - for prop in self.request_props: - self.assertTrue(found_request[prop], - f'{prop} is not set') - def test_severity(self): - if self.language != "python": - # to do: enable test for other languages - return True log_text = f"{inspect.currentframe().f_code.co_name}" severities = [ "EMERGENCY", diff --git a/packages/google-cloud-logging/tests/environment/tests/common/stdout.py b/packages/google-cloud-logging/tests/environment/tests/common/stdout.py deleted file mode 100644 index 51679d30b9b3..000000000000 --- a/packages/google-cloud-logging/tests/environment/tests/common/stdout.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -import logging -import unittest -import inspect -import re - -import google.cloud.logging - -from ..common.common import Common - -class CommonStdout: - def test_stdout_log(self): - if self.language not in ["nodejs"]: - # TODO: other languages to also support this test - return True - if self.environment in ["compute"]: - # No logging agent support in GCE - return True - log_text = f"{inspect.currentframe().f_code.co_name}" - log_list = self.trigger_and_retrieve(log_text, "stdoutlog") - # Note: 2 logs are spawned, use the one containing http_request prop. - found = log_list[-1] - if found.http_request is None: - found = log_list[-2] - # Agents lift fields inconsistently among envs, so check if is expected. - if hasattr(self, 'stdout_log_name'): - self.assertTrue(self.stdout_log_name in found.log_name) - if hasattr(self, 'stdout_severity'): - self.assertEqual(found.severity, self.stdout_severity) - if hasattr(self, 'stdout_insert_id'): - self.assertEqual(found.insert_id, self.stdout_insert_id) - if hasattr(self, 'stdout_timestamp'): - self.assertEqual(found.timestamp, self.stdout_timestamp) - if hasattr(self, 'stdout_trace'): - self.assertTrue(self.stdout_trace in found.trace) - if hasattr(self, 'stdout_span_id'): - self.assertEqual(found.span_id, self.stdout_span_id) - # TODO: uncomment this again once python-logging accepts trace_samples - # if hasattr(self, 'stdout_trace_sampled'): - # self.assertEqual(found.trace_sampled, self.stdout_trace_sampled) - if hasattr(self, 'stdout_labels'): - for prop in self.stdout_labels: - self.assertTrue(found.labels[prop], - f'{prop} is not set') - if hasattr(self, 'stdout_resource_type'): - self.assertEqual(found.resource.type, self.stdout_resource_type) - if hasattr(self, 'stdout_resource_labels'): - for prop in self.stdout_resource_labels: - self.assertTrue(found.resource.labels[prop], - f'{prop} is not set') - if hasattr(self, 'stdout_payload_props'): - for prop in self.stdout_payload_props: - self.assertTrue(found.payload[prop], - f'{prop} is not set') diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py new file mode 100644 index 000000000000..cfc262a4cf89 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest + +from ..common.common import Common + +class TestAppEngineFlex(Common, unittest.TestCase): + + environment = "appengine_flex" + language = "java" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py new file mode 100644 index 000000000000..ab7d4b9e761d --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +from ..common.common import Common + +class TestAppEngineStandard(Common, unittest.TestCase): + + environment = "appengine_standard" + language = "java" + + monitored_resource_name = "gae_app" + monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py new file mode 100644 index 000000000000..b63caebff5af --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py @@ -0,0 +1,35 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect +import uuid + +from ..common.common import Common + +class TestCloudRun(Common, unittest.TestCase): + + environment = "cloudrun" + language = "java" + + monitored_resource_name = "cloud_run_revision" + monitored_resource_labels = [ + "project_id", + "service_name", + "revision_name", + "location", + "configuration_name", + ] + diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py new file mode 100644 index 000000000000..4ee3319faab3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +from ..common.common import Common + +class TestComputeEngine(Common, unittest.TestCase): + + environment = "compute" + language = "java" + + monitored_resource_name = "gce_instance" + monitored_resource_labels = ["instance_id", "zone"] diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py index b3968fa84607..324ff7943b0c 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py index b010b514f8c7..4d4151dd4d3a 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py @@ -18,10 +18,9 @@ import google.cloud.logging from ..common.common import Common -from ..common.stdout import CommonStdout -class TestAppEngineStandard(Common, CommonStdout, unittest.TestCase): +class TestAppEngineStandard(Common, unittest.TestCase): environment = "appengine_standard" language = "nodejs" @@ -29,31 +28,4 @@ class TestAppEngineStandard(Common, CommonStdout, unittest.TestCase): monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] - request_props = [ - "requestMethod", - "requestUrl", - "protocol", - ] - - stdout_payload_props = [ - "message", - "resource", - "timestamp", - "logName", - ] - stdout_severity = "WARNING" - stdout_request_props = request_props - stdout_labels = [ - "foo", - ] - # substring to test for - stdout_trace = "/traces/0679686673a" - stdout_span_id = "000000000000004a" - - # Not lifted properly - # stdout_trace_sampled = "true" - # stdout_insert_id - # stdout_resource_type - # stdout_timestamp - # stdout_log_name: its /logs/stdout diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py index 21f8fcd2dadb..e2d000689d77 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py @@ -21,10 +21,8 @@ from google.cloud.logging_v2.resource import Resource from ..common.common import Common -from ..common.stdout import CommonStdout - -class TestCloudRun(Common, CommonStdout, unittest.TestCase): +class TestCloudRun(Common, unittest.TestCase): environment = "cloudrun" language = "nodejs" @@ -39,30 +37,3 @@ class TestCloudRun(Common, CommonStdout, unittest.TestCase): "configuration_name", ] - 
request_props = [ - "requestMethod", - "requestUrl", - "protocol", - ] - - stdout_payload_props = [ - "message", - "resource", - "timestamp", - "logName", - ] - stdout_severity = "WARNING" - stdout_request_props = request_props - stdout_labels = [ - "foo", - ] - # substring to test for - stdout_trace = "/traces/0679686673a" - stdout_span_id = "000000000000004a" - - # Not lifted properly - # stdout_trace_sampled = "true" - # stdout_insert_id - # stdout_resource_type - # stdout_timestamp - # stdout_log_name: its /logs/stdout diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py index a55672471116..b3258dc1cf9c 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py @@ -19,10 +19,9 @@ import google.cloud.logging from ..common.common import Common -from ..common.stdout import CommonStdout -class TestCloudFunctions(Common, CommonStdout, unittest.TestCase): +class TestCloudFunctions(Common, unittest.TestCase): environment = "functions" language = "nodejs" @@ -34,33 +33,3 @@ class TestCloudFunctions(Common, CommonStdout, unittest.TestCase): "project_id", ] - request_props = [ - "requestMethod", - "requestUrl", - "protocol", - ] - - stdout_payload_props = [ - "message", - "resource", - "timestamp", - "logName", - ] - stdout_severity = "WARNING" - stdout_request_props = request_props - stdout_labels = [ - "foo", - # Nicely inserted by the agent - "execution_id", - ] - # Randomly dropped by Functions agent: - # stdout_insert_id = '42' - # stdout_trace = /traces/0679686673a' - # stdout_span_id = '000000000000004a' - # stdout_trace_sampled = 'true' - # ============================= - # Not lifted and just left in JSONPayload: - # stdout_resource_type - # stdout_resource_labels - # stdout_log_name - # stdout_timestamp diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py index 2cb0d3129ab4..e6df2a018215 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py @@ -19,10 +19,8 @@ import google.cloud.logging from ..common.common import Common -from ..common.stdout import CommonStdout - -class TestKubernetesEngine(Common, CommonStdout, unittest.TestCase): +class TestKubernetesEngine(Common, unittest.TestCase): environment = "kubernetes" language = "nodejs" @@ -30,30 +28,3 @@ class TestKubernetesEngine(Common, CommonStdout, unittest.TestCase): monitored_resource_name = "k8s_container" monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] - request_props = [ - "requestMethod", - "requestUrl", - "protocol", - ] - - stdout_payload_props = [ - "message", - "resource", - "timestamp", - "logName", - ] - stdout_severity = "WARNING" - stdout_request_props = request_props - stdout_labels = [ - "foo", - ] - stdout_insert_id = "42" - # substring to test for - stdout_trace = "/traces/0679686673a" - stdout_span_id = "000000000000004a" - stdout_trace_sampled = "true" - - # Not lifted and just left in JSONPayload: - # stdout_resource_type - # stdout_timestamp - # stdout_log_name: in GKE it looks like /logs/stdout. 
weird diff --git a/tests/environment b/tests/environment index 17b7a4690832..dc85066053b8 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 17b7a46908320891605908d5baa5f32eb255380e +Subproject commit dc85066053b8dc2246c8b72f93a5b97f92885eb2 From 06b5009bcb306c6d0cf4ed2f4c1422d7a4e4b62f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Nov 2021 19:51:41 +0100 Subject: [PATCH 557/855] chore(deps): update dependency google-cloud-appengine-logging to >=0.1.0, <2.0.0 (#432) --- packages/google-cloud-logging/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 65cfc02c1934..f7a45df105fb 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -32,8 +32,8 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "google-cloud-appengine-logging>=0.1.0, <1.0.1", + "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed From e980ff5f0fce98e653c9c3dbdefb9a3c88609077 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 2 Nov 2021 14:50:44 -0700 Subject: [PATCH 558/855] chore(docs): added audit log sample to usage guide (#428) --- packages/google-cloud-logging/docs/usage.rst | 9 +++++++ .../samples/snippets/usage_guide.py | 27 +++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 1ea9440fcd26..1fde3d8ea1fd 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -84,6 +84,15 @@ Retrieve entries for a single logger, sorting in descending timestamp order: :end-before: [END logger_list_entries] :dedent: 4 +And as a practical example, retrieve all `GKE Admin Activity audit logs`_ +from the past 24 hours: + +.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project + +.. 
literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_list_gke_audit_logs] + :end-before: [END logging_list_gke_audit_logs] + :dedent: 4 Delete all entries for a logger ------------------------------- diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index c931ed167977..e519c75c1a35 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -71,6 +71,33 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument # [END client_list_entries_order_by] break + # [START logging_list_gke_audit_logs] + import google.cloud.logging + from datetime import datetime, timedelta, timezone + import os + + # pull your project id from an environment variable + project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + # construct a date object representing yesterday + yesterday = datetime.now(timezone.utc) - timedelta(days=1) + # Cloud Logging expects a timestamp in RFC3339 UTC "Zulu" format + # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry + time_format = "%Y-%m-%dT%H:%M:%S.%f%z" + # build a filter that returns GKE Admin Activity audit Logs from + # the past 24 hours + # https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging + filter_str = ( + f'logName="projects/{project_id}/logs/cloudaudit.googleapis.com%2Factivity"' + f' AND resource.type="k8s_cluster"' + f' AND timestamp>="{yesterday.strftime(time_format)}"' + ) + # query and print all matching logs + client = google.cloud.logging.Client() + for entry in client.list_entries(filter_=filter_str): + print(entry) + # [END logging_list_gke_audit_logs] + break # we don't really need to print them all + @snippet def logger_usage(client, to_delete): From 9d9f5de075b59ab7a63a099d2e22d976ca6e9841 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Nov 2021 15:19:10 -0700 Subject: [PATCH 559/855] chore: release 2.7.0 (#413) --- packages/google-cloud-logging/CHANGELOG.md | 26 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 4586148505f3..068ad3df2dc1 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,32 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.7.0](https://www.github.com/googleapis/python-logging/compare/v2.6.0...v2.7.0) (2021-11-02) + + +### Features + +* add context manager support in client ([#415](https://www.github.com/googleapis/python-logging/issues/415)) ([f5af164](https://www.github.com/googleapis/python-logging/commit/f5af16439807a0954ee78fa91cb69b9493b80176)) +* added support for iam AuditData proto ([#396](https://www.github.com/googleapis/python-logging/issues/396)) ([e3a1eba](https://www.github.com/googleapis/python-logging/commit/e3a1eba74dd8b67bcc73a78f784189ef2a9927c2)) +* use structured logging on GCF with python 3.7 ([#434](https://www.github.com/googleapis/python-logging/issues/434)) ([5055919](https://www.github.com/googleapis/python-logging/commit/5055919f70c82b38de6d1fa7f1df6006865a857b)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([76ac729](https://www.github.com/googleapis/python-logging/commit/76ac729e42a782524be87ad71745aad37bbe1653)) 
+* add 'dict' annotation type to 'request' ([23f9e1f](https://www.github.com/googleapis/python-logging/commit/23f9e1f6e9af30c4e65578edbf73c8c893b35285)) +* **deps:** drop packaging dependency ([9d38995](https://www.github.com/googleapis/python-logging/commit/9d389958c7de31ae9e21228eaf965762b31d5e48)) +* **deps:** require google-api-core >= 1.28.0 ([9d38995](https://www.github.com/googleapis/python-logging/commit/9d389958c7de31ae9e21228eaf965762b31d5e48)) +* **deps:** require proto-plus==1.15.0 ([76ac729](https://www.github.com/googleapis/python-logging/commit/76ac729e42a782524be87ad71745aad37bbe1653)) +* exception log message format ([#394](https://www.github.com/googleapis/python-logging/issues/394)) ([c426bf5](https://www.github.com/googleapis/python-logging/commit/c426bf56787fa02140e8aa142ecd4d4e45432697)) +* improper types in pagers generation ([76ac729](https://www.github.com/googleapis/python-logging/commit/76ac729e42a782524be87ad71745aad37bbe1653)) +* improper types in pagers generation ([6a837a5](https://www.github.com/googleapis/python-logging/commit/6a837a5d1faab1f9fa8ac94e424e847821a0069f)) + + +### Documentation + +* list oneofs in docstring ([9d38995](https://www.github.com/googleapis/python-logging/commit/9d389958c7de31ae9e21228eaf965762b31d5e48)) + ## [2.6.0](https://www.github.com/googleapis/python-logging/compare/v2.5.0...v2.6.0) (2021-07-28) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index f7a45df105fb..b31ae8f7800d 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.6.0" +version = "2.7.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 99535794f0e0c1c970822179f9e41112273c2e03 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 3 Nov 2021 18:53:02 +0100 Subject: [PATCH 560/855] chore(deps): update all dependencies (#436) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index c3c03d97043d..81596164b0d9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.6.0 -google-cloud-bigquery==2.28.1 +google-cloud-logging==2.7.0 +google-cloud-bigquery==2.29.0 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 393120b84f1cb3643a01a5bac275f4529f2bc471 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Nov 2021 11:36:47 +0100 Subject: [PATCH 561/855] chore(deps): update dependency google-cloud-bigquery to v2.30.0 (#437) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 81596164b0d9..050b1ed8fee6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.29.0 +google-cloud-bigquery==2.30.0 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 
567b3260aa748832d93bee83b6c039145c5a1e7f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Nov 2021 10:54:50 +0100 Subject: [PATCH 562/855] chore(deps): update all dependencies (#438) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 050b1ed8fee6..f5941a7d239d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.30.0 +google-cloud-bigquery==2.30.1 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 41eb85ba1e4996b86893a122163a842bad1e75c4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 17:58:21 -0500 Subject: [PATCH 563/855] chore: use gapic-generator-python 0.56.2 (#440) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 13 +- .../services/config_service_v2/client.py | 25 ++- .../config_service_v2/transports/base.py | 8 +- .../config_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../logging_service_v2/async_client.py | 13 +- .../services/logging_service_v2/client.py | 25 ++- .../logging_service_v2/transports/base.py | 8 +- .../logging_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../metrics_service_v2/async_client.py | 13 +- .../services/metrics_service_v2/client.py | 25 ++- .../metrics_service_v2/transports/base.py | 8 +- .../metrics_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../cloud/logging_v2/types/log_entry.py | 3 + .../cloud/logging_v2/types/logging_config.py | 1 + .../logging_v2/test_config_service_v2.py | 144 +++++++++++++----- .../logging_v2/test_logging_service_v2.py | 76 ++++++--- .../logging_v2/test_metrics_service_v2.py | 56 +++++-- 20 files changed, 307 insertions(+), 135 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 23db3b5c1b9b..2be78b15b7ae 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from 
google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index e2c8fe0a5707..0de49b68f332 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config @@ -336,8 +338,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
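# The hunk below is the recurring change in this commit: the generated
# clients stop parsing GOOGLE_API_USE_CLIENT_CERTIFICATE with
# `distutils.util.strtobool` (distutils is deprecated by PEP 632 and on
# its way out of the standard library) and validate the variable
# explicitly instead. A minimal standalone sketch of the replacement
# logic, for illustration only:
#
#     import os
#
#     value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
#     if value not in ("true", "false"):
#         raise ValueError(
#             "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
#             "must be either `true` or `false`"
#         )
#     use_client_cert = value == "true"
#
# The `OptionalRetry` hunks earlier in this patch follow the same
# defensive style: they prefer `gapic_v1.method._MethodDefault` as the
# sentinel type when the installed google-api-core exposes it, and fall
# back to a plain `object` annotation on older releases.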
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 00557f640b00..2f41a0107985 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index cd06eac4151c..b34d0a12130b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 54615a12524e..1cf4f3121c57 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 0b927dea913a..325fe71931b8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -28,14 +28,17 @@ ) import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 8e64d902478f..44ec1a85f142 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -280,8 +282,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 556488467cc7..cfafe30e4ec2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 146f97cbb39e..0379cbecff42 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 84e765cf4fdf..16602c2b42fc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 1e3213a4c3f1..f034fd9f5859 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9290d62cc799..2f339a130185 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -283,8 +285,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
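# The same explicit check lands in the metrics client below. Note the
# behavioral tightening: `strtobool` accepted a wider set of spellings
# ("1", "yes", "True", ...), while the new code accepts only the exact
# strings "true" and "false" and raises ValueError for anything else.
# A quick sketch of the difference, with hypothetical values:
#
#     os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"  # accepted
#     os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "1"     # rejected at client construction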
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index e3105748a9fe..7137678ab9f0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 4dc00d79df14..194d341f32ef 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 956f53b1d31f..37cec4a63d68 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py 
index 99331d327ae9..93e428622180 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -87,14 +87,17 @@ class LogEntry(proto.Message): "type.googleapis.com/google.cloud.audit.AuditLog" "type.googleapis.com/google.appengine.logging.v1.RequestLog". + This field is a member of `oneof`_ ``payload``. text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). + This field is a member of `oneof`_ ``payload``. json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. + This field is a member of `oneof`_ ``payload``. timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 0724911b90f6..3ea70506c6ca 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -241,6 +241,7 @@ class LogSink(proto.Message): bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. + This field is a member of `oneof`_ ``options``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 0d96a08fc83d..054982f12460 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -642,7 +642,9 @@ def test_list_buckets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_buckets_flattened_error(): @@ -678,7 +680,9 @@ async def test_list_buckets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1706,7 +1710,9 @@ def test_list_views_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_views_flattened_error(): @@ -1742,7 +1748,9 @@ async def test_list_views_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2610,7 +2618,9 @@ def test_list_sinks_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_sinks_flattened_error(): @@ -2646,7 +2656,9 @@ async def test_list_sinks_flattened_async(): # request object values. 
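# The assertion rewrite repeated throughout these generated tests binds
# each side of the comparison to a local before asserting, presumably for
# clearer failure output and simpler code generation. The pattern, as it
# appears above:
#
#     arg = args[0].parent        # field set on the request by the flattened call
#     mock_val = "parent_value"   # value the test passed in
#     assert arg == mock_val
#
# in place of the single-line `assert args[0].parent == "parent_value"`.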
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2987,7 +2999,9 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val def test_get_sink_flattened_error(): @@ -3023,7 +3037,9 @@ async def test_get_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3226,8 +3242,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val def test_create_sink_flattened_error(): @@ -3267,8 +3287,12 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -3475,9 +3499,15 @@ def test_update_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_sink_flattened_error(): @@ -3520,9 +3550,15 @@ async def test_update_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -3685,7 +3721,9 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val def test_delete_sink_flattened_error(): @@ -3719,7 +3757,9 @@ async def test_delete_sink_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3889,7 +3929,9 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_exclusions_flattened_error(): @@ -3925,7 +3967,9 @@ async def test_list_exclusions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -4269,7 +4313,9 @@ def test_get_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_exclusion_flattened_error(): @@ -4305,7 +4351,9 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -4490,8 +4538,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val def test_create_exclusion_flattened_error(): @@ -4532,8 +4584,12 @@ async def test_create_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -4721,9 +4777,15 @@ def test_update_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_exclusion_flattened_error(): @@ -4766,9 +4828,15 @@ async def test_update_exclusion_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -4931,7 +4999,9 @@ def test_delete_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_exclusion_flattened_error(): @@ -4965,7 +5035,9 @@ async def test_delete_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 5d6021f9aeb3..eac24ed2c651 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -639,7 +639,9 @@ def test_delete_log_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val def test_delete_log_flattened_error(): @@ -673,7 +675,9 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -797,12 +801,18 @@ def test_write_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - assert args[0].resource == monitored_resource_pb2.MonitoredResource( - type="type__value" - ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name="log_name_value")] + assert arg == mock_val def test_write_log_entries_flattened_error(): @@ -849,12 +859,18 @@ async def test_write_log_entries_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - assert args[0].resource == monitored_resource_pb2.MonitoredResource( - type="type__value" - ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name="log_name_value")] + assert arg == mock_val @pytest.mark.asyncio @@ -977,9 +993,15 @@ def test_list_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" + arg = args[0].resource_names + mock_val = ["resource_names_value"] + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + arg = args[0].order_by + mock_val = "order_by_value" + assert arg == mock_val def test_list_log_entries_flattened_error(): @@ -1022,9 +1044,15 @@ async def test_list_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" + arg = args[0].resource_names + mock_val = ["resource_names_value"] + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + arg = args[0].order_by + mock_val = "order_by_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1616,7 +1644,9 @@ def test_list_logs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_logs_flattened_error(): @@ -1652,7 +1682,9 @@ async def test_list_logs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 9c14746d08cf..1d42212a1372 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -649,7 +649,9 @@ def test_list_log_metrics_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_log_metrics_flattened_error(): @@ -685,7 +687,9 @@ async def test_list_log_metrics_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1013,7 +1017,9 @@ def test_get_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val def test_get_log_metric_flattened_error(): @@ -1049,7 +1055,9 @@ async def test_get_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1249,8 +1257,12 @@ def test_create_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val def test_create_log_metric_flattened_error(): @@ -1292,8 +1304,12 @@ async def test_create_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1496,8 +1512,12 @@ def test_update_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val def test_update_log_metric_flattened_error(): @@ -1540,8 +1560,12 @@ async def test_update_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1715,7 +1739,9 @@ def test_delete_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val def test_delete_log_metric_flattened_error(): @@ -1751,7 +1777,9 @@ async def test_delete_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val @pytest.mark.asyncio From ae47ea48e0e43e7b7c66bfc3130c9486b5f747c6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Nov 2021 02:09:40 +0100 Subject: [PATCH 564/855] chore(deps): update dependency google-cloud-pubsub to v2.9.0 (#441) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f5941a7d239d..122299946bbb 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.30.1 google-cloud-storage==1.42.3 -google-cloud-pubsub==2.8.0 +google-cloud-pubsub==2.9.0 From 77bbca05da9a366326378d95b4b46674906a3f20 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 12:25:34 -0500 Subject: [PATCH 565/855] chore(python): add .github/CODEOWNERS as a templated file (#442) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index cb89b2e326b7..7519fa3a2289 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS index 64d2aaa367da..21b5b8b8c530 100644 --- a/packages/google-cloud-logging/.github/CODEOWNERS +++ b/packages/google-cloud-logging/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/api-logging @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-logging -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners \ No newline at end of file +# @googleapis/python-samples-owners @googleapis/api-logging are the default owners for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/api-logging From 810a35a21f67d6c4806b8d940f4e9399a0426384 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Nov 2021 19:03:59 +0100 Subject: [PATCH 566/855] chore(deps): update dependency google-cloud-storage to v1.43.0 (#449) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 122299946bbb..43a218aa5ca7 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.30.1 -google-cloud-storage==1.42.3 +google-cloud-storage==1.43.0 google-cloud-pubsub==2.9.0 From 531d2a5856d8a9bdb14b8ec30a179510d91d07a6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Dec 2021 11:53:38 +0100 Subject: [PATCH 567/855] chore(deps): update dependency google-cloud-bigquery to v2.31.0 (#451) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 43a218aa5ca7..fa8f6386ff38 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 google-cloud-storage==1.43.0 google-cloud-pubsub==2.9.0 From 653d08686e2b1430daa379463e8d2e9da917174d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:20:31 -0500 Subject: [PATCH 568/855] chore: update .repo-metadata.json (#457) --- packages/google-cloud-logging/.repo-metadata.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index fdb0a66c2065..9dac57e33f15 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -2,14 +2,15 @@ "name": "logging", "name_pretty": "Cloud Logging", "product_documentation": "https://cloud.google.com/logging/docs", - "client_documentation": "https://googleapis.dev/python/logging/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/logging/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", "codeowner_team": "@googleapis/api-logging", - "default_version": "v2" + "default_version": 
"v2", + "api_shortname": "logging" } From f7ffabcf7482c2df82797dd7e29d3ec8b4e5d122 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 17:10:12 +0000 Subject: [PATCH 569/855] chore: use python-samples-reviewers (#461) --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.github/CODEOWNERS | 4 ++-- packages/google-cloud-logging/samples/AUTHORING_GUIDE.md | 2 +- packages/google-cloud-logging/samples/CONTRIBUTING.md | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 7519fa3a2289..f33299ddbbab 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS index 21b5b8b8c530..2a3b42055693 100644 --- a/packages/google-cloud-logging/.github/CODEOWNERS +++ b/packages/google-cloud-logging/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/api-logging -# @googleapis/python-samples-owners @googleapis/api-logging are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-logging +# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging diff --git a/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md b/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md index 55c97b32f4c1..8249522ffc2d 100644 --- a/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md +++ b/packages/google-cloud-logging/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/CONTRIBUTING.md b/packages/google-cloud-logging/samples/CONTRIBUTING.md index 34c882b6f1a3..f5fe2e6baf13 100644 --- a/packages/google-cloud-logging/samples/CONTRIBUTING.md +++ b/packages/google-cloud-logging/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file From 283403ffc5167f10dc61713c41bb798f3b627e23 Mon Sep 17 00:00:00 2001 From: losalex <90795544+losalex@users.noreply.github.com> Date: Thu, 6 Jan 2022 12:38:25 -0800 Subject: [PATCH 570/855] chore: enable staleness and pull request size bots on repository (#458) --- packages/google-cloud-logging/.github/auto-label.yaml | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/google-cloud-logging/.github/auto-label.yaml diff --git a/packages/google-cloud-logging/.github/auto-label.yaml 
b/packages/google-cloud-logging/.github/auto-label.yaml new file mode 100644 index 000000000000..1e4706499b3a --- /dev/null +++ b/packages/google-cloud-logging/.github/auto-label.yaml @@ -0,0 +1,7 @@ +product: true +requestsize: + enabled: true +staleness: + pullrequest: true + old: 30 + extraold: 60 From 2faeb95f1676c78298ef7a48b8324ad77bbafc7a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 8 Jan 2022 06:14:25 -0500 Subject: [PATCH 571/855] chore: use gapic-generator-python 0.58.4 (#459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../config_service_v2/transports/base.py | 1 - .../logging_service_v2/transports/base.py | 1 - .../metrics_service_v2/transports/base.py | 1 - .../logging_v2/test_config_service_v2.py | 267 ++++++------------ .../logging_v2/test_logging_service_v2.py | 103 +++---- .../logging_v2/test_metrics_service_v2.py | 77 ++--- 6 files changed, 163 insertions(+), 287 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 2f41a0107985..90e3054882ac 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -105,7 +105,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index cfafe30e4ec2..6fe2e9e8a46c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 7137678ab9f0..fef40f239565 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 054982f12460..5a82cec81ebf 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -250,20 +250,20 @@ def test_config_service_v2_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -332,7 +332,7 @@ def test_config_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -427,7 +427,7 @@ def test_config_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -458,7 +458,7 @@ def test_config_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -491,9 +491,8 @@ def test_config_service_v2_client_client_options_from_dict(): ) -def test_list_buckets( - transport: str = "grpc", request_type=logging_config.ListBucketsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) +def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -520,10 
+519,6 @@ def test_list_buckets( assert response.next_page_token == "next_page_token_value" -def test_list_buckets_from_dict(): - test_list_buckets(request_type=dict) - - def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -699,8 +694,10 @@ async def test_list_buckets_flattened_error_async(): ) -def test_list_buckets_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_buckets_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -737,8 +734,10 @@ def test_list_buckets_pager(): assert all(isinstance(i, logging_config.LogBucket) for i in results) -def test_list_buckets_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_buckets_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -841,9 +840,8 @@ async def test_list_buckets_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_bucket( - transport: str = "grpc", request_type=logging_config.GetBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetBucketRequest, dict,]) +def test_get_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -878,10 +876,6 @@ def test_get_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) - - def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -995,9 +989,8 @@ async def test_get_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_create_bucket( - transport: str = "grpc", request_type=logging_config.CreateBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateBucketRequest, dict,]) +def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1032,10 +1025,6 @@ def test_create_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_create_bucket_from_dict(): - test_create_bucket(request_type=dict) - - def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1149,9 +1138,8 @@ async def test_create_bucket_field_headers_async(): assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_bucket( - transport: str = "grpc", request_type=logging_config.UpdateBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateBucketRequest, dict,]) +def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1186,10 +1174,6 @@ def test_update_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_update_bucket_from_dict(): - test_update_bucket(request_type=dict) - - def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1303,9 +1287,8 @@ async def test_update_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_delete_bucket( - transport: str = "grpc", request_type=logging_config.DeleteBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteBucketRequest, dict,]) +def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1329,10 +1312,6 @@ def test_delete_bucket( assert response is None -def test_delete_bucket_from_dict(): - test_delete_bucket(request_type=dict) - - def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1431,9 +1410,8 @@ async def test_delete_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_undelete_bucket( - transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UndeleteBucketRequest, dict,]) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1457,10 +1435,6 @@ def test_undelete_bucket( assert response is None -def test_undelete_bucket_from_dict(): - test_undelete_bucket(request_type=dict) - - def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1559,9 +1533,8 @@ async def test_undelete_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_list_views( - transport: str = "grpc", request_type=logging_config.ListViewsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListViewsRequest, dict,]) +def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1588,10 +1561,6 @@ def test_list_views( assert response.next_page_token == "next_page_token_value" -def test_list_views_from_dict(): - test_list_views(request_type=dict) - - def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1767,8 +1736,10 @@ async def test_list_views_flattened_error_async(): ) -def test_list_views_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_views_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1805,8 +1776,10 @@ def test_list_views_pager(): assert all(isinstance(i, logging_config.LogView) for i in results) -def test_list_views_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_views_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1909,7 +1882,8 @@ async def test_list_views_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): +@pytest.mark.parametrize("request_type", [logging_config.GetViewRequest, dict,]) +def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1938,10 +1912,6 @@ def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRe assert response.filter == "filter_value" -def test_get_view_from_dict(): - test_get_view(request_type=dict) - - def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2051,9 +2021,8 @@ async def test_get_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_create_view( - transport: str = "grpc", request_type=logging_config.CreateViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateViewRequest, dict,]) +def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2082,10 +2051,6 @@ def test_create_view( assert response.filter == "filter_value" -def test_create_view_from_dict(): - test_create_view(request_type=dict) - - def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2195,9 +2160,8 @@ async def test_create_view_field_headers_async(): assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_view( - transport: str = "grpc", request_type=logging_config.UpdateViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateViewRequest, dict,]) +def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2226,10 +2190,6 @@ def test_update_view( assert response.filter == "filter_value" -def test_update_view_from_dict(): - test_update_view(request_type=dict) - - def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2339,9 +2299,8 @@ async def test_update_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_delete_view( - transport: str = "grpc", request_type=logging_config.DeleteViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteViewRequest, dict,]) +def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2365,10 +2324,6 @@ def test_delete_view( assert response is None -def test_delete_view_from_dict(): - test_delete_view(request_type=dict) - - def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2467,9 +2422,8 @@ async def test_delete_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_list_sinks( - transport: str = "grpc", request_type=logging_config.ListSinksRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListSinksRequest, dict,]) +def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2496,10 +2450,6 @@ def test_list_sinks( assert response.next_page_token == "next_page_token_value" -def test_list_sinks_from_dict(): - test_list_sinks(request_type=dict) - - def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2675,8 +2625,10 @@ async def test_list_sinks_flattened_error_async(): ) -def test_list_sinks_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_sinks_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2713,8 +2665,10 @@ def test_list_sinks_pager(): assert all(isinstance(i, logging_config.LogSink) for i in results) -def test_list_sinks_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_sinks_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2817,7 +2771,8 @@ async def test_list_sinks_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest): +@pytest.mark.parametrize("request_type", [logging_config.GetSinkRequest, dict,]) +def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2861,10 +2816,6 @@ def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRe assert response.include_children is True -def test_get_sink_from_dict(): - test_get_sink(request_type=dict) - - def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3056,9 +3007,8 @@ async def test_get_sink_flattened_error_async(): ) -def test_create_sink( - transport: str = "grpc", request_type=logging_config.CreateSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateSinkRequest, dict,]) +def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3102,10 +3052,6 @@ def test_create_sink( assert response.include_children is True -def test_create_sink_from_dict(): - test_create_sink(request_type=dict) - - def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3311,9 +3257,8 @@ async def test_create_sink_flattened_error_async(): ) -def test_update_sink( - transport: str = "grpc", request_type=logging_config.UpdateSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateSinkRequest, dict,]) +def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3357,10 +3302,6 @@ def test_update_sink( assert response.include_children is True -def test_update_sink_from_dict(): - test_update_sink(request_type=dict) - - def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3578,9 +3519,8 @@ async def test_update_sink_flattened_error_async(): ) -def test_delete_sink( - transport: str = "grpc", request_type=logging_config.DeleteSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteSinkRequest, dict,]) +def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3604,10 +3544,6 @@ def test_delete_sink( assert response is None -def test_delete_sink_from_dict(): - test_delete_sink(request_type=dict) - - def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3776,9 +3712,8 @@ async def test_delete_sink_flattened_error_async(): ) -def test_list_exclusions( - transport: str = "grpc", request_type=logging_config.ListExclusionsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListExclusionsRequest, dict,]) +def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3805,10 +3740,6 @@ def test_list_exclusions( assert response.next_page_token == "next_page_token_value" -def test_list_exclusions_from_dict(): - test_list_exclusions(request_type=dict) - - def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3986,8 +3917,10 @@ async def test_list_exclusions_flattened_error_async(): ) -def test_list_exclusions_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_exclusions_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4029,8 +3962,10 @@ def test_list_exclusions_pager(): assert all(isinstance(i, logging_config.LogExclusion) for i in results) -def test_list_exclusions_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_exclusions_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4148,9 +4083,8 @@ async def test_list_exclusions_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_exclusion( - transport: str = "grpc", request_type=logging_config.GetExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetExclusionRequest, dict,]) +def test_get_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4183,10 +4117,6 @@ def test_get_exclusion( assert response.disabled is True -def test_get_exclusion_from_dict(): - test_get_exclusion(request_type=dict) - - def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4370,9 +4300,8 @@ async def test_get_exclusion_flattened_error_async(): ) -def test_create_exclusion( - transport: str = "grpc", request_type=logging_config.CreateExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateExclusionRequest, dict,]) +def test_create_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4405,10 +4334,6 @@ def test_create_exclusion( assert response.disabled is True -def test_create_exclusion_from_dict(): - test_create_exclusion(request_type=dict) - - def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4608,9 +4533,8 @@ async def test_create_exclusion_flattened_error_async(): ) -def test_update_exclusion( - transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateExclusionRequest, dict,]) +def test_update_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4643,10 +4567,6 @@ def test_update_exclusion( assert response.disabled is True -def test_update_exclusion_from_dict(): - test_update_exclusion(request_type=dict) - - def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -4856,9 +4776,8 @@ async def test_update_exclusion_flattened_error_async(): ) -def test_delete_exclusion( - transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteExclusionRequest, dict,]) +def test_delete_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4882,10 +4801,6 @@ def test_delete_exclusion( assert response is None -def test_delete_exclusion_from_dict(): - test_delete_exclusion(request_type=dict) - - def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5054,9 +4969,8 @@ async def test_delete_exclusion_flattened_error_async(): ) -def test_get_cmek_settings( - transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetCmekSettingsRequest, dict,]) +def test_get_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5089,10 +5003,6 @@ def test_get_cmek_settings( assert response.service_account_id == "service_account_id_value" -def test_get_cmek_settings_from_dict(): - test_get_cmek_settings(request_type=dict) - - def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5210,9 +5120,10 @@ async def test_get_cmek_settings_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_update_cmek_settings( - transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest -): +@pytest.mark.parametrize( + "request_type", [logging_config.UpdateCmekSettingsRequest, dict,] +) +def test_update_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5245,10 +5156,6 @@ def test_update_cmek_settings( assert response.service_account_id == "service_account_id_value" -def test_update_cmek_settings_from_dict(): - test_update_cmek_settings(request_type=dict) - - def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -6009,7 +5916,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index eac24ed2c651..f73ef775cda8 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -257,20 +257,20 @@ def test_logging_service_v2_client_client_options( # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -339,7 +339,7 @@ def test_logging_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -434,7 +434,7 @@ def test_logging_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -465,7 +465,7 @@ def test_logging_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -498,7 +498,8 @@ def test_logging_service_v2_client_client_options_from_dict(): ) -def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): +@pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) +def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -522,10 +523,6 @@ def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogReque assert response is None -def test_delete_log_from_dict(): - test_delete_log(request_type=dict) - - def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -694,9 +691,8 @@ async def test_delete_log_flattened_error_async(): ) -def test_write_log_entries( - transport: str = "grpc", request_type=logging.WriteLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.WriteLogEntriesRequest, dict,]) +def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -722,10 +718,6 @@ def test_write_log_entries( assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_from_dict(): - test_write_log_entries(request_type=dict) - - def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -891,9 +883,8 @@ async def test_write_log_entries_flattened_error_async(): ) -def test_list_log_entries( - transport: str = "grpc", request_type=logging.ListLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.ListLogEntriesRequest, dict,]) +def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -920,10 +911,6 @@ def test_list_log_entries( assert response.next_page_token == "next_page_token_value" -def test_list_log_entries_from_dict(): - test_list_log_entries(request_type=dict) - - def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1072,8 +1059,10 @@ async def test_list_log_entries_flattened_error_async(): ) -def test_list_log_entries_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_entries_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1107,8 +1096,10 @@ def test_list_log_entries_pager(): assert all(isinstance(i, log_entry.LogEntry) for i in results) -def test_list_log_entries_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_entries_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1211,10 +1202,10 @@ async def test_list_log_entries_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_monitored_resource_descriptors( - transport: str = "grpc", - request_type=logging.ListMonitoredResourceDescriptorsRequest, -): +@pytest.mark.parametrize( + "request_type", [logging.ListMonitoredResourceDescriptorsRequest, dict,] +) +def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1243,10 +1234,6 @@ def test_list_monitored_resource_descriptors( assert response.next_page_token == "next_page_token_value" -def test_list_monitored_resource_descriptors_from_dict(): - test_list_monitored_resource_descriptors(request_type=dict) - - def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1304,8 +1291,10 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): await test_list_monitored_resource_descriptors_async(request_type=dict) -def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1352,8 +1341,10 @@ def test_list_monitored_resource_descriptors_pager(): ) -def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1491,7 +1482,8 @@ async def test_list_monitored_resource_descriptors_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): +@pytest.mark.parametrize("request_type", [logging.ListLogsRequest, dict,]) +def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1519,10 +1511,6 @@ def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest assert response.next_page_token == "next_page_token_value" -def test_list_logs_from_dict(): - test_list_logs(request_type=dict) - - def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1701,8 +1689,10 @@ async def test_list_logs_flattened_error_async(): ) -def test_list_logs_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_logs_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1730,8 +1720,10 @@ def test_list_logs_pager(): assert all(isinstance(i, str) for i in results) -def test_list_logs_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_logs_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1807,9 +1799,8 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token -def test_tail_log_entries( - transport: str = "grpc", request_type=logging.TailLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.TailLogEntriesRequest, dict,]) +def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1835,10 +1826,6 @@ def test_tail_log_entries( assert isinstance(message, logging.TailLogEntriesResponse) -def test_tail_log_entries_from_dict(): - test_tail_log_entries(request_type=dict) - - @pytest.mark.asyncio async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest @@ -2415,7 +2402,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 1d42212a1372..dcb87b2ab72d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -255,20 +255,20 @@ def test_metrics_service_v2_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -337,7 +337,7 @@ def test_metrics_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -432,7 +432,7 @@ def test_metrics_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -463,7 +463,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -496,9 +496,8 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) -def test_list_log_metrics( - transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest -): +@pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) +def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -525,10 +524,6 @@ def test_list_log_metrics( assert response.next_page_token == "next_page_token_value" -def test_list_log_metrics_from_dict(): - test_list_log_metrics(request_type=dict) - - def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -706,8 +701,10 @@ async def test_list_log_metrics_flattened_error_async(): ) -def test_list_log_metrics_pager(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_metrics_pager(transport_name: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -744,8 +741,10 @@ def test_list_log_metrics_pager(): assert all(isinstance(i, logging_metrics.LogMetric) for i in results) -def test_list_log_metrics_pages(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_metrics_pages(transport_name: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -848,9 +847,8 @@ async def test_list_log_metrics_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_log_metric( - transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest -): +@pytest.mark.parametrize("request_type", [logging_metrics.GetLogMetricRequest, dict,]) +def test_get_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -885,10 +883,6 @@ def test_get_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_from_dict(): - test_get_log_metric(request_type=dict) - - def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1074,9 +1068,10 @@ async def test_get_log_metric_flattened_error_async(): ) -def test_create_log_metric( - transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.CreateLogMetricRequest, dict,] +) +def test_create_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1113,10 +1108,6 @@ def test_create_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_from_dict(): - test_create_log_metric(request_type=dict) - - def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1328,9 +1319,10 @@ async def test_create_log_metric_flattened_error_async(): ) -def test_update_log_metric( - transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.UpdateLogMetricRequest, dict,] +) +def test_update_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1367,10 +1359,6 @@ def test_update_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_from_dict(): - test_update_log_metric(request_type=dict) - - def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1584,9 +1572,10 @@ async def test_update_log_metric_flattened_error_async(): ) -def test_delete_log_metric( - transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.DeleteLogMetricRequest, dict,] +) +def test_delete_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1612,10 +1601,6 @@ def test_delete_log_metric( assert response is None -def test_delete_log_metric_from_dict(): - test_delete_log_metric(request_type=dict) - - def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2336,7 +2321,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From a7dbe8b4a292c878d2390cb3c11dd847e70cba2e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 10:35:23 -0500 Subject: [PATCH 572/855] chore(samples): Add check for tests in directory (#463) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 70 +++++++++++-------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index f33299ddbbab..6b8a73b31465 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", 
_get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 2d8011db52660f10390fa38dd6722fa7480a4109 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 12:03:12 -0500 Subject: [PATCH 573/855] build: switch to release-please for tagging (#464) Source-Link: https://github.com/googleapis/synthtool/commit/f8077d237e0df2cb0066dfc6e09fc41e1c59646a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.github/release-please.yml | 1 + packages/google-cloud-logging/.github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-logging/.github/release-trigger.yml diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 6b8a73b31465..ff5126c188d0 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/packages/google-cloud-logging/.github/release-please.yml b/packages/google-cloud-logging/.github/release-please.yml index 4507ad0598a5..466597e5b196 100644 --- a/packages/google-cloud-logging/.github/release-please.yml +++ b/packages/google-cloud-logging/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/packages/google-cloud-logging/.github/release-trigger.yml b/packages/google-cloud-logging/.github/release-trigger.yml new file mode 
100644 index 000000000000..d4ca94189e16 --- /dev/null +++ b/packages/google-cloud-logging/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 91d3109b268400cfe2fbb81b408250f795c1fee7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 14 Jan 2022 22:24:14 +0100 Subject: [PATCH 574/855] chore(deps): update dependency google-cloud-storage to v1.44.0 (#460) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index fa8f6386ff38..283735f7af92 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.31.0 -google-cloud-storage==1.43.0 +google-cloud-storage==1.44.0 google-cloud-pubsub==2.9.0 From c6f6326b4998bfe25e3c4d5fd63a41ae0184f517 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 16 Jan 2022 15:26:12 +0100 Subject: [PATCH 575/855] chore(deps): update all dependencies (#466) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 283735f7af92..d75e274c2621 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.31.0 -google-cloud-storage==1.44.0 +google-cloud-bigquery==2.32.0 +google-cloud-storage==2.0.0 google-cloud-pubsub==2.9.0 From 1bb26ef75ca569a282715b6c3e00c7fc32d106c9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 Jan 2022 10:28:20 -0500 Subject: [PATCH 576/855] chore(python): update release.sh to use keystore (#465) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.kokoro/release.sh | 2 +- .../google-cloud-logging/.kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index ff5126c188d0..eecb84c21b27 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index f8994b0341dc..8a4d1f432338 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools 
export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-logging python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index e0012bf9c19e..637885e8504f 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-logging/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 168c314e4bec3370b97018592f1718a799a4f005 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 20:28:51 -0500 Subject: [PATCH 577/855] chore(python): Noxfile recognizes that tests can live in a folder (#468) Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/samples/snippets/noxfile.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index eecb84c21b27..52d79c11f3ad 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From 2e1e9fdb3c4d6c59871fc2b5eac2afa453a8ab19 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 Jan 2022 12:32:39 -0500 Subject: [PATCH 578/855] chore(python): exclude templated GH action workflows (#470) * ci(python): run lint / unit tests / docs as GH actions Source-Link: 
https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * exclude templated github actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 15 ++++++++++++++- packages/google-cloud-logging/owlbot.py | 7 ++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 52d79c11f3ad..8cb43804d999 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,3 +1,16 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index ad8e32d4c3b6..b266ed13f8fe 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -61,7 +61,12 @@ unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) -s.move(templated_files, excludes=[".coveragerc", "docs/multiprocessing.rst"]) +s.move(templated_files, + excludes=[ + ".coveragerc", + "docs/multiprocessing.rst", + ".github/workflows", # exclude gh actions as credentials are needed for tests + ]) # adjust .trampolinerc for environment tests s.replace( From a3ae713f9651d4acbc669cb84ad4e4ca205815ca Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 24 Jan 2022 13:08:18 -0700 Subject: [PATCH 579/855] chore: make samples 3.6 check optional (#471) --- packages/google-cloud-logging/.github/sync-repo-settings.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml index 3e98ae70f964..37438d33d4fa 100644 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yaml @@ -10,6 +10,5 @@ branchProtectionRules: - 'Kokoro' - 'cla/google' - 'Samples - Lint' - - 'Samples - Python 3.6' - 'Samples - Python 3.7' - 'Samples - Python 3.8' From 840e74944edf0b321a60954fb8a04fd009debe2e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Jan 2022 12:23:46 -0500 Subject: [PATCH 580/855] feat: add api key support (#472) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: upgrade gapic-generator-java, gax-java and gapic-generator-python PiperOrigin-RevId: 423842556 Source-Link: 
https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 38 +++++- .../services/config_service_v2/client.py | 127 +++++++++++------ .../logging_service_v2/async_client.py | 37 +++++ .../services/logging_service_v2/client.py | 127 +++++++++++------ .../metrics_service_v2/async_client.py | 38 +++++- .../services/metrics_service_v2/client.py | 127 +++++++++++------ .../logging_v2/test_config_service_v2.py | 128 ++++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 128 ++++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 128 ++++++++++++++++++ 9 files changed, 747 insertions(+), 131 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 2be78b15b7ae..664f10adae9f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -120,6 +120,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> ConfigServiceV2Transport: """Returns the transport used by the client instance. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 0de49b68f332..f4a1be57c357 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -287,6 +287,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -337,57 +404,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, ConfigServiceV2Transport): # transport is a ConfigServiceV2Transport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." 
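The api_key handling above opens a credential-free construction path for all three clients. An illustrative sketch of the intended call pattern (it mirrors the new unit tests further down; "my-api-key" is a placeholder, and a google-auth release that exposes get_api_key_credentials is assumed):

from google.api_core.client_options import ClientOptions
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

options = ClientOptions()
options.api_key = "my-api-key"  # placeholder, not a real key
client = ConfigServiceV2Client(client_options=options)

# Supplying explicit credentials or a prebuilt transport alongside an API
# key raises ValueError: the sources are mutually exclusive, as enforced
# by the checks above.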
@@ -399,6 +431,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 325fe71931b8..e14453424a78 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Optional, AsyncIterable, Awaitable, AsyncIterator, @@ -117,6 +118,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> LoggingServiceV2Transport: """Returns the transport used by the client instance. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 44ec1a85f142..5815c8d1948f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -231,6 +231,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -281,57 +348,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, LoggingServiceV2Transport): # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -343,6 +375,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index f034fd9f5859..eb7321ab7456 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -109,6 +109,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> MetricsServiceV2Transport: """Returns the transport used by the client instance. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 2f339a130185..ced653a5107c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -234,6 +234,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -284,57 +351,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, MetricsServiceV2Transport): # transport is a MetricsServiceV2Transport instance. 
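One detail repeated verbatim across all three clients (and again just below for the metrics client) is the guarded conversion of an API key into credentials: the private google.auth._default module is imported lazily and the helper is only called if it exists, presumably so that older google-auth releases without API-key support keep working. The pattern in isolation, as a hypothetical helper:

def _credentials_from_api_key(api_key_value, credentials):
    # Lazy import of a private module, mirroring the diff.
    import google.auth._default
    if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
        # Mint credentials from the key; otherwise keep whatever was passed.
        return google.auth._default.get_api_key_credentials(api_key_value)
    return credentials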
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -346,6 +378,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 5a82cec81ebf..efb46eaad5ea 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -409,6 +409,87 @@ def test_config_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +@mock.patch.object( + ConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2AsyncClient), +) +def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
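Each case in these new endpoint tests follows the same hermetic pattern: pin the relevant environment variable with mock.patch.dict for the duration of one block, call the classmethod, and assert on the returned pair. Distilled into a hypothetical helper (any of the three client classes covered by this patch would work):

import os
from unittest import mock

def assert_never_disables_mtls(client_class):
    # The env override only lives inside the with-block.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
    assert endpoint == client_class.DEFAULT_ENDPOINT
    assert cert_source is None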
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -5294,6 +5375,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -5981,3 +6079,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f73ef775cda8..9f11a0210203 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -416,6 +416,87 @@ def test_logging_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -1883,6 +1964,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2467,3 +2565,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index dcb87b2ab72d..97a2c4a99354 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -414,6 +414,87 @@ def test_metrics_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -1801,6 +1882,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2386,3 +2484,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From 36f861653d066da014e0a729a1bd7ea118e42fe7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:44:10 -0800 Subject: [PATCH 581/855] chore!: deprecate AppEngineHandler and ContainerEngineHandler (#310) --- .../google/cloud/logging_v2/client.py | 4 +- .../cloud/logging_v2/handlers/_helpers.py | 21 +-- .../cloud/logging_v2/handlers/app_engine.py | 10 +- .../logging_v2/handlers/container_engine.py | 8 + .../cloud/logging_v2/handlers/handlers.py | 27 +-- .../deployable/DeployableApplication.java | 33 +--- .../java/envtest/deployable/Snippets.java | 28 +-- .../deployable/python/requirements.txt | 1 + .../environment/deployable/python/snippets.py | 54 +++++- .../tests/environment/noxfile.py | 1 + .../tests/environment/tests/common/common.py | 44 ++++- .../tests/environment/tests/common/python.py | 171 +++++++++++++----- .../environment/tests/common/script_utils.py | 6 +- .../environment/tests/go/test_compute.py | 2 +- .../environment/tests/go/test_kubernetes.py | 8 +- .../tests/nodejs/test_kubernetes.py | 8 +- .../python/test_appengine_flex_container.py | 19 ++ .../python/test_appengine_flex_python.py | 19 ++ .../tests/python/test_appengine_standard.py | 19 ++ .../tests/unit/test_client.py | 8 +- tests/environment | 2 +- 21 files changed, 345 insertions(+), 148 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index e9b432eb22e1..c9bbe1fe04f3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -35,8 +35,6 @@ from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI from google.cloud.logging_v2._http import _SinksAPI as JSONSinksAPI from google.cloud.logging_v2.handlers import CloudLoggingHandler -from google.cloud.logging_v2.handlers import AppEngineHandler -from google.cloud.logging_v2.handlers import ContainerEngineHandler from google.cloud.logging_v2.handlers import StructuredLogHandler from google.cloud.logging_v2.handlers import setup_logging from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS @@ -352,7 +350,7 @@ def get_default_handler(self, **kw): if isinstance(monitored_resource, Resource): if monitored_resource.type == _GAE_RESOURCE_TYPE: - return AppEngineHandler(self, 
**kw) + return CloudLoggingHandler(self, resource=monitored_resource, **kw) elif monitored_resource.type == _GKE_RESOURCE_TYPE: return ContainerEngineHandler(**kw) elif monitored_resource.type == _GCF_RESOURCE_TYPE: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 931b7a2f5e3a..f5dfb7c5540d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -17,6 +17,7 @@ import math import json import re +import warnings try: import flask @@ -39,6 +40,8 @@ def format_stackdriver_json(record, message): Returns: str: JSON str to be written to the log file. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. """ subsecond, second = math.modf(record.created) @@ -48,7 +51,10 @@ def format_stackdriver_json(record, message): "thread": record.thread, "severity": record.levelname, } - + warnings.warn( + "format_stackdriver_json is deprecated. Use StructuredLogHandler instead.", + DeprecationWarning, + ) return json.dumps(payload, ensure_ascii=False) @@ -68,10 +74,7 @@ def get_request_data_from_flask(): http_request = { "requestMethod": flask.request.method, "requestUrl": flask.request.url, - "requestSize": flask.request.content_length, "userAgent": flask.request.user_agent.string, - "remoteIp": flask.request.remote_addr, - "referer": flask.request.referrer, "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } @@ -96,21 +99,11 @@ def get_request_data_from_django(): if request is None: return None, None, None - # convert content_length to int if it exists - content_length = None - try: - content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) - except (ValueError, TypeError): - content_length = None - # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), - "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), - "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), - "referer": request.META.get(_DJANGO_REFERER_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py index 874a9d6085d4..abd16664f73c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py @@ -20,6 +20,7 @@ import logging import os +import warnings from google.cloud.logging_v2.handlers._helpers import get_request_data from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -36,9 +37,14 @@ _TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" +_DEPRECATION_MSG = "AppEngineHandler is deprecated. Use CloudLoggingHandler instead." + class AppEngineHandler(logging.StreamHandler): - """A logging handler that sends App Engine-formatted logs to Stackdriver.""" + """A logging handler that sends App Engine-formatted logs to Stackdriver. + + DEPRECATED: use CloudLoggingHandler instead. + """ def __init__( self, @@ -71,6 +77,8 @@ def __init__( self.version_id = os.environ.get(_GAE_VERSION_ENV, "") self.resource = self.get_gae_resource() + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) + def get_gae_resource(self): """Return the GAE resource using the environment variables. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py index a4bd0f84890b..3842111b47d4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/container_engine.py @@ -20,15 +20,22 @@ """ import logging.handlers +import warnings from google.cloud.logging_v2.handlers._helpers import format_stackdriver_json +_DEPRECATION_MSG = ( + "ContainerEngineHandler is deprecated. Use StructuredLogHandler instead." +) + class ContainerEngineHandler(logging.StreamHandler): """Handler to format log messages the format expected by GKE fluent. This handler is written to format messages for the Google Container Engine (GKE) fluentd plugin, so that metadata such as log level are properly set. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. """ def __init__(self, *, name=None, stream=None): @@ -40,6 +47,7 @@ def __init__(self, *, name=None, stream=None): """ super(ContainerEngineHandler, self).__init__(stream=stream) self.name = name + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) def format(self, record): """Format the message into JSON expected by fluentd. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index b3b787fe22ff..46922d54fa07 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -33,8 +33,15 @@ "werkzeug", ) +"""These environments require us to remove extra handlers on setup""" _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") +"""Extra trace label to be added on App Engine environments""" +_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" + +"""Resource name for App Engine environments""" +_GAE_RESOURCE_TYPE = "gae_app" + class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging @@ -45,10 +52,6 @@ class CloudLoggingFilter(logging.Filter): overwritten using the `extras` argument when writing logs. """ - # The subset of http_request fields have been tested to work consistently across GCP environments - # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#httprequest - _supported_http_fields = ("requestMethod", "requestUrl", "userAgent", "protocol") - def __init__(self, project=None, default_labels=None): self.project = project self.default_labels = default_labels if default_labels else {} @@ -80,13 +83,6 @@ def filter(self, record): user_labels = getattr(record, "labels", {}) # infer request data from the environment inferred_http, inferred_trace, inferred_span = get_request_data() - if inferred_http is not None: - # filter inferred_http to include only well-supported fields - inferred_http = { - k: v - for (k, v) in inferred_http.items() - if k in self._supported_http_fields and v is not None - } if inferred_trace is not None and self.project is not None: # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" @@ -188,12 +184,17 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
""" message = super(CloudLoggingHandler, self).format(record) + labels = record._labels + resource = record._resource or self.resource + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: + # add GAE-specific label + labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} # send off request self.transport.send( record, message, - resource=(record._resource or self.resource), - labels=record._labels, + resource=resource, + labels=labels, trace=record._trace, span_id=record._span_id, http_request=record._http_request, diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java index 5d44e04c8d2f..ba737481a51b 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java +++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/DeployableApplication.java @@ -25,45 +25,23 @@ import com.google.cloud.pubsub.v1.Subscriber; import com.google.cloud.pubsub.v1.SubscriptionAdminClient; import com.google.pubsub.v1.PushConfig; -import com.google.pubsub.v1.ProjectTopicName; import com.google.pubsub.v1.ProjectSubscriptionName; -import com.google.common.util.concurrent.MoreExecutors; import com.google.pubsub.v1.PubsubMessage; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.io.IOException; -import java.lang.Thread; -import java.lang.InterruptedException; import java.lang.NoSuchMethodException; import java.lang.IllegalAccessException; import java.lang.reflect.InvocationTargetException; import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.ServiceAccountCredentials; -import com.google.cloud.logging.Severity; -import com.google.cloud.pubsub.v1.AckReplyConsumer; -import com.google.cloud.pubsub.v1.MessageReceiver; -import com.google.cloud.pubsub.v1.Subscriber; -import com.google.cloud.pubsub.v1.SubscriptionAdminClient; -import com.google.pubsub.v1.ProjectSubscriptionName; -import com.google.pubsub.v1.PubsubMessage; -import com.google.pubsub.v1.PushConfig; import com.google.pubsub.v1.TopicName; -import com.sun.net.httpserver.HttpServer; -import com.sun.net.httpserver.HttpHandler; -import com.sun.net.httpserver.HttpExchange; -import java.io.OutputStream; -import java.net.InetSocketAddress; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.Map; import java.lang.reflect.Method; import java.io.BufferedReader; import java.net.URL; import java.io.InputStreamReader; -import java.net.MalformedURLException; import java.net.HttpURLConnection; /** @@ -73,8 +51,6 @@ @SpringBootApplication public class DeployableApplication { - private static final org.slf4j.Logger logger = LoggerFactory.getLogger(DeployableApplication.class); - private static String getProjectId() throws RuntimeException { try { // try reading from service account @@ -116,7 +92,7 @@ private static void startPubsubSubscription() throws IOException, RuntimeExcepti MessageReceiver receiver = (PubsubMessage message, AckReplyConsumer consumer) -> { consumer.ack(); String fnName = message.getData().toStringUtf8(); - Map args = message.getAttributes(); + Map args = message.getAttributesMap(); triggerSnippet(fnName, args); }; // start subscriber @@ -134,7 +110,7 @@ private static void 
startPubsubSubscription() throws IOException, RuntimeExcepti public static void triggerSnippet(String fnName, Map args) { try { Snippets obj = new Snippets(); - Class c = obj.getClass(); + Class c = obj.getClass(); Method found = c.getDeclaredMethod(fnName, new Class[] {Map.class}); found.invoke(obj, args); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { @@ -143,11 +119,6 @@ public static void triggerSnippet(String fnName, Map args) { } public static void main(String[] args) throws IOException, RuntimeException { - String projectId = ""; - String topicId; - String subscriptionId; - - Logger root = (Logger)LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME); root.setLevel(Level.INFO); diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java index 62d14a25c8d3..2a260002b972 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java +++ b/packages/google-cloud-logging/tests/environment/deployable/java/src/main/java/envtest/deployable/Snippets.java @@ -22,9 +22,7 @@ import com.google.cloud.logging.LoggingOptions; import com.google.cloud.logging.Payload.StringPayload; import com.google.cloud.logging.Severity; -import com.google.cloud.logging.MonitoredResourceUtil; -import com.google.logging.type.LogSeverity; -import com.google.cloud.logging.Synchronicity; +import java.lang.reflect.InvocationTargetException; public class Snippets { @@ -52,7 +50,7 @@ private Severity getSeverity(String severityString){ return severity; } - public void simplelog(Map args){ + public void simplelog(Map args) throws InvocationTargetException { System.out.println("Called Simplelog!"); // pull out arguments String logText = args.getOrDefault("log_text", "simplelog"); @@ -63,15 +61,19 @@ public void simplelog(Map args){ Severity severity = getSeverity(severityString); // Instantiates a client - Logging logging = LoggingOptions.getDefaultInstance().getService(); - LogEntry entry = - LogEntry.newBuilder(StringPayload.of(logText)) - .setSeverity(severity) - .setLogName(logName) - .setResource(MonitoredResource.newBuilder("global").build()) - .build(); + try (Logging logging = LoggingOptions.getDefaultInstance().getService()) { + LogEntry entry = + LogEntry.newBuilder(StringPayload.of(logText)) + .setSeverity(severity) + .setLogName(logName) + .setResource(MonitoredResource.newBuilder("global").build()) + .build(); - //Writes the log entry asynchronously - logging.write(Collections.singleton(entry)); + //Writes the log entry asynchronously + logging.write(Collections.singleton(entry)); + } + catch (Exception ex) { + throw new InvocationTargetException(ex, "Failed to close Logging instance"); + } } } diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index 6ad3143ae8ed..e088494bddc1 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -2,3 +2,4 @@ flask==1.1.2 google-cloud-pubsub>=2.8.0 click==7.1.2 pytz==2021.1 +pandas>=1.1.5 diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py 
b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index d1e2f758c5b5..a2fd2c82e206 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -14,7 +14,7 @@ import logging import os - +import json try: import google.cloud.logging @@ -39,13 +39,34 @@ def simplelog(log_name=None, log_text="simple_log", severity="DEFAULT", **kwargs logger.log_text(log_text, severity=severity) -def pylogging_json(log_text=None, severity="WARNING", **kwargs): +def jsonlog(log_name=None, log_text=None, severity="DEFAULT", **kwargs): + # allowed severity: default, debug, info, notice, warning, error, critical, alert, emergency + severity = severity.upper() + client = google.cloud.logging.Client() + logger = client.logger(log_name) + + # build json message + message = {} + for k, v in kwargs.items(): + message[k] = int(v) if v.isnumeric() else v + if log_text: + message["message"] = log_text + + logger.log_struct(message, severity=severity) + + +def pylogging_json(log_text=None, severity="WARNING", string_encode=False, **kwargs): # allowed severity: debug, info, warning, error, critical # build json message message = {} - for k in kwargs.keys(): - message[k] = kwargs[k] + for k, v in kwargs.items(): + message[k] = int(v) if v.isnumeric() else v + if log_text: + message["message"] = log_text + if string_encode: + str_msg = json.dumps(message, ensure_ascii=False) + message = json.dumps({**message, "raw_str": str_msg}, ensure_ascii=False) severity = severity.upper() if severity == "DEBUG": @@ -59,6 +80,7 @@ def pylogging_json(log_text=None, severity="WARNING", **kwargs): else: logging.critical(message) + def pylogging(log_text="pylogging", severity="WARNING", **kwargs): # allowed severity: debug, info, warning, error, critical @@ -100,38 +122,58 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): else: logging.critical(log_text, extra=kwargs) + def pylogging_multiline(log_text="pylogging", second_line="line 2", **kwargs): logging.error(f"{log_text}\n{second_line}") + def pylogging_complex_chars(**kwargs): logging.error('}"{!@[') -def pylogging_with_formatter(log_text="pylogging", format_str="%(name)s :: %(levelname)s :: %(message)s", **kwargs): + +def pylogging_with_formatter( + log_text="pylogging", + format_str="%(name)s :: %(levelname)s :: %(message)s", + **kwargs, +): root_logger = logging.getLogger() handler = root_logger.handlers[0] handler.setFormatter(logging.Formatter(fmt=format_str)) logging.error(log_text) handler.setFormatter(None) + def pylogging_with_arg(log_text="my_arg", **kwargs): logging.error("Arg: %s", log_text) + def pylogging_flask( log_text="pylogging_flask", path="/", base_url="http://google", agent="Chrome", trace="123", + traceparent="", **kwargs, ): import flask app = flask.Flask(__name__) with app.test_request_context( - path, base_url, headers={"User-Agent": agent, "X_CLOUD_TRACE_CONTEXT": trace} + path, base_url, headers={"User-Agent": agent, "X_CLOUD_TRACE_CONTEXT": trace, "TRACEPARENT":traceparent} ): logging.info(log_text) +def pylogging_pandas(log_text="pylogging_pandas", **kwargs): + """ + Ensure pandas dataframes are handled properly + https://github.com/googleapis/python-logging/issues/409 + """ + import pandas as pd + df = pd.DataFrame(columns=['log_text']) + df = df.append({"log_text": log_text}, ignore_index=True) + logging.error(df) + def pylogging_exception(log_text="pylogging_exception", exception_text="Test", 
**kwargs): try: raise Exception(exception_text) diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 672cff8d5b46..f3404fa839cf 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -153,6 +153,7 @@ def tests(session, language, platform): "google-cloud-storage", "google-cloud-testutils", "google-cloud-logging", + "pandas", ) test_path = f"./tests/{language}/test_{platform}.py" diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index ca07741d1b34..7bdeecfcdb2b 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -30,6 +30,7 @@ import sys import uuid import inspect +import random from test_utils.retry import RetryErrors from grpc import RpcError @@ -94,7 +95,7 @@ def _trigger(self, snippet, **kwargs): @RetryErrors(exception=(LogsNotFound, RpcError), delay=2, max_tries=2) def trigger_and_retrieve( - self, log_text, snippet, append_uuid=True, ignore_protos=True, max_tries=6, **kwargs + self, log_text, snippet, append_uuid=True, ignore_protos=True, max_tries=3, **kwargs ): """ Trigger a snippet deployed in the cloud by envctl, and return resulting @@ -118,7 +119,7 @@ def trigger_and_retrieve( if append_uuid: log_text = f"{log_text} {uuid.uuid1()}" self._trigger(snippet, log_text=log_text, **kwargs) - sleep(2) + sleep(10) filter_str = self._add_time_condition_to_filter(log_text) print(filter_str) # give the command time to be received @@ -128,9 +129,19 @@ def trigger_and_retrieve( try: log_list = self._get_logs(filter_str, ignore_protos) return log_list - except (LogsNotFound, RpcError) as e: + except RpcError as e: + print(f"RPC error: {e}") + # most RpcErrors come from exceeding the reads per minute quota + # wait at least 60 seconds + # use a randomized backoff so parallel runs don't start up at + # the same time again + sleep(random.randint(60, 300)) + tries += 1 + except LogsNotFound as e: print("logs not found...") - sleep(5) + # logs may not have been fully ingested into Cloud Logging + # Wait before trying again + sleep(10 * (tries+1)) tries += 1 # log not found raise LogsNotFound @@ -199,10 +210,26 @@ def test_receive_unicode_log(self): found_log = log self.assertIsNotNone(found_log, "expected unicode log not found") + def test_json_log(self): + if self.language not in ["python"]: + # TODO: other languages to also support this test + return True + log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" + log_dict = {"unicode_field": "嗨 世界 😀", "num_field": 2} + log_list = self.trigger_and_retrieve( + log_text, "jsonlog", append_uuid=False, **log_dict + ) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, dict), "expected jsonPayload") + expected_dict = {"message": log_text, **log_dict} + self.assertEqual(found_log.payload, expected_dict) + def test_monitored_resource(self): - if self.language == 'java' or self.language == 'python': + if self.language == 'java': # TODO: implement in java - # TODO: remove python after v3.0.0 return True log_text = f"{inspect.currentframe().f_code.co_name}" @@ -214,8 +241,9 @@ def test_monitored_resource(self): self.assertEqual(found_resource.type, 
self.monitored_resource_name) for label in self.monitored_resource_labels: - self.assertTrue(found_resource.labels[label], - f'resource.labels[{label}] is not set') + self.assertTrue( + found_resource.labels[label], f"resource.labels[{label}] is not set" + ) def test_severity(self): log_text = f"{inspect.currentframe().f_code.co_name}" diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 2ac037298a3f..3a613e487924 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -16,6 +16,8 @@ import unittest import inspect import re +import uuid +import json import google.cloud.logging @@ -27,42 +29,69 @@ def test_pylogging_receive_log(self): log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "pylogging") - found_log = None - for log in log_list: - message = ( - log.payload.get("message", None) - if isinstance(log.payload, dict) - else str(log.payload) - ) - if message and log_text in message: - found_log = log + found_log = log_list[-1] + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + self.assertEqual(len(log_list), 1, "expected 1 log") def test_pylogging_receive_unicode_log(self): log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" log_list = self.trigger_and_retrieve(log_text, "pylogging") - found_log = None - for log in log_list: - message = ( - log.payload.get("message", None) - if isinstance(log.payload, dict) - else str(log.payload) - ) - if message and log_text in message: - found_log = log - self.assertIsNotNone(found_log, "expected unicode log not found") + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + + def test_pylogging_json_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" + log_dict = {"unicode_field": "嗨 世界 😀", "num_field": 2} + log_list = self.trigger_and_retrieve( + log_text, "pylogging_json", append_uuid=False, **log_dict + ) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, dict), "expected jsonPayload") + expected_dict = {"message": log_text, **log_dict} + self.assertEqual(found_log.payload, expected_dict) + + def test_pylogging_encoded_json_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" + log_dict = {"unicode_field": "嗨 世界 😀", "num_field": 2} + log_list = self.trigger_and_retrieve( + log_text, + "pylogging_json", + string_encode="True", + append_uuid=False, + **log_dict, + ) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, dict), "expected jsonPayload") + raw_str = found_log.payload.pop("raw_str") + expected_dict = {"message": log_text, **log_dict} + self.assertEqual(json.loads(raw_str), expected_dict) + self.assertEqual(found_log.payload, expected_dict) def test_pylogging_multiline(self): first_line = f"{inspect.currentframe().f_code.co_name}" second_line = "hello world" - log_list = self.trigger_and_retrieve(first_line, 
"pylogging_multiline", second_line=second_line) + log_list = self.trigger_and_retrieve( + first_line, "pylogging_multiline", second_line=second_line + ) found_log = log_list[-1] found_message = ( - found_log.payload.get("message", None) - if isinstance(found_log.payload, dict) - else str(found_log.payload) - ) + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) self.assertTrue(re.match(f"{first_line} .*\n{second_line}", found_message)) @@ -72,23 +101,25 @@ def test_pylogging_with_argument(self): log_list = self.trigger_and_retrieve(log_text, "pylogging_with_arg") found_log = log_list[-1] found_message = ( - found_log.payload.get("message", None) - if isinstance(found_log.payload, dict) - else str(found_log.payload) - ) + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) self.assertTrue(re.match(f"Arg: {log_text} .*", found_message)) def test_pylogging_with_formatter(self): log_text = f"{inspect.currentframe().f_code.co_name}" - format_str = '%(levelname)s :: %(message)s' - log_list = self.trigger_and_retrieve(log_text, "pylogging_with_formatter", format_str=format_str) + format_str = "%(levelname)s :: %(message)s" + log_list = self.trigger_and_retrieve( + log_text, "pylogging_with_formatter", format_str=format_str + ) found_log = log_list[-1] found_message = ( - found_log.payload.get("message", None) - if isinstance(found_log.payload, dict) - else str(found_log.payload) - ) + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) self.assertTrue(re.match(f"ERROR :: {log_text} .*", found_message)) @@ -118,10 +149,6 @@ def test_severity_pylogging(self): self.assertEqual(found_severity.lower(), severity.lower()) def test_source_location_pylogging(self): - if self.environment == "kubernetes" or "appengine" in self.environment: - # disable these tests on environments with custom handlers - # todo: enable in v3.0.0 - return log_text = f"{inspect.currentframe().f_code.co_name}" log_list = self.trigger_and_retrieve(log_text, "pylogging") found_source = log_list[-1].source_location @@ -135,10 +162,6 @@ def test_source_location_pylogging(self): self.assertTrue(int(found_source["line"]) > 0) def test_flask_http_request_pylogging(self): - if self.environment == "kubernetes" or "appengine" in self.environment: - # disable these tests on environments with custom handlers - # todo: enable in v3.0.0 - return log_text = f"{inspect.currentframe().f_code.co_name}" expected_agent = "test-agent" @@ -170,18 +193,50 @@ def test_flask_http_request_pylogging(self): found_trace = log_list[-1].trace found_span = log_list[-1].span_id + found_sampled = log_list[-1].trace_sampled self.assertIsNotNone(found_trace) self.assertIn("projects/", found_trace) if self.environment != "functions": # functions seems to override the user's trace value self.assertIn(expected_trace, found_trace) self.assertEqual(expected_span, found_span) + self.assertTrue(found_sampled) + + def test_flask_traceparent(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + + expected_agent = "test-agent" + expected_base_url = "http://test" + expected_path = "/pylogging" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + trace_header = f"00-{expected_trace}-{expected_span}-09" + + log_list = self.trigger_and_retrieve( + log_text, + "pylogging_flask", + path=expected_path, + trace="", + traceparent=trace_header, + 
base_url=expected_base_url, + agent=expected_agent, + ) + found_request = log_list[-1].http_request + + self.assertIsNotNone(found_request) + + found_trace = log_list[-1].trace + found_span = log_list[-1].span_id + found_sampled = log_list[-1].trace_sampled + self.assertIsNotNone(found_trace) + self.assertIn("projects/", found_trace) + if self.environment != "functions": + # functions seems to override the user's trace value + self.assertIn(expected_trace, found_trace) + self.assertEqual(expected_span, found_span) + self.assertTrue(found_sampled) def test_pylogging_extras(self): - if self.environment == "kubernetes" or "appengine" in self.environment: - # disable these tests on environments with custom handlers - # todo: enable in v3.0.0 - return log_text = f"{inspect.currentframe().f_code.co_name}" kwargs = { "trace": "123", @@ -232,10 +287,6 @@ def test_pylogging_extras(self): self.assertEqual(found_log.labels["custom"], kwargs["label_custom"]) def test_pylogging_extras_sparse(self): - if self.environment == "kubernetes" or "appengine" in self.environment: - # disable these tests on environments with custom handlers - # todo: enable in v3.0.0 - return log_text = f"{inspect.currentframe().f_code.co_name}" kwargs = { "requestMethod": "POST", @@ -277,3 +328,23 @@ def test_pylogging_exception(self): self.assertIn(log_text, message) self.assertIn(f"Exception: {exception_text}", message) self.assertIn("Traceback (most recent call last):", message) + + def test_pylogging_pandas(self): + """ + Ensure pandas dataframes are parsed without crashing + https://github.com/googleapis/python-logging/issues/409 + """ + import pandas as pd + log_text = f"{inspect.currentframe().f_code.co_name} {str(uuid.uuid1())[-10:]}" + + log_list = self.trigger_and_retrieve(log_text, "pylogging_pandas", append_uuid=False) + found_log = log_list[-1] + + message = (found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload)) + + df = pd.DataFrame(columns=['log_text']) + df = df.append({"log_text": log_text}, ignore_index=True) + + self.assertEqual(str(df), message) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py index df8786248730..d0602549e692 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/script_utils.py @@ -59,7 +59,11 @@ def run_command(self, command, args=[]): print(full_command) result = subprocess.run(full_command, capture_output=True) complete = True - return result.returncode, result.stdout.decode("utf-8"), result.stderr.decode("utf-8") + return ( + result.returncode, + result.stdout.decode("utf-8"), + result.stderr.decode("utf-8"), + ) except Exception as e: print(e) finally: diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py index 59c77fb3eb9b..9c75cf012f90 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py @@ -26,4 +26,4 @@ class TestComputeEngine(Common, unittest.TestCase): language = "go" monitored_resource_name = "gce_instance" - monitored_resource_labels = ["project_id", "instance_id", "zone"] \ No newline at end of file + monitored_resource_labels = ["project_id", "instance_id", "zone"] diff --git 
a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py index bb30cf105eda..0e9654986c0f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py @@ -26,4 +26,10 @@ class TestKubernetesEngine(Common, unittest.TestCase): language = "go" monitored_resource_name = "k8s_container" - monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] + monitored_resource_labels = [ + "project_id", + "location", + "cluster_name", + "pod_name", + "namespace_name", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py index e6df2a018215..caad651a0bc4 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py @@ -26,5 +26,11 @@ class TestKubernetesEngine(Common, unittest.TestCase): language = "nodejs" monitored_resource_name = "k8s_container" - monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] + monitored_resource_labels = [ + "project_id", + "location", + "cluster_name", + "pod_name", + "namespace_name", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py index a3b4dcac9a61..62137c058828 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py @@ -14,6 +14,7 @@ import logging import unittest +import inspect import google.cloud.logging @@ -28,3 +29,21 @@ class TestAppEngineFlexContainer(Common, CommonPython, unittest.TestCase): monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] + + def test_pylogging_gae_trace_label(self): + """ + Check to make sure 'appengine.googleapis.com/trace_id' label is set on GAE environments + """ + expected_trace = "123" + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "pylogging_flask", trace=expected_trace + ) + found_log = log_list[-1] + + self.assertIsNotNone(found_log.labels) + self.assertIsNotNone(found_log.trace) + self.assertEqual( + found_log.labels["appengine.googleapis.com/trace_id"], found_log.trace + ) + self.assertIn(expected_trace, found_log.trace) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py index 8619f77a2e8e..62d874979757 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py @@ -14,6 +14,7 @@ import logging import unittest +import inspect import google.cloud.logging @@ -28,3 +29,21 @@ class TestAppEngineFlex(Common, CommonPython, unittest.TestCase): monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] + + def test_pylogging_gae_trace_label(self): + 
""" + Check to make sure 'appengine.googleapis.com/trace_id' label is set on GAE environments + """ + expected_trace = "123" + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "pylogging_flask", trace=expected_trace + ) + found_log = log_list[-1] + + self.assertIsNotNone(found_log.labels) + self.assertIsNotNone(found_log.trace) + self.assertEqual( + found_log.labels["appengine.googleapis.com/trace_id"], found_log.trace + ) + self.assertIn(expected_trace, found_log.trace) diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py index a633a9770626..ef8e10ed4bb4 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py @@ -14,6 +14,7 @@ import logging import unittest +import inspect import google.cloud.logging @@ -28,3 +29,21 @@ class TestAppEngineStandard(Common, CommonPython, unittest.TestCase): monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] + + def test_pylogging_gae_trace_label(self): + """ + Check to make sure 'appengine.googleapis.com/trace_id' label is set on GAE environments + """ + expected_trace = "123" + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "pylogging_flask", trace=expected_trace + ) + found_log = log_list[-1] + + self.assertIsNotNone(found_log.labels) + self.assertIsNotNone(found_log.trace) + self.assertEqual( + found_log.labels["appengine.googleapis.com/trace_id"], found_log.trace + ) + self.assertIn(expected_trace, found_log.trace) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 9dbfa87fd0b9..11ccd7e3749c 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -719,7 +719,7 @@ def test_get_default_handler_app_engine(self): import os from google.cloud._testing import _Monkey from google.cloud.logging_v2.handlers._monitored_resources import _GAE_ENV_VARS - from google.cloud.logging.handlers import AppEngineHandler + from google.cloud.logging.handlers import CloudLoggingHandler credentials = _make_credentials() client = self._make_one( @@ -733,10 +733,10 @@ def test_get_default_handler_app_engine(self): handler.transport.worker.stop() - self.assertIsInstance(handler, AppEngineHandler) + self.assertIsInstance(handler, CloudLoggingHandler) def test_get_default_handler_container_engine(self): - from google.cloud.logging.handlers import ContainerEngineHandler + from google.cloud.logging.handlers import StructuredLogHandler credentials = _make_credentials() client = self._make_one( @@ -751,7 +751,7 @@ def test_get_default_handler_container_engine(self): with patch: handler = client.get_default_handler() - self.assertIsInstance(handler, ContainerEngineHandler) + self.assertIsInstance(handler, StructuredLogHandler) def test_get_default_handler_general(self): import io diff --git a/tests/environment b/tests/environment index dc85066053b8..41c32ce34255 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit dc85066053b8dc2246c8b72f93a5b97f92885eb2 +Subproject commit 41c32ce3425529680e32701549d3f682f9c82b63 From 
8520cc197b239a4337489493b289357364b93cf2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:46:47 -0800 Subject: [PATCH 582/855] feat!: support json logs (#316) --- .../handlers/_monitored_resources.py | 2 + .../cloud/logging_v2/handlers/handlers.py | 27 ++- .../logging_v2/handlers/structured_log.py | 31 ++- .../handlers/transports/background_thread.py | 14 +- .../logging_v2/handlers/transports/base.py | 2 +- .../logging_v2/handlers/transports/sync.py | 16 +- .../google/cloud/logging_v2/logger.py | 45 +++++ .../tests/system/test_system.py | 100 +++++---- .../tests/unit/handlers/test_handlers.py | 34 +++- .../unit/handlers/test_structured_log.py | 5 +- .../transports/test_background_thread.py | 22 +- .../unit/handlers/transports/test_sync.py | 39 +++- .../tests/unit/test_logger.py | 191 ++++++++++++++++++ 13 files changed, 437 insertions(+), 91 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index e257f08e49a4..144258749d00 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -169,6 +169,8 @@ def _create_global_resource(project): def detect_resource(project=""): """Return the default monitored resource based on the local environment. + If GCP resource not found, defaults to `global`. + Args: project (str): The project ID to pass on to the resource (if needed) Returns: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 46922d54fa07..5d16e74b5a85 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -14,6 +14,7 @@ """Python :mod:`logging` handlers for Cloud Logging.""" +import collections import json import logging @@ -92,15 +93,19 @@ def filter(self, record): record._span_id = getattr(record, "span_id", inferred_span) or None record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) - record._labels = {**self.default_labels, **user_labels} or None + # add logger name as a label if possible + logger_label = {"python_logger": record.name} if record.name else {} + record._labels = {**logger_label, **self.default_labels, **user_labels} or None # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" - record._http_request_str = json.dumps(record._http_request or {}) - record._source_location_str = json.dumps(record._source_location or {}) - record._labels_str = json.dumps(record._labels or {}) - # break quotes for parsing through structured logging - record._msg_str = str(record.msg).replace('"', '\\"') if record.msg else "" + record._http_request_str = json.dumps( + record._http_request or {}, ensure_ascii=False + ) + record._source_location_str = json.dumps( + record._source_location or {}, ensure_ascii=False + ) + record._labels_str = json.dumps(record._labels or {}, ensure_ascii=False) return True @@ -183,9 +188,15 @@ def emit(self, record): Args: record (logging.LogRecord): The record to be logged. 
""" - message = super(CloudLoggingHandler, self).format(record) - labels = record._labels resource = record._resource or self.resource + labels = record._labels + message = None + if isinstance(record.msg, collections.abc.Mapping): + # if input is a dictionary, pass as-is for structured logging + message = record.msg + elif record.msg: + # otherwise, format message string based on superclass + message = super(CloudLoggingHandler, self).format(record) if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: # add GAE-specific label labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 43e1250a3c2b..2d7c5e0786bf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -14,19 +14,23 @@ """Logging handler for printing formatted structured logs to standard output. """ +import collections import json import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter +from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message GCP_FORMAT = ( - '{"message": %(_formatted_msg)s, ' + "{%(_payload_str)s" '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": %(_labels_str)s, ' '"logging.googleapis.com/trace": "%(_trace_str)s", ' '"logging.googleapis.com/spanId": "%(_span_id_str)s", ' + '"logging.googleapis.com/trace_sampled": %(_trace_sampled_str)s, ' '"logging.googleapis.com/sourceLocation": %(_source_location_str)s, ' - '"httpRequest": %(_http_request_str)s }' + '"httpRequest": %(_http_request_str)s ' + "}" ) @@ -57,15 +61,22 @@ def format(self, record): Args: record (logging.LogRecord): The log record. Returns: - str: A JSON string formatted for GKE fluentd. + str: A JSON string formatted for GCP structured logging. 
""" - # let other formatters alter the message - super_payload = None - if record.msg: - # format the message using default handler behaviors - super_payload = super(StructuredLogHandler, self).format(record) - # properly break any formatting in string to make it json safe - record._formatted_msg = json.dumps(super_payload or "") + payload = None + message = _format_and_parse_message(record, super(StructuredLogHandler, self)) + + if isinstance(message, collections.abc.Mapping): + # if input is a dictionary, encode it as a json string + encoded_msg = json.dumps(message, ensure_ascii=False) + # strip out open and close parentheses + payload = encoded_msg.lstrip("{").rstrip("}") + "," + elif message: + # properly break any formatting in string to make it json safe + encoded_message = json.dumps(message, ensure_ascii=False) + payload = '"message": {},'.format(encoded_message) + + record._payload_str = payload or "" # remove exception info to avoid duplicating it # https://github.com/googleapis/python-logging/issues/382 record.exc_info = None diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 60828a117804..1097830a8d55 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -137,7 +137,7 @@ def _thread_main(self): if item is _WORKER_TERMINATOR: done = True # Continue processing items. else: - batch.log_struct(**item) + batch.log(**item) self._safely_commit_batch(batch) @@ -226,12 +226,18 @@ def enqueue(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ + # set python logger name as label if missing + labels = kwargs.pop("labels", {}) + if record.name: + labels["python_logger"] = labels.get("python_logger", record.name) + kwargs["labels"] = labels + # enqueue new entry queue_entry = { - "info": {"message": message, "python_logger": record.name}, + "message": message, "severity": _helpers._normalize_severity(record.levelno), "timestamp": datetime.datetime.utcfromtimestamp(record.created), } @@ -285,7 +291,7 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py index d60a5a070876..bd52b4e75dff 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py @@ -27,7 +27,7 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. 
- message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index 35ee73daa19d..796f0d2ff733 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -16,7 +16,6 @@ Logs directly to the Cloud Logging API with a synchronous call. """ - from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport @@ -36,11 +35,18 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ - info = {"message": message, "python_logger": record.name} - self.logger.log_struct( - info, severity=_helpers._normalize_severity(record.levelno), **kwargs, + # set python logger name as label if missing + labels = kwargs.pop("labels", {}) + if record.name: + labels["python_logger"] = labels.get("python_logger", record.name) + # send log synchronously + self.logger.log( + message, + severity=_helpers._normalize_severity(record.levelno), + labels=labels, + **kwargs, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index fafb70629380..ffe7ea706872 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -14,6 +14,8 @@ """Define API Loggers.""" +import collections + from google.cloud.logging_v2._helpers import _add_defaults_to_filter from google.cloud.logging_v2.entries import LogEntry from google.cloud.logging_v2.entries import ProtobufEntry @@ -21,6 +23,7 @@ from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource +import google.protobuf.message _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -197,6 +200,30 @@ def log_proto(self, message, *, client=None, **kw): """ self._do_log(client, ProtobufEntry, message, **kw) + def log(self, message=None, *, client=None, **kw): + """Log an arbitrary message via a POST request. + Type will be inferred based on the input message. + + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list + + Args: + message (Optional[str or dict or google.protobuf.Message]): The message to log. + client (Optional[~logging_v2.client.Client]): + The client to use. If not passed, falls back to the + ``client`` stored on the current sink. + kw (Optional[dict]): additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`.
+ """ + entry_type = LogEntry + if isinstance(message, google.protobuf.message.Message): + entry_type = ProtobufEntry + elif isinstance(message, collections.abc.Mapping): + entry_type = StructEntry + elif isinstance(message, str): + entry_type = TextEntry + self._do_log(client, entry_type, message, **kw) + def delete(self, logger_name=None, *, client=None): """Delete all entries in a logger via a DELETE request @@ -361,6 +388,24 @@ def log_proto(self, message, **kw): """ self.entries.append(ProtobufEntry(payload=message, **kw)) + def log(self, message=None, **kw): + """Add an arbitrary message to be logged during :meth:`commit`. + Type will be inferred based on the input message. + + Args: + message (Optional[str or dict or google.protobuf.Message]): The message. to log + kw (Optional[dict]): Additional keyword arguments for the entry. + See :class:`~logging_v2.entries.LogEntry`. + """ + entry_type = LogEntry + if isinstance(message, google.protobuf.message.Message): + entry_type = ProtobufEntry + elif isinstance(message, collections.abc.Mapping): + entry_type = StructEntry + elif isinstance(message, str): + entry_type = TextEntry + self.entries.append(entry_type(payload=message, **kw)) + def commit(self, *, client=None): """Send saved log entries as a single API call. diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 532eea96b09f..836339f0b8e9 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -32,7 +32,6 @@ from google.api_core.exceptions import ServiceUnavailable import google.cloud.logging from google.cloud._helpers import UTC -from google.cloud.logging_v2.handlers import AppEngineHandler from google.cloud.logging_v2.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client @@ -401,6 +400,35 @@ def test_log_struct_w_metadata(self): self.assertEqual(request["requestUrl"], URI) self.assertEqual(request["status"], STATUS) + def test_log_w_text(self): + TEXT_PAYLOAD = "System test: test_log_w_text" + logger = Config.CLIENT.logger(self._logger_name("log_w_text")) + self.to_delete.append(logger) + logger.log(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + + def test_log_w_struct(self): + logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + self.to_delete.append(logger) + + logger.log(self.JSON_PAYLOAD) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + + def test_log_empty(self): + logger = Config.CLIENT.logger(self._logger_name("log_empty")) + self.to_delete.append(logger) + + logger.log() + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertIsNone(entries[0].payload) + def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -415,7 +443,7 @@ def test_log_handler_async(self): cloud_logger.warning(LOG_MESSAGE) handler.flush() entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) @@ -437,44 +465,46 @@ def test_log_handler_sync(self): cloud_logger.warning(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = 
{"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_handlers_w_extras(self): LOG_MESSAGE = "Testing with injected extras." + LOGGER_NAME = "handler_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) - for cls in [CloudLoggingHandler, AppEngineHandler]: - LOGGER_NAME = f"{cls.__name__}-handler_extras" - handler_name = self._logger_name(LOGGER_NAME) - - handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport) - - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) - self.to_delete.append(logger) - - cloud_logger = logging.getLogger(LOGGER_NAME) - cloud_logger.addHandler(handler) - expected_request = {"requestUrl": "localhost"} - expected_source = {"file": "test.py"} - extra = { - "trace": "123", - "span_id": "456", - "http_request": expected_request, - "source_location": expected_source, - "resource": Resource(type="cloudiot_device", labels={}), - "labels": {"test-label": "manual"}, - } - cloud_logger.warning(LOG_MESSAGE, extra=extra) - - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].trace, extra["trace"]) - self.assertEqual(entries[0].span_id, extra["span_id"]) - self.assertEqual(entries[0].http_request, expected_request) - self.assertEqual(entries[0].labels, extra["labels"]) - self.assertEqual(entries[0].resource.type, extra["resource"].type) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + expected_request = {"requestUrl": "localhost"} + expected_source = {"file": "test.py"} + extra = { + "trace": "123", + "span_id": "456", + "http_request": expected_request, + "source_location": expected_source, + "resource": Resource(type="cloudiot_device", labels={}), + "labels": {"test-label": "manual"}, + } + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, extra["trace"]) + self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertEqual(entries[0].http_request, expected_request) + self.assertEqual( + entries[0].labels, {**extra["labels"], "python_logger": LOGGER_NAME} + ) + self.assertEqual(entries[0].resource.type, extra["resource"].type) def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
@@ -490,7 +520,7 @@ def test_log_root_handler(self): logging.warning(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index b7fef1b9eaa9..c51175261d9c 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -63,6 +63,7 @@ def test_filter_record(self): "file": "testpath", "function": "test-function", } + expected_label = {"python_logger": logname} record = logging.LogRecord( logname, logging.INFO, @@ -78,7 +79,6 @@ def test_filter_record(self): self.assertTrue(success) self.assertEqual(record.msg, message) - self.assertEqual(record._msg_str, message) self.assertEqual(record._source_location, expected_location) self.assertEqual(record._source_location_str, json.dumps(expected_location)) self.assertIsNone(record._resource) @@ -88,8 +88,8 @@ def test_filter_record(self): self.assertEqual(record._span_id_str, "") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") - self.assertIsNone(record._labels) - self.assertEqual(record._labels_str, "{}") + self.assertEqual(record._labels, expected_label) + self.assertEqual(record._labels_str, json.dumps(expected_label)) def test_minimal_record(self): """ @@ -105,7 +105,6 @@ def test_minimal_record(self): self.assertTrue(success) self.assertIsNone(record.msg) - self.assertEqual(record._msg_str, "") self.assertIsNone(record._source_location) self.assertEqual(record._source_location_str, "{}") self.assertIsNone(record._resource) @@ -297,7 +296,16 @@ def test_emit(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None, None, None, None, None), + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + None, + None, + ), ) def test_emit_manual_field_override(self): @@ -336,6 +344,7 @@ def test_emit_manual_field_override(self): "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, } setattr(record, "labels", added_labels) handler.handle(record) @@ -368,14 +377,25 @@ def test_emit_with_custom_formatter(self): handler.setFormatter(logFormatter) message = "test" expected_result = "logname :: INFO :: test" + logname = "logname" + expected_label = {"python_logger": logname} record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None + logname, logging.INFO, None, None, message, None, None ) handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), ) def test_format_with_arguments(self): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 271a68189090..c87f7f23e3c8 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -60,6 +60,7 @@ def test_format(self): record = 
logging.LogRecord( logname, logging.INFO, pathname, lineno, message, None, None, func=func ) + expected_labels = {**labels, "python_logger": logname} expected_payload = { "message": message, "severity": record.levelname, @@ -71,7 +72,7 @@ def test_format(self): "function": func, }, "httpRequest": {}, - "logging.googleapis.com/labels": labels, + "logging.googleapis.com/labels": expected_labels, } handler.filter(record) result = json.loads(handler.format(record)) @@ -91,7 +92,6 @@ def test_format_minimal(self): record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) record.created = None expected_payload = { - "message": "", "logging.googleapis.com/trace": "", "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, @@ -265,6 +265,7 @@ def test_format_overrides(self): "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, }, } diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 1666cd74b4b0..f408de4769ca 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -279,15 +279,14 @@ def test_enqueue_defaults(self): self._enqueue_record(worker, message) entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.INFO) self.assertIsInstance(entry["timestamp"], datetime.datetime) self.assertNotIn("resource", entry.keys()) - self.assertNotIn("labels", entry.keys()) self.assertNotIn("trace", entry.keys()) self.assertNotIn("span_id", entry.keys()) self.assertNotIn("http_request", entry.keys()) + self.assertEqual(entry["labels"], {"python_logger": "testing"}) def test_enqueue_explicit(self): import datetime @@ -313,11 +312,10 @@ def test_enqueue_explicit(self): entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.ERROR) self.assertIs(entry["resource"], resource) - self.assertIs(entry["labels"], labels) + self.assertEqual(entry["labels"], {**labels, "python_logger": "testing"}) self.assertIs(entry["trace"], trace) self.assertIs(entry["span_id"], span_id) self.assertIsInstance(entry["timestamp"], datetime.datetime) @@ -388,9 +386,9 @@ def test__thread_main_max_latency(self, time): worker._queue = mock.create_autospec(queue.Queue, instance=True) worker._queue.get.side_effect = [ - {"info": {"message": "1"}}, # Single record. + {"message": 1}, # Single record. queue.Empty(), # Emulate a queue.get() timeout. - {"info": {"message": "1"}}, # Second record. + {"message": "2"}, # Second record. background_thread._WORKER_TERMINATOR, # Stop the thread. queue.Empty(), # Emulate a queue.get() timeout. 
] @@ -479,9 +477,9 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct( + def log( self, - info, + message, severity=logging.INFO, resource=None, labels=None, @@ -495,8 +493,8 @@ def log_struct( assert resource is None resource = _GLOBAL_RESOURCE - self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) - self.entries.append(info) + self.log_called_with = (message, severity, resource, labels, trace, span_id) + self.entries.append(message) def commit(self): self.commit_called = True diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 9f06427573c1..cc8ffe284546 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -41,26 +41,51 @@ def test_send(self): client = _Client(self.PROJECT) - stackdriver_logger_name = "python" + client_name = "python" python_logger_name = "mylogger" - transport = self._make_one(client, stackdriver_logger_name) + transport = self._make_one(client, client_name) message = "hello world" record = logging.LogRecord( python_logger_name, logging.INFO, None, None, message, None, None ) transport.send(record, message, resource=_GLOBAL_RESOURCE) - EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} EXPECTED_SENT = ( - EXPECTED_STRUCT, + message, LogSeverity.INFO, _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, + None, + None, None, + ) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) + + def test_send_struct(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2._helpers import LogSeverity + + client = _Client(self.PROJECT) + + client_name = "python" + python_logger_name = "mylogger" + transport = self._make_one(client, client_name) + message = {"message": "hello world", "extra": "test"} + record = logging.LogRecord( + python_logger_name, logging.INFO, None, None, message, None, None + ) + + transport.send(record, message, resource=_GLOBAL_RESOURCE) + EXPECTED_SENT = ( + message, + LogSeverity.INFO, + _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, None, None, None, ) - self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) class _Logger(object): @@ -69,7 +94,7 @@ class _Logger(object): def __init__(self, name): self.name = name - def log_struct( + def log( self, message, severity=None, @@ -79,7 +104,7 @@ def log_struct( span_id=None, http_request=None, ): - self.log_struct_called_with = ( + self.log_called_with = ( message, severity, resource, diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 5ad4861784fb..d0e751e93bbd 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -464,6 +464,80 @@ def test_log_proto_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_inference_empty(self): + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger 
= self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) + + logger.log() + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_inference_text(self): + RESOURCE = {"type": "global", "labels": {}} + TEXT = "TEXT" + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": TEXT, + "resource": RESOURCE, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(TEXT) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_inference_struct(self): + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} + RESOURCE = {"type": "global", "labels": {}} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "resource": RESOURCE, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(STRUCT) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_inference_proto(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + + message = Struct(fields={"foo": Value(bool_value=True)}) + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "protoPayload": json.loads(MessageToJson(message)), + "resource": {"type": "global", "labels": {}}, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(message) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.logging_api = _DummyLoggingAPI() @@ -902,6 +976,123 @@ def test_log_proto_explicit(self): ) self.assertEqual(batch.entries, [ENTRY]) + def test_log_inference_empty(self): + """ + When calling batch.log with empty input, it should + call batch.log_empty + """ + from google.cloud.logging import LogEntry + + ENTRY = LogEntry() + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log() + self.assertEqual(batch.entries, [ENTRY]) + + def test_log_inference_text(self): + """ + When calling batch.log with text input, it should + call batch.log_text + """ + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging import TextEntry + + TEXT = "This is the entry text" + ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log(TEXT) + self.assertEqual(batch.entries, [ENTRY]) + + def test_log_inference_struct(self): + """ + When calling batch.log with struct input, it should + call batch.log_struct + """ + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging import StructEntry + + STRUCT = {"message": "Message text", "weather": "partly cloudy"} + ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log(STRUCT) +
self.assertEqual(batch.entries, [ENTRY]) + + def test_log_inference_proto(self): + """ + When calling batch.log with proto input, it should + call batch.log_proto + """ + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.cloud.logging import ProtobufEntry + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + message = Struct(fields={"foo": Value(bool_value=True)}) + ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log(message) + self.assertEqual(batch.entries, [ENTRY]) + + def test_log_inference_struct_explicit(self): + """ + When calling batch.log with struct input, it should + call batch.log_struct, along with input arguments + """ + import datetime + from google.cloud.logging import Resource + from google.cloud.logging import StructEntry + + STRUCT = {"message": "Message text", "weather": "partly cloudy"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type="gae_app", labels={"module_id": "default", "version_id": "test"} + ) + ENTRY = StructEntry( + payload=STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log( + STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) + def test_commit_w_unknown_entry_type(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import LogEntry From 32a9e1210aaeafa9e43215fad583f97bc2b87644 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:48:15 -0800 Subject: [PATCH 583/855] feat!: Infer default resource in logger (#315) --- .../.github/.OwlBot.lock.yaml | 1 + .../google/cloud/logging_v2/client.py | 9 ++- .../cloud/logging_v2/handlers/handlers.py | 8 ++- .../google/cloud/logging_v2/logger.py | 11 +++- .../tests/unit/handlers/test_handlers.py | 7 +- .../tests/unit/test_client.py | 8 ++- .../tests/unit/test_logger.py | 65 +++++++++++++++---- 7 files changed, 85 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 8cb43804d999..fa15cb546774 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -14,3 +14,4 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index c9bbe1fe04f3..5792ff6f9e0d 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -182,16 +182,21 @@ def metrics_api(self): self._metrics_api = JSONMetricsAPI(self) return self._metrics_api - def logger(self, name): + def logger(self, name, *, labels=None, resource=None): """Creates a logger bound to the current client. Args: name (str): The name of the logger to be constructed. + resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. + labels (Optional[dict]): Mapping of default labels for entries written + via this logger. Returns: ~logging_v2.logger.Logger: Logger created with the current client. """ - return Logger(name, client=self) + return Logger(name, client=self, labels=labels, resource=resource) def list_entries( self, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 5d16e74b5a85..b554a6fdb7d9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -18,7 +18,6 @@ import json import logging -from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2.handlers._helpers import get_request_data @@ -144,7 +143,7 @@ def __init__( *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, + resource=None, labels=None, stream=None, ): @@ -163,11 +162,14 @@ def __init__( :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): - Resource for this Handler. Defaults to ``global``. + Resource for this Handler. If not given, will be inferred from the environment. labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. """ super(CloudLoggingHandler, self).__init__(stream) + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self.client = client self.transport = transport(client, name) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index ffe7ea706872..01221fc7baba 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -22,6 +22,7 @@ from google.cloud.logging_v2.entries import StructEntry from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource import google.protobuf.message @@ -51,19 +52,23 @@ class Logger(object): See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ - def __init__(self, name, client, *, labels=None, resource=_GLOBAL_RESOURCE): + def __init__(self, name, client, *, labels=None, resource=None): """ Args: name (str): The name of the logger. client (~logging_v2.client.Client): A client which holds credentials and project configuration for the logger (which requires a project). 
- resource (~logging_v2.Resource): a monitored resource object - representing the resource the code was run on. + resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. labels (Optional[dict]): Mapping of default labels for entries written via this logger. """ + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self._client = client self.labels = labels diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index c51175261d9c..74f5c6dd85c8 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -236,7 +236,9 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_global_resource, + ) from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME patch = mock.patch( @@ -251,7 +253,8 @@ def test_ctor_defaults(self): self.assertIsInstance(handler.transport, _Transport) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - self.assertEqual(handler.resource, _GLOBAL_RESOURCE) + global_resource = _create_global_resource(self.PROJECT) + self.assertEqual(handler.resource, global_resource) self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 11ccd7e3749c..46526fb219e6 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -239,14 +239,20 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging import Logger + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - logger = client.logger(self.LOGGER_NAME) + labels = {"test": "true"} + logger = client.logger( + self.LOGGER_NAME, resource=_GLOBAL_RESOURCE, labels=labels + ) self.assertIsInstance(logger, Logger) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.default_resource, _GLOBAL_RESOURCE) + self.assertEqual(logger.labels, labels) def test_list_entries_defaults(self): from google.cloud.logging import TextEntry diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index d0e751e93bbd..0d8fd12085ef 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -99,11 +99,15 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.client, client2) def test_log_empty_defaults_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), 
"labels": DEFAULT_LABELS, } ] @@ -170,7 +174,11 @@ def test_log_empty_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_defaults(self): - RESOURCE = {"type": "global", "labels": {}} + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + RESOURCE = detect_resource(self.PROJECT)._to_dict() TEXT = "TEXT" ENTRIES = [ { @@ -188,8 +196,12 @@ def test_log_text_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + TEXT = "TEXT" - RESOURCE = {"type": "global", "labels": {}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -265,8 +277,12 @@ def test_log_text_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), @@ -283,8 +299,12 @@ def test_log_struct_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -360,6 +380,9 @@ def test_log_struct_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -369,7 +392,7 @@ def test_log_proto_defaults(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -381,6 +404,9 @@ def test_log_proto_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -391,7 +417,7 @@ def test_log_proto_w_default_labels(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -465,11 +491,15 @@ def test_log_proto_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_empty(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + DEFAULT_LABELS = {"foo": "spam"} 
ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -482,13 +512,16 @@ def test_log_inference_empty(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_text(self): - RESOURCE = {"type": "global", "labels": {}} + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + TEXT = "TEXT" ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "textPayload": TEXT, - "resource": RESOURCE, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -500,13 +533,16 @@ def test_log_inference_text(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_struct(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "jsonPayload": STRUCT, - "resource": RESOURCE, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -521,13 +557,16 @@ def test_log_inference_proto(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) message = Struct(fields={"foo": Value(bool_value=True)}) ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) From 2a569795afc13582eb24ae508d3aa3b3df1a1b36 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 27 Jul 2021 11:20:31 -0700 Subject: [PATCH 584/855] feat!: support string-encoded json (#339) --- .../cloud/logging_v2/handlers/handlers.py | 38 +++- .../tests/unit/handlers/test_handlers.py | 184 ++++++++++++++++++ .../unit/handlers/test_structured_log.py | 41 ++++ 3 files changed, 256 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index b554a6fdb7d9..8d14852e1f0e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -192,13 +192,8 @@ def emit(self, record): """ resource = record._resource or self.resource labels = record._labels - message = None - if isinstance(record.msg, collections.abc.Mapping): - # if input is a dictionary, pass as-is for structured logging - message = record.msg - elif record.msg: - # otherwise, format message string based on superclass - message = super(CloudLoggingHandler, self).format(record) + message = _format_and_parse_message(record, self) + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: # add GAE-specific label labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} @@ -215,6 +210,35 @@ def emit(self, record): ) +def _format_and_parse_message(record, formatter_handler): + """ + Helper function to apply formatting to a LogRecord message, + and attempt to 
parse encoded JSON into a dictionary object. + + Resulting output will be of type (str | dict | None) + + Args: + record (logging.LogRecord): The record object representing the log + formatter_handler (logging.Handler): The handler used to format the log + """ + # if message is a dictionary, return as-is + if isinstance(record.msg, collections.abc.Mapping): + return record.msg + # format message string based on superclass + message = formatter_handler.format(record) + try: + # attempt to parse encoded json into dictionary + if message[0] == "{": + json_message = json.loads(message) + if isinstance(json_message, collections.abc.Mapping): + message = json_message + except (json.decoder.JSONDecodeError, IndexError): + # log string is not valid json + pass + # if formatted message contains no content, return None + return message if message != "None" else None + + def setup_logging( handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO ): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 74f5c6dd85c8..d36dc895970f 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -311,6 +311,20 @@ def test_emit(self): ), ) + def test_emit_minimal(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None) + handler.handle(record) + self.assertEqual( + handler.transport.send_called_with, + (record, None, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + def test_emit_manual_field_override(self): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.resource import Resource @@ -401,6 +415,70 @@ def test_emit_with_custom_formatter(self): ), ) + def test_emit_dict(self): + """ + Handler should support logging dictionaries + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = {"x": "test"} + logname = "logname" + expected_label = {"python_logger": logname} + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), + ) + + def test_emit_with_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + logFormatter = logging.Formatter(fmt='{ "x" : "%(name)s" }') + handler.setFormatter(logFormatter) + logname = "logname" + expected_result = {"x": logname} + expected_label = {"python_logger": logname} + record = logging.LogRecord(logname, logging.INFO, None, None, None, None, None) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), + ) + def test_format_with_arguments(self): """ Handler should support format string arguments @@ 
-425,6 +503,112 @@ ) +class TestFormatAndParseMessage(unittest.TestCase): + def test_none(self): + """ + None messages with no special formatting should return + None after formatting + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + record = logging.LogRecord(None, None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, None) + + def test_none_formatted(self): + """ + None messages with formatting rules should return formatted string + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, "name: logname") + + def test_unformatted_string(self): + """ + Unformatted strings should be returned unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '"test"' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_empty_string(self): + """ + Empty strings should be returned unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "" + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_string_formatted_with_args(self): + """ + string messages should properly apply formatting and arguments + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "argument: %s" + arg = "test" + record = logging.LogRecord("logname", None, None, None, message, arg, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s :: message: %(message)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, "name: logname :: message: argument: test") + + def test_dict(self): + """ + dict messages should be unchanged + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = {"a": "b"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + formatter = logging.Formatter("name: %(name)s") + handler.setFormatter(formatter) + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_string_encoded_dict(self): + """ + dicts should be extracted from string messages + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '{ "x": { "y" : "z" } }' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, {"x": {"y": "z"}}) + + def test_broken_encoded_dict(self): + """ + unparseable encoded dicts should be kept as strings + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = '{
"x": { "y" : ' + record = logging.LogRecord("logname", None, None, None, message, None, None) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers import setup_logging diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index c87f7f23e3c8..d9dfa2512618 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -92,13 +92,16 @@ def test_format_minimal(self): record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) record.created = None expected_payload = { + "severity": "INFO", "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, "logging.googleapis.com/labels": {}, } handler.filter(record) result = json.loads(handler.format(record)) + self.assertEqual(set(expected_payload.keys()), set(result.keys())) for (key, value) in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" @@ -170,6 +173,44 @@ def test_format_with_custom_formatter(self): handler.filter(record) result = handler.format(record) self.assertIn(expected_result, result) + self.assertIn("message", result) + + def test_dict(self): + """ + Handler should parse json encoded as a string + """ + import logging + + handler = self._make_one() + message = {"x": "test"} + expected_result = '"x": "test"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) + + def test_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + import logging + + handler = self._make_one() + logFormatter = logging.Formatter(fmt='{ "name" : "%(name)s" }') + handler.setFormatter(logFormatter) + expected_result = '"name": "logname"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, None, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) def test_format_with_arguments(self): """ From aaaf55c40261adb37a5a09ab62ea824477c79fbb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Oct 2021 17:04:19 -0700 Subject: [PATCH 585/855] fix!: api consistency between HTTP and Gapic layers (#375) --- .../google/cloud/logging_v2/_gapic.py | 109 ++++-- .../google/cloud/logging_v2/_http.py | 79 +++-- .../google/cloud/logging_v2/client.py | 71 ++-- .../google/cloud/logging_v2/logger.py | 25 +- .../tests/system/test_system.py | 319 +++++++++++------- .../tests/unit/test__gapic.py | 116 ++++++- .../tests/unit/test__http.py | 136 +++++--- .../tests/unit/test_client.py | 72 ++-- .../tests/unit/test_logger.py | 107 +++++- 9 files changed, 691 insertions(+), 343 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index 7a6d70650dff..3661d3d09184 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -49,10 +49,11 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -69,14 +70,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ # full resource names are expected by the API resource_names = resource_names @@ -89,19 +92,27 @@ def list_entries( ) response = self._gapic_api.list_log_entries(request=request) - page_iter = iter(response) + log_iter = iter(response) # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - def log_entries_pager(page_iter): - for page in page_iter: - log_entry_dict = _parse_log_entry(LogEntryPB.pb(page)) + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + # create generator + def log_entries_pager(log_iter): + i = 0 + for entry in log_iter: + if max_results is not None and i >= max_results: + break + log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry)) yield entry_from_resource(log_entry_dict, self._client, loggers=loggers) + i += 1 - return log_entries_pager(page_iter) + return log_entries_pager(log_iter) def write_entries( self, @@ -175,7 +186,7 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_sinks(self, parent, *, page_size=0, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. Args: @@ -187,27 +198,37 @@ def list_sinks(self, parent, *, page_size=0, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. 
+ page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ request = ListSinksRequest( parent=parent, page_size=page_size, page_token=page_token ) response = self._gapic_api.list_sinks(request) - page_iter = iter(response) + sink_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def sinks_pager(page_iter): - for page in page_iter: + def sinks_pager(sink_iter): + i = 0 + for entry in sink_iter: + if max_results is not None and i >= max_results: + break # Convert the GAPIC sink type into the handwritten `Sink` type - yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client) + yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client) + i += 1 - return sinks_pager(page_iter) + return sinks_pager(sink_iter) def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False @@ -347,33 +368,47 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_metrics(self, project, *, page_size=0, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. Args: project (str): ID of the project whose metrics are to be listed. - page_size (int): Maximum number of metrics to return, If not passed, - defaults to a value set by the API. - page_token (str): Opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterable[logging_v2.Metric]: Iterable of metrics. + Generator[logging_v2.Metric] """ path = f"projects/{project}" request = ListLogMetricsRequest( parent=path, page_size=page_size, page_token=page_token, ) response = self._gapic_api.list_log_metrics(request=request) - page_iter = iter(response) + metric_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def metrics_pager(page_iter): - for page in page_iter: + def metrics_pager(metric_iter): + i = 0 + for entry in metric_iter: + if max_results is not None and i >= max_results: + break # Convert GAPIC metrics type into handwritten `Metric` type - yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client) + yield Metric.from_api_repr( + LogMetric.to_dict(entry), client=self._client + ) + i += 1 - return metrics_pager(page_iter) + return metrics_pager(metric_iter) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. 
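All three GAPIC wrappers above (entries, sinks, metrics) repeat one pattern: validate max_results eagerly, then wrap the page iterator in a generator that yields items one at a time and stops once the cap is hit. Distilled into a standalone sketch with illustrative names (capped, pager); the library itself uses per-type closures such as log_entries_pager:

def capped(source_iter, max_results=None):
    # Validation happens here, outside the generator body, so a bad
    # max_results raises immediately rather than on the first next().
    # As in the patch, only negative values raise; max_results=0 simply
    # yields nothing.
    if max_results is not None and max_results < 0:
        raise ValueError("max_results must be positive")

    def pager(it):
        count = 0
        for item in it:
            if max_results is not None and count >= max_results:
                break
            yield item
            count += 1

    return pager(source_iter)

assert list(capped(iter(range(10)), max_results=3)) == [0, 1, 2]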
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py index 68bde346a119..21fb38606d0c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -74,6 +74,7 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): @@ -94,14 +95,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ extra_params = {"resourceNames": resource_names} @@ -131,7 +134,8 @@ def list_entries( ) # This method uses POST to make a read-only request. iterator._HTTP_METHOD = "POST" - return iterator + + return _entries_pager(iterator, max_results) def write_entries( self, @@ -219,7 +223,7 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_sinks(self, parent, *, page_size=None, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. See @@ -234,14 +238,17 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ extra_params = {} @@ -249,7 +256,7 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/{parent}/sinks" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -259,6 +266,8 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params=extra_params, ) + return _entries_pager(iterator, max_results) + def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False ): @@ -373,24 +382,27 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_metrics(self, project, *, page_size=None, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[google.cloud.logging_v2.metric.Metric] + Generator[logging_v2.Metric] + """ extra_params = {} @@ -398,7 +410,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/projects/{project}/metrics" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -407,6 +419,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): page_token=page_token, extra_params=extra_params, ) + return _entries_pager(iterator, max_results) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. @@ -469,6 +482,18 @@ def metric_delete(self, project, metric_name): self.api_request(method="DELETE", path=target) +def _entries_pager(page_iter, max_results=None): + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + i = 0 + for page in page_iter: + if max_results is not None and i >= max_results: + break + yield page + i += 1 + + def _item_to_entry(iterator, resource, loggers): """Convert a log entry resource to the native object. 
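The _entries_pager helper above gives the HTTP layer the same capping behavior as the GAPIC generators, and the client-level wrappers in the next file thread max_results through to both. For callers the upshot is that the list APIs now hand back a generator: max_results bounds the total number of items yielded, while page_size only tunes how many are fetched per underlying API call. A hedged usage sketch, assuming application default credentials and a default project are available:

from google.cloud.logging_v2 import client

logging_client = client.Client()
# Yields at most 5 entries, however the API pages internally.
for entry in logging_client.list_entries(max_results=5, page_size=1000):
    print(entry.payload)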
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 5792ff6f9e0d..7098c8baa843 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -204,10 +204,11 @@ def list_entries( resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -226,14 +227,17 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: resource_names = [f"projects/{self.project}"] @@ -243,6 +247,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) @@ -266,7 +271,9 @@ def sink(self, name, *, filter_=None, destination=None): """ return Sink(name, filter_=filter_, destination=destination, client=self) - def list_sinks(self, *, parent=None, page_size=None, page_token=None): + def list_sinks( + self, *, parent=None, max_results=None, page_size=None, page_token=None + ): """List sinks for the a parent resource. See @@ -283,22 +290,25 @@ def list_sinks(self, *, parent=None, page_size=None, page_token=None): "folders/[FOLDER_ID]". If not passed, defaults to the project bound to the API's client. - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.sink.Sink] + Generator[~logging_v2.Sink] """ if parent is None: parent = f"projects/{self.project}" return self.sinks_api.list_sinks( - parent=parent, page_size=page_size, page_token=page_token + parent=parent, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def metric(self, name, *, filter_=None, description=""): @@ -319,27 +329,30 @@ def metric(self, name, *, filter_=None, description=""): """ return Metric(name, filter_=filter_, client=self, description=description) - def list_metrics(self, *, page_size=None, page_token=None): + def list_metrics(self, *, max_results=None, page_size=None, page_token=None): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.metric.Metric] + Generator[logging_v2.Metric] """ return self.metrics_api.list_metrics( - self.project, page_size=page_size, page_token=page_token + self.project, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def get_default_handler(self, **kw): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 01221fc7baba..404871bef6fe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -264,10 +264,11 @@ def list_entries( resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entries. + """Return a generator of log entry resources. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -289,19 +290,16 @@ def list_entries( By default, a 24 hour filter is applied. order_by (Optional[str]): One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (Optional[int]): - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - page_token (Optional[str]): - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. 
If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.entries.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: @@ -317,6 +315,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 836339f0b8e9..d7e1e57d2957 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -36,6 +36,7 @@ from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.entries import TextEntry from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue @@ -97,11 +98,13 @@ class Config(object): """ CLIENT = None + HTTP_CLIENT = None use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never") def setUpModule(): Config.CLIENT = client.Client() + Config.HTTP_CLIENT = client.Client(_use_grpc=False) # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. @@ -186,34 +189,34 @@ def test_list_entry_with_auditlog(self): audit_dict = { "@type": type_url, "methodName": "test", - "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, "resourceName": "test", "serviceName": "test", - "status": {"code": 0}, } audit_struct = self._dict_to_struct(audit_dict) - logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") - logger.log_proto(audit_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.payload_json["methodName"], audit_dict["methodName"] - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["methodName"], - audit_dict["methodName"], - ) + gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + 
protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) def test_list_entry_with_requestlog(self): """ @@ -244,20 +247,22 @@ def test_list_entry_with_requestlog(self): } req_struct = self._dict_to_struct(req_dict) - logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") - logger.log_proto(req_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) + gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"req-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) def test_list_entry_with_auditdata(self): """ @@ -294,46 +299,51 @@ def test_list_entry_with_auditdata(self): def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" - logger = Config.CLIENT.logger(self._logger_name("log_text")) - self.to_delete.append(logger) - logger.log_text(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + self.assertIsInstance(entries[0], TextEntry) def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) now = datetime.utcnow() - - self.to_delete.append(logger) - - logger.log_text(text_payload, timestamp=now) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) - self.assertIsInstance(entries[0].received_timestamp, datetime) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(text_payload, timestamp=now) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + self.assertIsInstance(entries[0].received_timestamp, datetime) def test_log_text_with_resource(self): text_payload = "System test: test_log_text_with_resource" - logger =
Config.CLIENT.logger(self._logger_name("log_text_res")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) now = datetime.utcnow() - resource = Resource( - type="gae_app", - labels={"module_id": "default", "version_id": "test", "zone": ""}, - ) + for logger in [gapic_logger, http_logger]: + resource = Resource( + type="gae_app", + labels={"module_id": "default", "version_id": "test", "zone": ""}, + ) - self.to_delete.append(logger) + self.to_delete.append(logger) - logger.log_text(text_payload, timestamp=now, resource=resource) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - # project_id is output only so we don't want it in assertion - del entries[0].resource.labels["project_id"] - self.assertEqual(entries[0].resource, resource) + logger.log_text(text_payload, timestamp=now, resource=resource) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + # project_id is output only so we don't want it in assertion + del entries[0].resource.labels["project_id"] + self.assertEqual(entries[0].resource, resource) def test_log_text_w_metadata(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -343,35 +353,42 @@ def test_log_text_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_text_md")) - self.to_delete.append(logger) - - logger.log_text( - TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_text( + TEXT_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) + self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertEqual(entry.payload, TEXT_PAYLOAD) - self.assertEqual(entry.insert_id, INSERT_ID) - self.assertEqual(entry.severity, SEVERITY) + entry = entries[0] + self.assertEqual(entry.payload, TEXT_PAYLOAD) + self.assertEqual(entry.insert_id, INSERT_ID) + self.assertEqual(entry.severity, SEVERITY) - request = entry.http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + request = entry.http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log_struct(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log_struct(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + 
self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_struct_w_metadata(self): INSERT_ID = "INSERTID" @@ -380,54 +397,63 @@ def test_log_struct_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) - self.to_delete.append(logger) - - logger.log_struct( - self.JSON_PAYLOAD, - insert_id=INSERT_ID, - severity=SEVERITY, - http_request=REQUEST, - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_struct( + self.JSON_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_w_text(self): TEXT_PAYLOAD = "System test: test_log_w_text" - logger = Config.CLIENT.logger(self._logger_name("log_w_text")) - self.to_delete.append(logger) - logger.log(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) def test_log_w_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_empty(self): - logger = Config.CLIENT.logger(self._logger_name("log_empty")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) - logger.log() - entries = _list_entries(logger) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - self.assertEqual(len(entries), 1) - self.assertIsNone(entries[0].payload) + logger.log() 
+ entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertIsNone(entries[0].payload) def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -755,6 +781,51 @@ def test_update_sink(self): self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + def test_api_equality_list_logs(self): + unique_id = uuid.uuid1() + gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") + http_logger = Config.HTTP_CLIENT.logger(f"api-list-{unique_id}") + # write logs + log_count = 5 + for i in range(log_count): + gapic_logger.log_text(f"test {i}") + + def retryable(): + max_results = 3 + gapic_generator = gapic_logger.list_entries(max_results=max_results) + http_generator = http_logger.list_entries(max_results=max_results) + # returned objects should be consistent + self.assertEqual(type(gapic_generator), type(http_generator)) + gapic_list, http_list = list(gapic_generator), list(http_generator) + # max_results should limit the number of logs returned + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # returned logs should be the same + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # should return in ascending order + self.assertEqual(gapic_list[0].payload, "test 0") + # test reverse ordering + gapic_generator = gapic_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + http_generator = http_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + gapic_list, http_list = list(gapic_generator), list(http_generator) + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # http and gapic results should be consistent + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # returned logs should be in descending order + self.assertEqual(gapic_list[0].payload, f"test {log_count-1}") + + RetryErrors( + (ServiceUnavailable, InternalServerError, AssertionError), + delay=2, + backoff=2, + max_tries=3, + )(retryable)() + class _DeleteWrapper(object): def __init__(self, publisher, topic_path): diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 5da1c71222dd..d8c4bf57eb37 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -32,7 +32,7 @@ FILTER = "logName:syslog AND severity>=ERROR" -class Test_LoggingAPI(object): +class Test_LoggingAPI(unittest.TestCase): LOG_NAME = "log_name" LOG_PATH = f"projects/{PROJECT}/logs/{LOG_NAME}" @@ -107,6 +107,49 @@ def test_list_entries_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_logs_with_max_results(self): + client = self.make_logging_api() + log_entry_msg = LogEntryPB(log_name=self.LOG_PATH, text_payload="text") + + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse( + entries=[log_entry_msg, log_entry_msg] + ) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=1, + ) + + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_logs_negative_max_results(self): + client = self.make_logging_api() + + with 
self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse(entries=[]) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=-1, + ) + # Check the request + list(result) + call.assert_called_once() + def test_write_entries_single(self): client = self.make_logging_api() @@ -141,7 +184,7 @@ def test_logger_delete(self): assert call.call_args.args[0].log_name == self.LOG_PATH -class Test_SinksAPI(object): +class Test_SinksAPI(unittest.TestCase): SINK_NAME = "sink_name" PARENT_PATH = f"projects/{PROJECT}" SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" @@ -208,6 +251,40 @@ def test_list_sinks_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_sinks_with_max_results(self): + client = self.make_sinks_api() + sink_msg = LogSink( + name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER + ) + + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse( + sinks=[sink_msg, sink_msg] + ) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_sinks_negative_max_results(self): + client = self.make_sinks_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[]) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_sink_create(self): client = self.make_sinks_api() with mock.patch.object( @@ -315,7 +392,7 @@ def test_sink_delete(self): assert request.sink_name == self.SINK_PATH -class Test_MetricsAPI(object): +class Test_MetricsAPI(unittest.TestCase): METRIC_NAME = "metric_name" METRIC_PATH = f"projects/{PROJECT}/metrics/{METRIC_NAME}" DESCRIPTION = "Description" @@ -379,6 +456,39 @@ def test_list_metrics_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_metrics_with_max_results(self): + client = self.make_metrics_api() + metric = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse( + metrics=[metric, metric] + ) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_metrics_negative_max_results(self): + client = self.make_metrics_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse(metrics=[]) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_metric_create(self): client = self.make_metrics_api() diff --git 
a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index e927f6c1555a..2154b6f5735b 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -129,16 +129,20 @@ def _make_timestamp(): NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) return NOW, _datetime_to_rfc3339_w_nanos(NOW) - def test_list_entries_no_paging(self): + def test_list_entries_with_limits(self): from google.cloud.logging import Client from google.cloud.logging import TextEntry from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" + IID1 = "IID1" + IID2 = "IID2" TEXT = "TEXT" SENT = {"resourceNames": [self.PROJECT_PATH]} - TOKEN = "TOKEN" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" RETURNED = { "entries": [ { @@ -147,24 +151,42 @@ def test_list_entries_no_paging(self): "resource": {"type": "global"}, "timestamp": TIMESTAMP, "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", - } + }, + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ], - "nextPageToken": TOKEN, } client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) + # try with negative max_results + with self.assertRaises(ValueError): + client._connection = _Connection(RETURNED) + api = self._make_one(client) + empty = list(api.list_entries([self.PROJECT_PATH], max_results=-1)) + # try with max_results of 0 client._connection = _Connection(RETURNED) api = self._make_one(client) - - iterator = api.list_entries([self.PROJECT_PATH]) - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the entries returned. + empty = list(api.list_entries([self.PROJECT_PATH], max_results=0)) + self.assertEqual(empty, []) + # try with single result + client._connection = _Connection(RETURNED) + api = self._make_one(client) + iterator = api.list_entries([self.PROJECT_PATH], max_results=1) + entries = list(iterator) + # check the entries returned. self.assertEqual(len(entries), 1) entry = entries[0] self.assertIsInstance(entry, TextEntry) @@ -183,7 +205,7 @@ def test_list_entries_no_paging(self): called_with, {"method": "POST", "path": expected_path, "data": SENT} ) - def test_list_entries_w_paging(self): + def test_list_entries(self): from google.cloud.logging import DESCENDING from google.cloud.logging import Client from google.cloud.logging import Logger @@ -241,11 +263,8 @@ def test_list_entries_w_paging(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - # First check the token. - self.assertIsNone(token) - # Then check the entries returned. + # Check the entries returned. 
self.assertEqual(len(entries), 2) entry1 = entries[0] self.assertIsInstance(entry1, StructEntry) @@ -361,32 +380,38 @@ def test_ctor(self): self.assertIs(api._client, client) self.assertEqual(api.api_request, connection.api_request) - def test_list_sinks_no_paging(self): + def test_list_sinks_max_returned(self): from google.cloud.logging import Sink - TOKEN = "TOKEN" RETURNED = { "sinks": [ { "name": self.SINK_PATH, "filter": self.FILTER, "destination": self.DESTINATION_URI, - } + }, + {"name": "test", "filter": "test", "destination": "test"}, ], - "nextPageToken": TOKEN, } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=-1)) + # try with max_results of 0 conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT_PATH) - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=0)) + self.assertEqual(empty, []) + # try with single result + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + iterator = api.list_sinks(self.PROJECT_PATH, max_results=1) + sinks = list(iterator) + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -401,7 +426,7 @@ def test_list_sinks_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_sinks_w_paging(self): + def test_list_sinks(self): from google.cloud.logging import Sink TOKEN = "TOKEN" @@ -423,11 +448,7 @@ def test_list_sinks_w_paging(self): self.PROJECT_PATH, page_size=PAGE_SIZE, page_token=TOKEN ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -632,26 +653,35 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_list_metrics_no_paging(self): + def test_list_metrics_max_results(self): from google.cloud.logging import Metric - TOKEN = "TOKEN" RETURNED = { - "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], - "nextPageToken": TOKEN, + "metrics": [ + {"name": self.METRIC_PATH, "filter": self.FILTER}, + {"name": "test", "filter": "test"}, + ], } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=-1)) + # try with max_results of 0 + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=0)) + self.assertEqual(empty, []) + # try with single result conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_metrics(self.PROJECT) - page = next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. + iterator = api.list_metrics(self.PROJECT, max_results=1) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) @@ -666,7 +696,7 @@ def test_list_metrics_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_metrics_w_paging(self): + def test_list_metrics(self): from google.cloud.logging import Metric TOKEN = "TOKEN" @@ -678,11 +708,7 @@ def test_list_metrics_w_paging(self): iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. + # Check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 46526fb219e6..1a31e9c0c277 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -259,7 +259,6 @@ def test_list_entries_defaults(self): IID = "IID" TEXT = "TEXT" - TOKEN = "TOKEN" ENTRIES = [ { "textPayload": TEXT, @@ -272,13 +271,11 @@ def test_list_entries_defaults(self): client = self._make_one( project=self.PROJECT, credentials=creds, _use_grpc=False ) - returned = {"entries": ENTRIES, "nextPageToken": TOKEN} + returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 1) entry = entries[0] @@ -289,7 +286,6 @@ def test_list_entries_defaults(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(token, TOKEN) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -342,6 +338,12 @@ def test_list_entries_explicit(self): "resource": {"type": "global"}, "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -355,13 +357,10 @@ def test_list_entries_explicit(self): order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN, + max_results=2, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -423,7 +422,6 @@ def test_list_entries_explicit_timestamp(self): PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} PROTO_PAYLOAD = PAYLOAD.copy() PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" - TOKEN = "TOKEN" PAGE_SIZE = 42 ENTRIES = [ { @@ -450,14 +448,9 @@ def test_list_entries_explicit_timestamp(self): filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, - page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. 
self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -491,7 +484,6 @@ def test_list_entries_explicit_timestamp(self): "filter": INPUT_FILTER, "orderBy": DESCENDING, "pageSize": PAGE_SIZE, - "pageToken": TOKEN, "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, @@ -529,7 +521,6 @@ def test_list_sinks_no_paging(self): from google.cloud.logging import Sink PROJECT = "PROJECT" - TOKEN = "TOKEN" SINK_NAME = "sink_name" FILTER = "logName:syslog AND severity>=ERROR" SINKS = [ @@ -538,17 +529,13 @@ def test_list_sinks_no_paging(self): client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"sinks": SINKS, "nextPageToken": TOKEN} + returned = {"sinks": SINKS} client._connection = _Connection(returned) iterator = client.list_sinks() - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token + sinks = list(iterator) - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -573,7 +560,8 @@ def test_list_sinks_with_paging(self): TOKEN = "TOKEN" PAGE_SIZE = 42 SINKS = [ - {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} + {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI}, + {"name": "test", "filter": "test", "destination": "test"}, ] client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -581,13 +569,11 @@ def test_list_sinks_with_paging(self): returned = {"sinks": SINKS} client._connection = _Connection(returned) - iterator = client.list_sinks(page_size=PAGE_SIZE, page_token=TOKEN) + iterator = client.list_sinks( + page_size=PAGE_SIZE, page_token=TOKEN, max_results=1 + ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -678,29 +664,27 @@ def test_list_metrics_with_paging(self): from google.cloud.logging import Metric token = "TOKEN" - next_token = "T00KEN" page_size = 42 metrics = [ { "name": self.METRIC_NAME, "filter": self.FILTER, "description": self.DESCRIPTION, - } + }, + {"name": "test", "filter": "test", "description": "test"}, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"metrics": metrics, "nextPageToken": next_token} + returned = {"metrics": metrics} client._connection = _Connection(returned) # Execute request. - iterator = client.list_metrics(page_size=page_size, page_token=token) - page = next(iterator.pages) - metrics = list(page) - - # First check the token. - self.assertEqual(iterator.next_page_token, next_token) - # Then check the metrics returned. + iterator = client.list_metrics( + page_size=page_size, page_token=token, max_results=1 + ) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 0d8fd12085ef..ef13c923c542 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -605,23 +605,18 @@ def test_delete_w_alternate_client(self): def test_list_entries_defaults(self): from google.cloud.logging import Client - TOKEN = "TOKEN" - client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"nextPageToken": TOKEN} + returned = {} client._connection = _Connection(returned) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 0) - self.assertEqual(token, TOKEN) LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) # check call payload @@ -668,10 +663,8 @@ def test_list_entries_explicit(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -728,10 +721,8 @@ def test_list_entries_explicit_timestamp(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) @@ -751,6 +742,100 @@ def test_list_entries_explicit_timestamp(self): }, ) + def test_list_entries_limit(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger + from google.cloud.logging import Client + + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + INPUT_FILTER = "logName:LOGNAME" + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" + PAGE_SIZE = 42 + ENTRIES = [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + ] + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"entries": ENTRIES} + client._connection = _Connection(returned) + logger = self._make_one(self.LOGGER_NAME, client=client) + + iterator = logger.list_entries( + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + filter_=INPUT_FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + max_results=2, + ) + entries = list(iterator) + # Check the entries. 
+ self.assertEqual(len(entries), 2) + entry = entries[0] + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertIs(entries[0].logger, entries[1].logger) + + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" + self.assertEqual( + call_payload_no_filter, + { + "path": "/entries:list", + "method": "POST", + "data": { + "filter": "removed", + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + }, + }, + ) + class TestBatch(unittest.TestCase): From 19f954091e80d5ee5be74c4b7bb0419f2bc9f823 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Oct 2021 12:59:57 -0700 Subject: [PATCH 586/855] feat: avoid importing grpc when explicitly disabled (#416) --- .../google/cloud/logging_v2/client.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 7098c8baa843..92ab72a3a3ae 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -18,13 +18,6 @@ import os import sys -try: - from google.cloud.logging_v2 import _gapic -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - _gapic = None -else: - _HAVE_GRPC = True import google.api_core.client_options from google.cloud.client import ClientWithProject @@ -48,6 +41,19 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_HAVE_GRPC = False + +try: + if not _DISABLE_GRPC: + # only import if DISABLE_GRPC is not set + from google.cloud.logging_v2 import _gapic + + _HAVE_GRPC = True +except ImportError: # pragma: NO COVER + # could not import gapic library. Fall back to HTTP mode + _HAVE_GRPC = False + _gapic = None + _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC _GAE_RESOURCE_TYPE = "gae_app" From f32d6cec7df10fb16d5162a913a148f84d2352cc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Nov 2021 13:54:00 -0800 Subject: [PATCH 587/855] feat!: make logging API more friendly to use (#422) --- .../google/cloud/logging_v2/logger.py | 30 +++++- .../tests/system/test_system.py | 19 ++++ .../tests/unit/test_logger.py | 101 ++++++++++++++++++ 3 files changed, 145 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 404871bef6fe..542e4d62905f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -45,6 +45,8 @@ ("source_location", None), ) +_STRUCT_EXTRACTABLE_FIELDS = ["severity", "trace", "span_id"] + class Logger(object): """Loggers represent named targets for log entries. 
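Two caller-facing behaviors from this pair of patches, sketched for illustration under stated assumptions. First, the import gating above: assuming DISABLE_GRPC here resolves to the GOOGLE_CLOUD_DISABLE_GRPC environment variable name from google.cloud.environment_vars, setting it before import keeps grpc and the gapic modules entirely out of the process:

import os

# must be set before google.cloud.logging_v2 is imported
os.environ["GOOGLE_CLOUD_DISABLE_GRPC"] = "true"

from google.cloud import logging_v2

client = logging_v2.Client()  # _use_grpc resolves to False; HTTP/JSON transport

Second, a sketch of the input normalization that the _do_log and log() hunks below introduce (the log name is hypothetical):

logger = client.logger("example-log")
# lower-case severity strings are upper-cased to match the enum
logger.log_text("disk nearly full", severity="warning")
# a plain dict is coerced into a Resource; a malformed dict raises TypeError
logger.log_text("hello", resource={"type": "global", "labels": {}})
# log() now dispatches on payload type: str -> log_text,
# Mapping -> log_struct, protobuf Message -> log_proto
logger.log({"message": "structured", "severity": "error"})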
@@ -133,6 +135,20 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", self.default_resource) + severity = kw.get("severity", None) + if isinstance(severity, str) and not severity.isupper(): + # convert severity to upper case, as expected by enum definition + kw["severity"] = severity.upper() + + if isinstance(kw["resource"], collections.abc.Mapping): + # if resource was passed as a dict, attempt to parse it into a + # Resource object + try: + kw["resource"] = Resource(**kw["resource"]) + except TypeError as e: + # dict couldn't be parsed as a Resource + raise TypeError("invalid resource dict") from e + if payload is not None: entry = _entry_class(payload=payload, **kw) else: @@ -186,6 +202,10 @@ def log_struct(self, info, *, client=None, **kw): kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ + for field in _STRUCT_EXTRACTABLE_FIELDS: + # attempt to copy relevant fields from the payload into the LogEntry body + if field in info and field not in kw: + kw[field] = info[field] self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): @@ -220,14 +240,14 @@ def log(self, message=None, *, client=None, **kw): kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ - entry_type = LogEntry if isinstance(message, google.protobuf.message.Message): - entry_type = ProtobufEntry + self.log_proto(message, client=client, **kw) elif isinstance(message, collections.abc.Mapping): - entry_type = StructEntry + self.log_struct(message, client=client, **kw) elif isinstance(message, str): - entry_type = TextEntry - self._do_log(client, entry_type, message, **kw) + self.log_text(message, client=client, **kw) + else: + self._do_log(client, LogEntry, message, **kw) def delete(self, logger_name=None, *, client=None): """Delete all entries in a logger via a DELETE request diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index d7e1e57d2957..cde722bd65a8 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -455,6 +455,25 @@ def test_log_empty(self): self.assertEqual(len(entries), 1) self.assertIsNone(entries[0].payload) + def test_log_struct_logentry_data(self): + logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + self.to_delete.append(logger) + + JSON_PAYLOAD = { + "message": "System test: test_log_struct_logentry_data", + "severity": "warning", + "trace": "123", + "span_id": "456", + } + logger.log(JSON_PAYLOAD) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) + self.assertEqual(entries[0].severity, "WARNING") + self.assertEqual(entries[0].trace, JSON_PAYLOAD["trace"]) + self.assertEqual(entries[0].span_id, JSON_PAYLOAD["span_id"]) + def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index ef13c923c542..5f0868ba2e45 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -379,6 +379,107 @@ def test_log_struct_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, 
None, None)) + def test_log_struct_inference(self): + """ + LogEntry fields in _STRUCT_EXTRACTABLE_FIELDS should be inferred from + the payload data if not passed as a parameter + """ + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + STRUCT = { + "message": "System test: test_log_struct_logentry_data", + "severity": "warning", + "trace": "123", + "span_id": "456", + } + RESOURCE = detect_resource(self.PROJECT)._to_dict() + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "severity": "WARNING", + "trace": "123", + "spanId": "456", + "resource": RESOURCE, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, resource=RESOURCE) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_w_dict_resource(self): + """ + Users should be able to input a dictionary with type and labels instead + of a Resource object + """ + import pytest + + MESSAGE = "hello world" + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + broken_resource_dicts = [{}, {"type": ""}, {"labels": ""}] + for resource in broken_resource_dicts: + # ensure bad inputs result in a helpful error + with pytest.raises(TypeError): + logger.log(MESSAGE, resource=resource) + # ensure well-formed dict is converted to a resource + resource = {"type": "gae_app", "labels": []} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": MESSAGE, + "resource": resource, + } + ] + logger.log(MESSAGE, resource=resource) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_lowercase_severity(self): + """ + lower case severity strings should be accepted + """ + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + for lower_severity in [ + "default", + "debug", + "info", + "notice", + "warning", + "error", + "critical", + "alert", + "emergency", + ]: + MESSAGE = "hello world" + RESOURCE = detect_resource(self.PROJECT)._to_dict() + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": MESSAGE, + "resource": RESOURCE, + "severity": lower_severity.upper(), + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(MESSAGE, severity=lower_severity) + + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None) + ) + def test_log_proto_defaults(self): from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, From 061befda273e52ca88530030989ce86aa82c446c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 12:07:04 -0800 Subject: [PATCH 588/855] feat: add json_fields extras argument for adding to jsonPayload (#447) --- .../google/cloud/logging_v2/client.py | 2 +- .../cloud/logging_v2/handlers/handlers.py | 16 +++- .../tests/system/test_system.py | 25 ++++++ .../tests/unit/handlers/test_handlers.py | 90 +++++++++++++++++++ .../unit/handlers/test_structured_log.py | 23 +++++ 5 files changed, 153 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 92ab72a3a3ae..3d5ea24fc568 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -376,7 +376,7 @@ def get_default_handler(self, **kw): if monitored_resource.type == _GAE_RESOURCE_TYPE: return CloudLoggingHandler(self, resource=monitored_resource, **kw) elif monitored_resource.type == _GKE_RESOURCE_TYPE: - return ContainerEngineHandler(**kw) + return StructuredLogHandler(**kw, project_id=self.project) elif monitored_resource.type == _GCF_RESOURCE_TYPE: # __stdout__ stream required to support structured logging on Python 3.7 kw["stream"] = kw.get("stream", sys.__stdout__) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 8d14852e1f0e..39bcbca791ad 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -221,9 +221,16 @@ def _format_and_parse_message(record, formatter_handler): record (logging.LogRecord): The record object representing the log formatter_handler (logging.Handler): The handler used to format the log """ - # if message is a dictionary, return as-is + passed_json_fields = getattr(record, "json_fields", {}) + # if message is a dictionary, use dictionary directly if isinstance(record.msg, collections.abc.Mapping): - return record.msg + payload = record.msg + # attach any extra json fields if present + if passed_json_fields and isinstance( + passed_json_fields, collections.abc.Mapping + ): + payload = {**payload, **passed_json_fields} + return payload # format message string based on superclass message = formatter_handler.format(record) try: @@ -235,6 +242,11 @@ def _format_and_parse_message(record, formatter_handler): except (json.decoder.JSONDecodeError, IndexError): # log string is not valid json pass + # if json_fields was set, create a dictionary using that + if passed_json_fields and isinstance(passed_json_fields, collections.abc.Mapping): + if message != "None": + passed_json_fields["message"] = message + return passed_json_fields # if formatted message contains no content, return None return message if message != "None" else None diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index cde722bd65a8..24050e8b310a 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -551,6 +551,31 @@ def test_handlers_w_extras(self): ) self.assertEqual(entries[0].resource.type, extra["resource"].type) + def test_handlers_w_json_fields(self): + LOG_MESSAGE = "Testing with json_field extras." 
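For reference, the json_fields contract that this system test and the unit tests below exercise, as a minimal sketch with hypothetical field names: any mapping passed through the json_fields extra is merged into the structured payload, with the formatted message stored under the "message" key.

import logging

logger = logging.getLogger("example")
logger.warning(
    "job %s finished", "backfill",
    extra={"json_fields": {"duration_ms": 1234, "ok": True}},
)
# with a CloudLoggingHandler attached, the payload is roughly:
# {"message": "job backfill finished", "duration_ms": 1234, "ok": True}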
+ LOGGER_NAME = "json_field_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + extra = {"json_fields": {"hello": "world", "two": 2}} + cloud_logger.warning(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + payload = entries[0].payload + self.assertEqual(payload["message"], LOG_MESSAGE) + self.assertEqual(payload["hello"], "world") + self.assertEqual(payload["two"], 2) + def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index d36dc895970f..71a709b6a9da 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -447,6 +447,40 @@ def test_emit_dict(self): ), ) + def test_emit_w_json_extras(self): + """ + User can add json_fields to the record, which should populate the payload + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = "message" + json_fields = {"hello": "world"} + logname = "logname" + expected_label = {"python_logger": logname} + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + setattr(record, "json_fields", json_fields) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + {"message": "message", "hello": "world"}, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), + ) + def test_emit_with_encoded_json(self): """ Handler should parse json encoded as a string @@ -608,6 +642,62 @@ def test_broken_encoded_dict(self): result = _format_and_parse_message(record, handler) self.assertEqual(result, message) + def test_json_fields(self): + """ + record.json_fields should populate the json payload + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "hello" + json_fields = {"key": "val"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, {"message": message, "key": "val"}) + + def test_empty_json_fields(self): + """ + empty json_fields dictionaries should result in a string output + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "hello" + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", {}) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, message) + + def test_json_fields_empty_message(self): + """ + empty message fields should not be added to json_fields dictionaries + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = None + json_fields = {"key": "val"} + record = logging.LogRecord("logname", None, None, None, message, None, None) +
setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result, json_fields) + + def test_json_fields_with_json_message(self): + """ + if json_fields and message are both dicts, they should be combined + """ + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = {"key_m": "val_m"} + json_fields = {"key_j": "val_j"} + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + result = _format_and_parse_message(record, handler) + self.assertEqual(result["key_m"], message["key_m"]) + self.assertEqual(result["key_j"], json_fields["key_j"]) + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index d9dfa2512618..08e4c2906470 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -321,3 +321,26 @@ def test_format_overrides(self): result = json.loads(handler.format(record)) for (key, value) in expected_payload.items(): self.assertEqual(value, result[key]) + + def test_format_with_json_fields(self): + """ + User can add json_fields to the record, which should populate the payload + """ + import logging + import json + + handler = self._make_one() + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + json_fields = {"hello": "world", "number": 12} + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual(result["message"], expected_result) + self.assertEqual(result["hello"], "world") + self.assertEqual(result["number"], 12) From 9e42ec627e98b9365fd4cc0c1929ebf656c625a9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 13:39:59 -0800 Subject: [PATCH 589/855] fix: allow reading logs from non-project paths (#444) --- .../google/cloud/logging_v2/entries.py | 17 ++-- .../tests/unit/test_entries.py | 84 ++++++++++++++++++- .../tests/unit/test_logger.py | 30 +++++++ 3 files changed, 124 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py index 0af5a46f72ba..cb485da61189 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -46,11 +46,12 @@ ) -def logger_name_from_path(path): +def logger_name_from_path(path, project=None): """Validate a logger URI path and get the logger name. Args: path (str): URI path for a logger API request + project (str): The project the path is expected to belong to. Returns: str: Logger name parsed from ``path``. Raises: ValueError: If the ``path`` is ill-formed or if the project from ``path`` does not agree with the ``project`` passed in.
""" - return _name_from_project_path(path, None, _LOGGER_TEMPLATE) + return _name_from_project_path(path, project, _LOGGER_TEMPLATE) def _int_or_none(value): @@ -155,7 +156,8 @@ def from_api_repr(cls, resource, client, *, loggers=None): Client which holds credentials and project configuration. loggers (Optional[dict]): A mapping of logger fullnames -> loggers. If not - passed, the entry will have a newly-created logger. + passed, the entry will have a newly-created logger if possible, + or an empty logger field if not. Returns: google.cloud.logging.entries.LogEntry: Log entry parsed from ``resource``. @@ -165,8 +167,13 @@ def from_api_repr(cls, resource, client, *, loggers=None): logger_fullname = resource["logName"] logger = loggers.get(logger_fullname) if logger is None: - logger_name = logger_name_from_path(logger_fullname) - logger = loggers[logger_fullname] = client.logger(logger_name) + # attempt to create a logger if possible + try: + logger_name = logger_name_from_path(logger_fullname, client.project) + logger = loggers[logger_fullname] = client.logger(logger_name) + except ValueError: + # log name is not scoped to a project. Leave logger as None + pass payload = cls._extract_payload(resource) insert_id = resource.get("insertId") timestamp = resource.get("timestamp") diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index b8795b8ce1e5..6f3af684fe6e 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -18,10 +18,10 @@ class Test_logger_name_from_path(unittest.TestCase): - def _call_fut(self, path): + def _call_fut(self, path, project=None): from google.cloud.logging_v2.entries import logger_name_from_path - return logger_name_from_path(path) + return logger_name_from_path(path, project) def test_w_simple_name(self): LOGGER_NAME = "LOGGER_NAME" @@ -37,6 +37,30 @@ def test_w_name_w_all_extras(self): logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) + def test_w_wrong_project(self): + LOGGER_NAME = "LOGGER_NAME" + IN_PROJECT = "in-project" + PATH_PROJECT = "path-project" + PATH = "projects/%s/logs/%s" % (PATH_PROJECT, LOGGER_NAME) + with self.assertRaises(ValueError): + self._call_fut(PATH, IN_PROJECT) + + def test_invalid_inputs(self): + invalid_list = [ + "", + "abc/123/logs/456", + "projects//logs/", + "projects/123/logs", + "projects/123logs/", + "projects123/logs", + "project/123", + "projects123logs456", + "/logs/123", + ] + for path in invalid_list: + with self.assertRaises(ValueError): + self._call_fut(path) + class Test__int_or_none(unittest.TestCase): def _call_fut(self, value): @@ -315,6 +339,62 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertEqual(entry.operation, OPERATION) self.assertIsNone(entry.payload) + def test_from_api_repr_w_folder_path(self): + from datetime import datetime + from datetime import timedelta + from google.cloud._helpers import UTC + + client = _Client(self.PROJECT) + IID = "IID" + NOW = datetime.utcnow().replace(tzinfo=UTC) + LATER = NOW + timedelta(seconds=1) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) + LOG_NAME = "folders/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LABELS = {"foo": "bar", "baz": "qux"} + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" + LINE_NO = 123 + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, 
"line": str(LINE_NO), "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} + API_REPR = { + "logName": LOG_NAME, + "insertId": IID, + "timestamp": TIMESTAMP, + "receiveTimestamp": RECEIVED, + "labels": LABELS, + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": SOURCE_LOCATION, + "operation": OPERATION, + } + klass = self._get_target_class() + + entry = klass.from_api_repr(API_REPR, client) + + self.assertEqual(entry.log_name, LOG_NAME) + self.assertIsNone(entry.logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.received_timestamp, LATER) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.trace, TRACE) + self.assertEqual(entry.span_id, SPANID) + self.assertTrue(entry.trace_sampled) + + source_location = entry.source_location + self.assertEqual(source_location["file"], FILE) + self.assertEqual(source_location["line"], LINE_NO) + self.assertEqual(source_location["function"], FUNCTION) + + self.assertEqual(entry.operation, OPERATION) + self.assertIsNone(entry.payload) + def test_to_api_repr_w_source_location_no_line(self): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 5f0868ba2e45..1eae1cda6899 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -937,6 +937,36 @@ def test_list_entries_limit(self): }, ) + def test_list_entries_folder(self): + from google.cloud.logging import TextEntry + from google.cloud.logging import Client + + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + FOLDER_ID = "123" + LOG_NAME = f"folders/{FOLDER_ID}/logs/cloudaudit.googleapis.com%2Fdata_access" + + ENTRIES = [ + { + "textPayload": "hello world", + "insertId": "1", + "resource": {"type": "global"}, + "logName": LOG_NAME, + }, + ] + returned = {"entries": ENTRIES} + client._connection = _Connection(returned) + + iterator = client.list_entries(resource_names=[f"folder/{FOLDER_ID}"],) + entries = list(iterator) + # Check the entries. 
+        self.assertEqual(len(entries), 1)
+        entry = entries[0]
+        self.assertIsInstance(entry, TextEntry)
+        self.assertIsNone(entry.logger)
+        self.assertEqual(entry.log_name, LOG_NAME)
+
 
 class TestBatch(unittest.TestCase):

From 572d6353dbc8f294b24e3b48549b0c3885e668e6 Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 10 Dec 2021 14:59:31 -0800
Subject: [PATCH 590/855] feat: trace improvements (#450)

---
 .../cloud/logging_v2/handlers/_helpers.py   | 117 ++++++---
 .../cloud/logging_v2/handlers/app_engine.py |   4 +-
 .../cloud/logging_v2/handlers/handlers.py   |  10 +-
 .../tests/system/test_system.py             |   4 +
 .../tests/unit/handlers/test__helpers.py    | 223 +++++++++++++-----
 .../tests/unit/handlers/test_app_engine.py  |   6 +-
 .../tests/unit/handlers/test_handlers.py    |  71 +++++-
 .../unit/handlers/test_structured_log.py    |  44 +++-
 8 files changed, 378 insertions(+), 101 deletions(-)

diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py
index f5dfb7c5540d..32e70dfdd5e6 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py
@@ -27,11 +27,13 @@
 from google.cloud.logging_v2.handlers.middleware.request import _get_django_request
 
 _DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH"
-_DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT"
+_DJANGO_XCLOUD_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT"
+_DJANGO_TRACEPARENT = "HTTP_TRACEPARENT"
 _DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT"
 _DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR"
 _DJANGO_REFERER_HEADER = "HTTP_REFERER"
-_FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT"
+_FLASK_XCLOUD_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT"
+_FLASK_TRACEPARENT = "TRACEPARENT"
 _PROTOCOL_HEADER = "SERVER_PROTOCOL"
 
 
@@ -62,13 +64,12 @@ def get_request_data_from_flask():
     """Get http_request and trace data from flask request headers.
 
     Returns:
-        Tuple[Optional[dict], Optional[str], Optional[str]]:
-            Data related to the current http request, trace_id, and span_id for
-            the request. All fields will be None if a django request isn't
-            found.
+        Tuple[Optional[dict], Optional[str], Optional[str], bool]:
+            Data related to the current http request, trace_id, span_id and trace_sampled
+            for the request. All fields will be None if a flask request isn't found.
     """
     if flask is None or not flask.request:
-        return None, None, None
+        return None, None, None, False
 
     # build http_request
     http_request = {
@@ -79,25 +80,29 @@
     }
 
     # find trace id and span id
-    header = flask.request.headers.get(_FLASK_TRACE_HEADER)
-    trace_id, span_id = _parse_trace_span(header)
+    # first check for w3c traceparent header
+    header = flask.request.headers.get(_FLASK_TRACEPARENT)
+    trace_id, span_id, trace_sampled = _parse_trace_parent(header)
+    if trace_id is None:
+        # traceparent not found. look for xcloud_trace_context header
+        header = flask.request.headers.get(_FLASK_XCLOUD_TRACE_HEADER)
+        trace_id, span_id, trace_sampled = _parse_xcloud_trace(header)
 
-    return http_request, trace_id, span_id
+    return http_request, trace_id, span_id, trace_sampled
 
 
 def get_request_data_from_django():
     """Get http_request and trace data from django request headers.
 
     Returns:
-        Tuple[Optional[dict], Optional[str], Optional[str]]:
-            Data related to the current http request, trace_id, and span_id for
-            the request. All fields will be None if a django request isn't
-            found.
+        Tuple[Optional[dict], Optional[str], Optional[str], bool]:
+            Data related to the current http request, trace_id, span_id, and trace_sampled
+            for the request. All fields will be None if a django request isn't found.
     """
     request = _get_django_request()
 
     if request is None:
-        return None, None, None
+        return None, None, None, False
 
     # build http_request
     http_request = {
@@ -108,34 +113,75 @@
     }
 
     # find trace id and span id
-    header = request.META.get(_DJANGO_TRACE_HEADER)
-    trace_id, span_id = _parse_trace_span(header)
+    # first check for w3c traceparent header
+    header = request.META.get(_DJANGO_TRACEPARENT)
+    trace_id, span_id, trace_sampled = _parse_trace_parent(header)
+    if trace_id is None:
+        # traceparent not found. look for xcloud_trace_context header
+        header = request.META.get(_DJANGO_XCLOUD_TRACE_HEADER)
+        trace_id, span_id, trace_sampled = _parse_xcloud_trace(header)
 
-    return http_request, trace_id, span_id
+    return http_request, trace_id, span_id, trace_sampled
 
 
-def _parse_trace_span(header):
+def _parse_trace_parent(header):
+    """Given a W3C traceparent header, extract the trace and span ids.
+    For more information see https://www.w3.org/TR/trace-context/
+
+    Args:
+        header (str): the string extracted from the traceparent header
+            example: 00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01
+    Returns:
+        Tuple[Optional[str], Optional[str], bool]:
+            The trace_id, span_id and trace_sampled extracted from the header
+            Each field will be None if the header can't be parsed in the expected format.
+    """
+    trace_id = span_id = None
+    trace_sampled = False
+    # see https://www.w3.org/TR/trace-context/ for W3C traceparent format
+    if header:
+        try:
+            VERSION_PART = r"(?!ff)[a-f\d]{2}"
+            TRACE_ID_PART = r"(?![0]{32})[a-f\d]{32}"
+            PARENT_ID_PART = r"(?![0]{16})[a-f\d]{16}"
+            FLAGS_PART = r"[a-f\d]{2}"
+            regex = f"^\\s?({VERSION_PART})-({TRACE_ID_PART})-({PARENT_ID_PART})-({FLAGS_PART})(-.*)?\\s?$"
+            match = re.match(regex, header)
+            trace_id = match.group(2)
+            span_id = match.group(3)
+            # trace-flag component is an 8-bit bit field. Read as an int
+            int_flag = int(match.group(4), 16)
+            # trace sampled is set if the right-most bit in flag component is set
+            trace_sampled = bool(int_flag & 1)
+        except (IndexError, AttributeError):
+            # could not parse header as expected. Return None
+            pass
+    return trace_id, span_id, trace_sampled
+
+
+def _parse_xcloud_trace(header):
     """Given an X_CLOUD_TRACE header, extract the trace and span ids.
 
     Args:
         header (str): the string extracted from the X_CLOUD_TRACE header
     Returns:
-        Tuple[Optional[dict], Optional[str]]:
-            The trace_id and span_id extracted from the header
+        Tuple[Optional[str], Optional[str], bool]:
+            The trace_id, span_id and trace_sampled extracted from the header
             Each field will be None if not found.
     """
-    trace_id = None
-    span_id = None
+    trace_id = span_id = None
+    trace_sampled = False
+    # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace-Context format
     if header:
         try:
-            split_header = header.split("/", 1)
-            trace_id = split_header[0]
-            header_suffix = split_header[1]
-            # the span is the set of alphanumeric characters after the /
-            span_id = re.findall(r"^\w+", header_suffix)[0]
+            regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?"
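+            # Illustrative parse (hypothetical header value, not from the tests):
+            #   "105445aa7843bc8bf206b120001000/000000001;o=1" yields
+            #   trace_id="105445aa7843bc8bf206b120001000", span_id="000000001",
+            #   trace_sampled=True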
+            match = re.match(regex, header)
+            trace_id = match.group(1)
+            span_id = match.group(3)
+            trace_sampled = match.group(5) == "1"
         except IndexError:
             pass
-    return trace_id, span_id
+    return trace_id, span_id, trace_sampled
 
 
 def get_request_data():
@@ -143,10 +189,9 @@
     frameworks (currently supported: Flask and Django).
 
     Returns:
-        Tuple[Optional[dict], Optional[str], Optional[str]]:
-            Data related to the current http request, trace_id, and span_id for
-            the request. All fields will be None if a django request isn't
-            found.
+        Tuple[Optional[dict], Optional[str], Optional[str], bool]:
+            Data related to the current http request, trace_id, span_id, and trace_sampled
+            for the request. All fields will be None if an http request isn't found.
     """
     checkers = (
         get_request_data_from_django,
@@ -154,8 +199,8 @@
     )
 
     for checker in checkers:
-        http_request, trace_id, span_id = checker()
+        http_request, trace_id, span_id, trace_sampled = checker()
         if http_request is not None:
-            return http_request, trace_id, span_id
+            return http_request, trace_id, span_id, trace_sampled
 
-    return None, None, None
+    return None, None, None, False
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py
index abd16664f73c..a65d16a0e655 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/app_engine.py
@@ -98,7 +98,7 @@ def get_gae_labels(self):
         """
         gae_labels = {}
 
-        _, trace_id, _ = get_request_data()
+        _, trace_id, _, _ = get_request_data()
         if trace_id is not None:
             gae_labels[_TRACE_ID_LABEL] = trace_id
 
@@ -115,7 +115,7 @@ def emit(self, record):
             record (logging.LogRecord): The record to be logged.
""" message = super(AppEngineHandler, self).format(record) - inferred_http, inferred_trace, _ = get_request_data() + inferred_http, inferred_trace, _, _ = get_request_data() if inferred_trace is not None: inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" # allow user overrides diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 39bcbca791ad..769146007007 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -82,7 +82,12 @@ def filter(self, record): """ user_labels = getattr(record, "labels", {}) # infer request data from the environment - inferred_http, inferred_trace, inferred_span = get_request_data() + ( + inferred_http, + inferred_trace, + inferred_span, + inferred_sampled, + ) = get_request_data() if inferred_trace is not None and self.project is not None: # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" @@ -90,6 +95,7 @@ def filter(self, record): record._resource = getattr(record, "resource", None) record._trace = getattr(record, "trace", inferred_trace) or None record._span_id = getattr(record, "span_id", inferred_span) or None + record._trace_sampled = bool(getattr(record, "trace_sampled", inferred_sampled)) record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) # add logger name as a label if possible @@ -98,6 +104,7 @@ def filter(self, record): # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" + record._trace_sampled_str = "true" if record._trace_sampled else "false" record._http_request_str = json.dumps( record._http_request or {}, ensure_ascii=False ) @@ -205,6 +212,7 @@ def emit(self, record): labels=labels, trace=record._trace, span_id=record._span_id, + trace_sampled=record._trace_sampled, http_request=record._http_request, source_location=record._source_location, ) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 24050e8b310a..90b4059d6efe 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -454,6 +454,7 @@ def test_log_empty(self): self.assertEqual(len(entries), 1) self.assertIsNone(entries[0].payload) + self.assertFalse(entries[0].trace_sampled) def test_log_struct_logentry_data(self): logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) @@ -473,6 +474,7 @@ def test_log_struct_logentry_data(self): self.assertEqual(entries[0].severity, "WARNING") self.assertEqual(entries[0].trace, JSON_PAYLOAD["trace"]) self.assertEqual(entries[0].span_id, JSON_PAYLOAD["span_id"]) + self.assertFalse(entries[0].trace_sampled) def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -534,6 +536,7 @@ def test_handlers_w_extras(self): extra = { "trace": "123", "span_id": "456", + "trace_sampled": True, "http_request": expected_request, "source_location": expected_source, "resource": Resource(type="cloudiot_device", labels={}), @@ -545,6 +548,7 @@ def test_handlers_w_extras(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].trace, extra["trace"]) self.assertEqual(entries[0].span_id, 
extra["span_id"]) + self.assertTrue(entries[0].trace_sampled) self.assertEqual(entries[0].http_request, expected_request) self.assertEqual( entries[0].labels, {**extra["labels"], "python_logger": LOGGER_NAME} diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index d26e700e8b50..9946c8eb5c7d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -16,10 +16,10 @@ import mock -_FLASK_TRACE_ID = "flask-id" +_FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} -_DJANGO_TRACE_ID = "django-id" +_DJANGO_TRACE_ID = "django0id" _DJANGO_SPAN_ID = "span0django" _DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} @@ -29,7 +29,8 @@ class Test_get_request_data_from_flask(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_flask() + http, trace, span, sampled = _helpers.get_request_data_from_flask() + return http, trace, span, sampled @staticmethod def create_app(): @@ -46,17 +47,18 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertIsNone(trace_id) self.assertIsNone(span_id) + self.assertEqual(sampled, False) self.assertEqual(http_request["requestMethod"], "GET") - def test_valid_context_header(self): + def test_xcloud_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID expected_span_id = _FLASK_SPAN_ID - flask_trace_id = f"{expected_trace_id}/{expected_span_id}" + flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" app = self.create_app() context = app.test_request_context( @@ -64,10 +66,30 @@ def test_valid_context_header(self): ) with context: - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + flask_trace_header = "TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + flask_trace_id = f"00-{expected_trace_id}-{expected_span_id}-01" + + app = self.create_app() + context = app.test_request_context( + path="/", headers={flask_trace_header: flask_trace_id} + ) + + with context: + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -106,7 +128,8 @@ class Test_get_request_data_from_django(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_django() + http, trace, span, sampled = _helpers.get_request_data_from_django() + return http, trace, span, sampled def setUp(self): from django.conf import settings @@ -131,20 +154,21 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) 
middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(http_request["requestMethod"], "GET") self.assertIsNone(trace_id) self.assertIsNone(span_id) + self.assertEqual(sampled, False) - def test_valid_context_header(self): + def test_xcloud_header(self): from django.test import RequestFactory from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" expected_span_id = _DJANGO_SPAN_ID expected_trace_id = _DJANGO_TRACE_ID - django_trace_id = f"{expected_trace_id}/{expected_span_id}" + django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -152,10 +176,31 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + django_trace_header = "HTTP_TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + header = f"00-{expected_trace_id}-{expected_span_id}-01" + + django_request = RequestFactory().get("/", **{django_trace_header: header}) + + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -203,7 +248,8 @@ class Test_get_request_data(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data() + http, trace, span, sampled = _helpers.get_request_data() + return http, trace, span, sampled def _helper(self, django_return, flask_return): django_patch = mock.patch( @@ -222,8 +268,13 @@ def _helper(self, django_return, flask_return): return django_mock, flask_mock, result def test_from_django(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) - flask_expected = (None, None, None) + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, django_expected) @@ -231,8 +282,8 @@ def test_from_django(self): flask_mock.assert_not_called() def test_from_flask(self): - django_expected = (None, None, None) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, flask_expected) @@ -241,8 +292,13 @@ def test_from_flask(self): flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_expected = 
(_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID)
-        flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID)
+        django_expected = (
+            _DJANGO_HTTP_REQUEST,
+            _DJANGO_TRACE_ID,
+            _DJANGO_SPAN_ID,
+            False,
+        )
+        flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False)
 
         django_mock, flask_mock, output = self._helper(django_expected, flask_expected)
 
@@ -253,19 +309,19 @@ def test_from_django_and_flask(self):
         flask_mock.assert_not_called()
 
     def test_missing_http_request(self):
-        flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID)
-        django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID)
+        flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID, True)
+        django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID, True)
 
         django_mock, flask_mock, output = self._helper(django_expected, flask_expected)
         # function only returns trace if http_request data is present
-        self.assertEqual(output, (None, None, None))
+        self.assertEqual(output, (None, None, None, False))
 
         django_mock.assert_called_once_with()
         flask_mock.assert_called_once_with()
 
     def test_missing_trace_id(self):
-        flask_expected = (_FLASK_HTTP_REQUEST, None, None)
-        django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID)
+        flask_expected = (_FLASK_HTTP_REQUEST, None, None, False)
+        django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID, True)
 
         django_mock, flask_mock, output = self._helper(django_expected, flask_expected)
         # trace_id is optional
@@ -275,77 +331,136 @@ def test_missing_both(self):
-        flask_expected = (None, None, None)
-        django_expected = (None, None, None)
+        flask_expected = (None, None, None, False)
+        django_expected = (None, None, None, False)
 
         django_mock, flask_mock, output = self._helper(django_expected, flask_expected)
-        self.assertEqual(output, (None, None, None))
+        self.assertEqual(output, (None, None, None, False))
 
         django_mock.assert_called_once_with()
         flask_mock.assert_called_once_with()
 
     def test_wo_libraries(self):
         output = self._call_fut()
-        self.assertEqual(output, (None, None, None))
+        self.assertEqual(output, (None, None, None, False))
 
 
-class Test__parse_trace_span(unittest.TestCase):
+class Test__parse_xcloud_trace(unittest.TestCase):
     @staticmethod
     def _call_fut(header):
         from google.cloud.logging_v2.handlers import _helpers
 
-        return _helpers._parse_trace_span(header)
+        trace, span, sampled = _helpers._parse_xcloud_trace(header)
+        return trace, span, sampled
 
     def test_empty_header(self):
         header = ""
-        trace_id, span_id = self._call_fut(header)
-        self.assertEqual(trace_id, None)
-        self.assertEqual(span_id, None)
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertIsNone(trace_id)
+        self.assertIsNone(span_id)
+        self.assertEqual(sampled, False)
 
     def test_no_span(self):
         header = "12345"
-        trace_id, span_id = self._call_fut(header)
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, header)
-        self.assertEqual(span_id, None)
+        self.assertIsNone(span_id)
+        self.assertEqual(sampled, False)
 
     def test_no_trace(self):
         header = "/12345"
-        trace_id, span_id = self._call_fut(header)
-        self.assertEqual(trace_id, "")
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertIsNone(trace_id)
         self.assertEqual(span_id, "12345")
+        self.assertEqual(sampled, False)
 
     def test_with_span(self):
         expected_trace = "12345"
         expected_span = "67890"
         header = f"{expected_trace}/{expected_span}"
-        trace_id, span_id = self._call_fut(header)
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
         self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, False)
 
     def test_with_extra_characters(self):
         expected_trace = "12345"
         expected_span = "67890"
-        header = f"{expected_trace}/{expected_span};o=0"
-        trace_id, span_id = self._call_fut(header)
+        header = f"{expected_trace}/{expected_span};abc"
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
         self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, False)
 
-    def test_with_unicode_span(self):
-        """
-        Spans are expected to be alphanumeric
-        """
+    def test_with_explicit_no_sampled(self):
         expected_trace = "12345"
-        header = f"{expected_trace}/😀123"
-        trace_id, span_id = self._call_fut(header)
+        expected_span = "67890"
+        header = f"{expected_trace}/{expected_span};o=0"
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
-        self.assertEqual(span_id, None)
+        self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, False)
 
-    def test_with_unicode_trace(self):
-        """
-        Spans are expected to be alphanumeric
-        """
-        expected_trace = "12😀345"
+    def test_with_sampled(self):
+        expected_trace = "12345"
         expected_span = "67890"
-        header = f"{expected_trace}/{expected_span}"
-        trace_id, span_id = self._call_fut(header)
+        header = f"{expected_trace}/{expected_span};o=1"
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
         self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, True)
+
+
+class Test__parse_trace_parent(unittest.TestCase):
+    @staticmethod
+    def _call_fut(header):
+        from google.cloud.logging_v2.handlers import _helpers
+
+        trace, span, sampled = _helpers._parse_trace_parent(header)
+        return trace, span, sampled
+
+    def test_empty_header(self):
+        header = ""
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertIsNone(trace_id)
+        self.assertIsNone(span_id)
+        self.assertEqual(sampled, False)
+
+    def test_valid_header(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, True)
+
+    def test_not_sampled(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, False)
+
+    def test_sampled_w_other_flags(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-09"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, True)
+
+    def test_invalid_headers(self):
+        invalid_headers = [
+            "",
+            "test",
+            "ff-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01",  # invalid version
+            "00-00000000000000000000000000000000-b7ad6b7169203331-01",  # invalid trace
+            "00-0af7651916cd43dd8448eb211c80319c-0000000000000000-01",  # invalid span
+            "00-af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00",
+            "00-0af7651916cd43dd8448eb211c80319c-bad6b7169203331-00",
+            "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-0",
+            "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-",
+            "00-0af7651916cd43dd8448eb211c80319c-00",
+        ]
+        for header in
invalid_headers: + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index c726c8496df9..8eedfad9b053 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -97,7 +97,7 @@ def test_emit(self): expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(expected_http_request, trace_id, None), + return_value=(expected_http_request, trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -135,7 +135,7 @@ def test_emit_manual_field_override(self): inferred_trace_id = "trace-test" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(inferred_http_request, inferred_trace_id, None), + return_value=(inferred_http_request, inferred_trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -180,7 +180,7 @@ def test_emit_manual_field_override(self): def _get_gae_labels_helper(self, trace_id): get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(None, trace_id, None), + return_value=(None, trace_id, None, None), ) client = mock.Mock(project=self.PROJECT, spec=["project"]) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 71a709b6a9da..bbfacf59faa5 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -84,6 +84,8 @@ def test_filter_record(self): self.assertIsNone(record._resource) self.assertIsNone(record._trace) self.assertEqual(record._trace_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") self.assertIsNone(record._http_request) @@ -112,6 +114,8 @@ def test_minimal_record(self): self.assertEqual(record._trace_str, "") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") self.assertIsNone(record._labels) @@ -131,7 +135,7 @@ def test_record_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - combined_trace = f"{expected_trace}/{expected_span}" + combined_trace = f"{expected_trace}/{expected_span};o=1" expected_request = { "requestMethod": "GET", "requestUrl": expected_path, @@ -154,6 +158,47 @@ def test_record_with_request(self): self.assertEqual(record._trace_str, expected_trace) self.assertEqual(record._span_id, expected_span) self.assertEqual(record._span_id_str, expected_span) + self.assertTrue(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "true") + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + + def 
test_record_with_traceparent_request(self):
+        """
+        test filter adds http request data when available
+        """
+        import logging
+
+        filter_obj = self._make_one()
+        record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,)
+        record.created = None
+
+        expected_path = "http://testserver/123"
+        expected_agent = "Mozilla/5.0"
+        expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736"
+        expected_span = "00f067aa0ba902b7"
+        combined_trace = f"00-{expected_trace}-{expected_span}-03"
+        expected_request = {
+            "requestMethod": "GET",
+            "requestUrl": expected_path,
+            "userAgent": expected_agent,
+            "protocol": "HTTP/1.1",
+        }
+
+        app = self.create_app()
+        with app.test_request_context(
+            expected_path,
+            headers={"User-Agent": expected_agent, "TRACEPARENT": combined_trace},
+        ):
+            success = filter_obj.filter(record)
+            self.assertTrue(success)
+
+        self.assertEqual(record._trace, expected_trace)
+        self.assertEqual(record._trace_str, expected_trace)
+        self.assertEqual(record._span_id, expected_span)
+        self.assertEqual(record._span_id_str, expected_span)
+        self.assertTrue(record._trace_sampled)
+        self.assertEqual(record._trace_sampled_str, "true")
         self.assertEqual(record._http_request, expected_request)
         self.assertEqual(record._http_request_str, json.dumps(expected_request))
 
@@ -306,6 +351,7 @@ def test_emit(self):
                 {"python_logger": logname},
                 None,
                 None,
+                False,
                 None,
                 None,
             ),
@@ -322,7 +368,7 @@ def test_emit_minimal(self):
         handler.handle(record)
         self.assertEqual(
             handler.transport.send_called_with,
-            (record, None, _GLOBAL_RESOURCE, None, None, None, None, None,),
+            (record, None, _GLOBAL_RESOURCE, None, None, None, False, None, None,),
         )
 
     def test_emit_manual_field_override(self):
@@ -350,6 +396,8 @@
         setattr(record, "trace", expected_trace)
         expected_span = "456"
         setattr(record, "span_id", expected_span)
+        expected_sampled = True
+        setattr(record, "trace_sampled", expected_sampled)
         expected_http = {"request_url": "manual"}
         setattr(record, "http_request", expected_http)
         expected_source = {"file": "test-file"}
@@ -375,6 +423,7 @@
                 expected_labels,
                 expected_trace,
                 expected_span,
+                expected_sampled,
                 expected_http,
                 expected_source,
             ),
@@ -410,6 +459,7 @@ def test_emit_with_custom_formatter(self):
                 expected_label,
                 None,
                 None,
+                False,
                 None,
                 None,
             ),
@@ -442,6 +492,7 @@ def test_emit_dict(self):
                 expected_label,
                 None,
                 None,
+                False,
                 None,
                 None,
             ),
@@ -476,6 +527,7 @@ def test_emit_w_json_extras(self):
                 expected_label,
                 None,
                 None,
+                False,
                 None,
                 None,
             ),
@@ -508,6 +560,7 @@ def test_emit_with_encoded_json(self):
                 expected_label,
                 None,
                 None,
+                False,
                 None,
                 None,
             ),
@@ -533,7 +586,17 @@ def test_format_with_arguments(self):
 
         self.assertEqual(
             handler.transport.send_called_with,
-            (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,),
+            (
+                record,
+                expected_result,
+                _GLOBAL_RESOURCE,
+                None,
+                None,
+                None,
+                False,
+                None,
+                None,
+            ),
         )
 
 
@@ -809,6 +872,7 @@ def send(
         labels=None,
         trace=None,
         span_id=None,
+        trace_sampled=None,
         http_request=None,
         source_location=None,
     ):
@@ -819,6 +883,7 @@
             labels,
             trace,
             span_id,
+            trace_sampled,
             http_request,
             source_location,
         )
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py
index 08e4c2906470..5db098c29a8b 100644
--- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py
+++
b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -66,6 +66,7 @@ def test_format(self): "severity": record.levelname, "logging.googleapis.com/trace": "", "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": { "file": pathname, "line": lineno, @@ -95,6 +96,7 @@ def test_format_minimal(self): "severity": "INFO", "logging.googleapis.com/trace": "", "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, "logging.googleapis.com/labels": {}, @@ -242,10 +244,11 @@ def test_format_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - trace_header = f"{expected_trace}/{expected_span};o=0" + trace_header = f"{expected_trace}/{expected_span};o=1" expected_payload = { "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, "httpRequest": { "requestMethod": "GET", "requestUrl": expected_path, @@ -267,6 +270,41 @@ def test_format_with_request(self): for (key, value) in expected_payload.items(): self.assertEqual(value, result[key]) + def test_format_with_traceparent(self): + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + trace_header = f"00-{expected_trace}-{expected_span}-09" + expected_payload = { + "logging.googleapis.com/trace": expected_trace, + "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, + "httpRequest": { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={"User-Agent": expected_agent, "TRACEPARENT": trace_header}, + ): + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) + def test_format_overrides(self): """ Allow users to override log fields using `logging.info("", extra={})` @@ -289,17 +327,19 @@ def test_format_overrides(self): inferred_path = "http://testserver/123" overwrite_trace = "abc" overwrite_span = "def" - inferred_trace_span = "123/456;" + inferred_trace_span = "123/456;o=1" overwrite_file = "test-file" record.http_request = {"requestUrl": overwrite_path} record.source_location = {"file": overwrite_file} record.trace = overwrite_trace record.span_id = overwrite_span + record.trace_sampled = False added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} record.labels = added_labels expected_payload = { "logging.googleapis.com/trace": overwrite_trace, "logging.googleapis.com/spanId": overwrite_span, + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {"file": overwrite_file}, "httpRequest": {"requestUrl": overwrite_path}, "logging.googleapis.com/labels": { From 3927ebc97c3d60692ea54d1123992621a2045115 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 14:16:17 -0800 Subject: [PATCH 591/855] docs: update usage guide for v3.0.0 (#456) --- 
packages/google-cloud-logging/UPGRADING.md | 130 ++++++- .../docs/direct-lib-usage.rst | 330 ++++++++++++++++ .../google-cloud-logging/docs/entries.rst | 4 +- .../docs/grpc-vs-http.rst | 14 + .../docs/handlers-app-engine.rst | 7 +- .../docs/handlers-cloud-logging.rst | 6 + .../docs/handlers-container-engine.rst | 7 +- .../docs/handlers-structured-log.rst | 6 + .../google-cloud-logging/docs/handlers.rst | 13 +- packages/google-cloud-logging/docs/index.rst | 22 +- packages/google-cloud-logging/docs/logger.rst | 1 + .../docs/std-lib-integration.rst | 146 +++++++ .../docs/stdlib-usage.rst | 70 ---- .../google-cloud-logging/docs/transport.rst | 25 ++ .../docs/transports-base.rst | 6 - .../docs/transports-sync.rst | 6 - .../docs/transports-thread.rst | 7 - packages/google-cloud-logging/docs/usage.rst | 359 +----------------- packages/google-cloud-logging/docs/v2.rst | 19 - .../google/cloud/logging_v2/logger.py | 11 +- .../samples/snippets/usage_guide.py | 109 +++++- 21 files changed, 808 insertions(+), 490 deletions(-) create mode 100644 packages/google-cloud-logging/docs/direct-lib-usage.rst create mode 100644 packages/google-cloud-logging/docs/grpc-vs-http.rst create mode 100644 packages/google-cloud-logging/docs/handlers-cloud-logging.rst create mode 100644 packages/google-cloud-logging/docs/handlers-structured-log.rst create mode 100644 packages/google-cloud-logging/docs/std-lib-integration.rst delete mode 100644 packages/google-cloud-logging/docs/stdlib-usage.rst create mode 100644 packages/google-cloud-logging/docs/transport.rst delete mode 100644 packages/google-cloud-logging/docs/transports-base.rst delete mode 100644 packages/google-cloud-logging/docs/transports-sync.rst delete mode 100644 packages/google-cloud-logging/docs/transports-thread.rst delete mode 100644 packages/google-cloud-logging/docs/v2.rst diff --git a/packages/google-cloud-logging/UPGRADING.md b/packages/google-cloud-logging/UPGRADING.md index af7461dda902..e882a497bb13 100644 --- a/packages/google-cloud-logging/UPGRADING.md +++ b/packages/google-cloud-logging/UPGRADING.md @@ -1,3 +1,131 @@ +# 3.0.0 Migration Guide + +The v3.0.0 release of `google-cloud-logging` improves usability of the library, +particularly on serverless environments. + +If you experience technical issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues). + +## Primary Changes + +### Handler deprecations ([#310](https://github.com/googleapis/python-logging/pull/310)) + +> **WARNING**: Breaking change + +We have changed our design policy to support more generic `Handler` classes instead of product-specific classes: + +- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py) + - Sends logs over the network (using gRPC or HTTP API calls) + - Replaces `AppEngineHandler` +- [`StructuredLogHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/structured_log.py) + - Exports logs in JSON format through standard out, to be parsed by an agent + - Replaces `ContainerEngineHandler` + +As of v3.0.0, [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py) +and [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py) +are deprecated and won't be updated. These handlers might be removed from the library in a future update. 
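+
+For most users, migrating off a deprecated handler is a small change. The following is an
+illustrative sketch (assuming a default `Client`; it is not part of the original guide):
+
+```py
+import logging
+
+import google.cloud.logging
+from google.cloud.logging.handlers import CloudLoggingHandler
+
+client = google.cloud.logging.Client()
+# before: handler = AppEngineHandler(client); the generic handler replaces it
+handler = CloudLoggingHandler(client)
+logging.getLogger().addHandler(handler)
+```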
+
+### Full JSON log support in standard library integration ([#316](https://github.com/googleapis/python-logging/pull/316), [#339](https://github.com/googleapis/python-logging/pull/339), [#447](https://github.com/googleapis/python-logging/pull/447))
+
+You can now log JSON data using the Python `logging` standard library integration.
+To log JSON data, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+```py
+import logging
+
+data_dict = {"hello": "world"}
+logging.info("message field", extra={"json_fields": data_dict})
+```
+
+2. Log a JSON-parsable string:
+
+```py
+import logging
+import json
+
+data_dict = {"hello": "world"}
+logging.info(json.dumps(data_dict))
+```
+
+### Metadata autodetection ([#315](https://github.com/googleapis/python-logging/pull/315))
+
+> **WARNING**: Breaking change
+
+Logs emitted by the library must be associated with a [monitored-resource type](https://cloud.google.com/monitoring/api/resources)
+that indicates the compute environment the log originated from.
+- Prior to 3.0.0, when a log doesn't specify a monitored resource, that field is set to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
+- With 3.0.0, when a log doesn't specify a monitored resource, the library attempts to identify the resource. If a resource can't be detected, the field will still default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
+
+### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316))
+
+In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type:
+
+```py
+logger.log("hello world")
+```
+
+v3.0.0 also supports the Logging class methods from previous releases:
+
+```py
+logger.log_text("hello world")
+logger.log_struct({"hello": "world"})
+logger.log_proto(proto_message)
+logger.log_empty()
+```
+
+### More permissive arguments ([#422](https://github.com/googleapis/python-logging/pull/422))
+
+> **WARNING**: Breaking change
+
+In v3.0.0, the library supports a wider variety of input formats:
+
+```py
+# lowercase severity strings will be accepted
+logger.log("hello world", severity="warning")
+```
+
+```py
+# a severity will be pulled out of the JSON payload if not otherwise set
+logger.log({"hello": "world", "severity":"warning"})
+```
+
+```py
+# resource data can be passed as a dict instead of a Resource object
+logger.log("hello world", resource={"type":"global", "labels":[]})
+```
+
+### Allow reading from non-project resources ([#444](https://github.com/googleapis/python-logging/pull/444))
+
+Prior to v3.0.0, there was a crashing bug when attempting to read logs from non-project resources:
+
+- `organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`
+- `billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`
+- `folders/[FOLDER_ID]/logs/[LOG_ID]`
+
+The v3.0.0 update fixes this issue.
+
+### Internal Gapic and HTTP implementation changes ([#375](https://github.com/googleapis/python-logging/pull/375))
+
+> **WARNING**: Breaking change
+
+The library supports sending logs using two network protocols: gRPC and HTTP. Prior to v3.0.0, there was an
+inconsistency in the implementations, resulting in unexpected behavior when in HTTP mode.
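+
+If you want to opt out of gRPC and use the HTTP implementation (for example, to work
+around network issues with gRPC), pass `_use_grpc=False` when creating the client, as
+in this minimal sketch (see also the `grpc-vs-http` docs page added in this release):
+
+```py
+from google.cloud import logging_v2
+
+# use the HTTP API implementation instead of gRPC
+client = logging_v2.Client(_use_grpc=False)
+```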
+ +### Max_size argument when listing entries ([#375](https://github.com/googleapis/python-logging/pull/375)) + +v3.0.0 introduces a new `max_size` argument to `list_entries` calls, which can be used to specify an upper bound +on how many logs should be returned: + +```py +from google.cloud import logging_v2 + +client = logging_v2.Client() +client.list_entries(max_size=5) +``` + +--- + # 2.0.0 Migration Guide The 2.0 release of the `google-cloud-logging` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. @@ -334,4 +462,4 @@ The following resource name helpers have been renamed. **`ConfigServiceV2Client`** * `sink_path` -> `log_sink_path` -* `exclusion_path` -> `log_exclusion_path` \ No newline at end of file +* `exclusion_path` -> `log_exclusion_path` diff --git a/packages/google-cloud-logging/docs/direct-lib-usage.rst b/packages/google-cloud-logging/docs/direct-lib-usage.rst new file mode 100644 index 000000000000..11cf39e9cc3d --- /dev/null +++ b/packages/google-cloud-logging/docs/direct-lib-usage.rst @@ -0,0 +1,330 @@ +Direct Library Usage +==================== + +We recommend that you use the :mod:`google-cloud-logging` library +by integrating it with the :doc:`Python logging standard library`; +However, you can also use the library to interact with the Google Cloud Logging API +directly. + +In addition to writing logs, you can use the library to manage +:doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources. + +Setup +---------------------------- + +Create a Client +~~~~~~~~~~~~~~~~~ + +.. _Creating Client: + +You must set up a :doc:`Client` to use the library: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START usage_client_setup] + :end-before: [END usage_client_setup] + :dedent: 4 + +To use HTTP, :doc:`disable gRPC` when you set up the :doc:`Client`: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START usage_http_client_setup] + :end-before: [END usage_http_client_setup] + :dedent: 4 + +Create a Logger +~~~~~~~~~~~~~~~~~ + +Loggers read, write, and delete logs from Google Cloud. + +You use your :doc:`Client` to create a :doc:`Logger`. + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_create] + :end-before: [END logger_create] + :dedent: 4 + +To add custom labels, do so when you initialize a :doc:`Logger`. +When you add custom labels, these labels are added to each +:doc:`LogEntry` written by the :doc:`Logger`: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_custom_labels] + :end-before: [END logger_custom_labels] + :dedent: 4 + +By default, the library adds a `Monitored Resource field `_ +associated with the environment the code is run on. For example, code run on +App Engine will have a `gae_app `_ +resource, while code run locally will have a `global `_ resource field. + +To manually set the resource field, do so when you initialize the :doc:`Logger`: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logger_custom_resource] + :end-before: [END logger_custom_resource] + :dedent: 4 + + +Write Log Entries +------------------- + +You write logs by using :meth:`Logger.log `: + +.. 
literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_basic]
+    :end-before: [END logger_log_basic]
+    :dedent: 4
+
+You can add `LogEntry fields `_
+by passing them as keyword arguments:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_fields]
+    :end-before: [END logger_log_fields]
+    :dedent: 4
+
+:meth:`Logger.log ` chooses the appropriate :doc:`LogEntry ` type
+based on input type. To specify type, you can use the following Logger methods:
+
+- :meth:`Logger.log_text ` creates a :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :meth:`Logger.log_struct ` creates a :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :meth:`Logger.log_proto ` creates a :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+- :meth:`Logger.log_empty ` creates an empty :class:`~google.cloud.logging_v2.entries.LogEntry`
+
+Batch Write Logs
+------------------
+
+By default, each log write takes place in an individual network request, which may be inefficient at scale.
+
+Using the :class:`~google.cloud.logging_v2.logger.Batch` class, logs are batched together, and only sent out
+when :func:`batch.commit ` is called.
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_batch]
+    :end-before: [END logger_log_batch]
+    :dedent: 4
+
+To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Batch` as a context manager:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_log_batch_context]
+    :end-before: [END logger_log_batch_context]
+    :dedent: 4
+
+In the previous example, the logs are automatically committed when the code exits the "with" block.
+
+Retrieve Log Entries
+---------------------
+
+You retrieve log entries for the default project using
+:meth:`list_entries() `
+on a :doc:`Client` or :doc:`Logger` object:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_default]
+    :end-before: [END client_list_entries_default]
+    :dedent: 4
+
+Entries returned by
+:meth:`Client.list_entries() `
+or
+:meth:`Logger.list_entries() `
+are instances of one of the following classes:
+
+- :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+
+You can filter retrieved entries using the `Advanced Logs Filters`_ syntax.
+
+.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters
+
+To fetch filtered entries for the default project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_filter]
+    :end-before: [END client_list_entries_filter]
+    :dedent: 4
+
+To sort entries in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_entries_order_by]
+    :end-before: [END client_list_entries_order_by]
+    :dedent: 4
+
+To retrieve entries for a single logger, sorting in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_list_entries]
+    :end-before: [END logger_list_entries]
+    :dedent: 4
+
+For example, to retrieve all `GKE Admin Activity audit logs`_
+from the past 24 hours:
+
+.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logging_list_gke_audit_logs]
+    :end-before: [END logging_list_gke_audit_logs]
+    :dedent: 4
+
+
+Delete Log Entries
+--------------------
+
+To delete all logs associated with a logger, use the following call:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START logger_delete]
+    :end-before: [END logger_delete]
+    :dedent: 8
+
+
+Manage Log Metrics
+--------------------
+
+Logs-based metrics are counters of entries which match a given filter.
+They can be used within Cloud Monitoring to create charts and alerts.
+
+To list all logs-based metrics for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START client_list_metrics]
+    :end-before: [END client_list_metrics]
+    :dedent: 4
+
+To create a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_create]
+    :end-before: [END metric_create]
+    :dedent: 4
+
+To refresh local information about a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_reload]
+    :end-before: [END metric_reload]
+    :dedent: 4
+
+To update a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_update]
+    :end-before: [END metric_update]
+    :dedent: 4
+
+To delete a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START metric_delete]
+    :end-before: [END metric_delete]
+    :dedent: 4
+
+Log Sinks
+---------------
+
+Sinks allow exporting of log entries which match a given filter to
+Cloud Storage buckets, BigQuery datasets, or Cloud Pub/Sub topics.
+
+Cloud Storage Sink
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Ensure the storage bucket that you want to export logs to has
+``cloud-logs@google.com`` as an owner. See
+`Setting permissions for Cloud Storage`_.
+
+.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage
+
+Ensure that ``cloud-logs@google.com`` is an owner of the bucket:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_bucket_permissions]
+    :end-before: [END sink_bucket_permissions]
+    :dedent: 4
+
+To create a Cloud Storage sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_storage_create]
+    :end-before: [END sink_storage_create]
+    :dedent: 4
+
+
+BigQuery Sink
+~~~~~~~~~~~~~~~~~~
+
+To export logs to BigQuery, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a dataset.
+
+See: `Setting permissions for BigQuery`_
+
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_dataset_permissions]
+    :end-before: [END sink_dataset_permissions]
+    :dedent: 4
+
+To create a BigQuery sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+    :start-after: [START sink_bigquery_create]
+    :end-before: [END sink_bigquery_create]
+    :dedent: 4
+
+
+Pub/Sub Sink
+~~~~~~~~~~~~~~~~~
+
+To export logs to Pub/Sub, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a topic.
+
+See: `Setting permissions for Pub/Sub`_
+
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub
+
+.. 
+
+
+Delete Log Entries
+--------------------
+
+To delete all logs associated with a logger, use the following call:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logger_delete]
+ :end-before: [END logger_delete]
+ :dedent: 8
+
+
+Manage Log Metrics
+--------------------
+
+Logs-based metrics are counters of entries which match a given filter.
+They can be used within Cloud Monitoring to create charts and alerts.
+
+To list all logs-based metrics for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START client_list_metrics]
+ :end-before: [END client_list_metrics]
+ :dedent: 4
+
+To create a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START metric_create]
+ :end-before: [END metric_create]
+ :dedent: 4
+
+To refresh local information about a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START metric_reload]
+ :end-before: [END metric_reload]
+ :dedent: 4
+
+To update a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START metric_update]
+ :end-before: [END metric_update]
+ :dedent: 4
+
+To delete a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START metric_delete]
+ :end-before: [END metric_delete]
+ :dedent: 4
+
+Log Sinks
+---------------
+
+Sinks allow exporting of log entries which match a given filter to
+Cloud Storage buckets, BigQuery datasets, or Cloud Pub/Sub topics.
+
+Cloud Storage Sink
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Ensure the storage bucket that you want to export logs to has
+``cloud-logs@google.com`` as an owner. See
+`Setting permissions for Cloud Storage`_.
+
+.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage
+
+To add ``cloud-logs@google.com`` as an owner of the bucket:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_bucket_permissions]
+ :end-before: [END sink_bucket_permissions]
+ :dedent: 4
+
+To create a Cloud Storage sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_storage_create]
+ :end-before: [END sink_storage_create]
+ :dedent: 4
+
+
+BigQuery Sink
+~~~~~~~~~~~~~~~~~~
+
+To export logs to BigQuery, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a dataset.
+
+See: `Setting permissions for BigQuery`_
+
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_dataset_permissions]
+ :end-before: [END sink_dataset_permissions]
+ :dedent: 4
+
+To create a BigQuery sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_bigquery_create]
+ :end-before: [END sink_bigquery_create]
+ :dedent: 4
+
+
+Pub/Sub Sink
+~~~~~~~~~~~~~~~~~
+
+To export logs to Pub/Sub, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a topic.
+
+See: `Setting permissions for Pub/Sub`_
+
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_topic_permissions]
+ :end-before: [END sink_topic_permissions]
+ :dedent: 4
+
+To create a Cloud Pub/Sub sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_pubsub_create]
+ :end-before: [END sink_pubsub_create]
+ :dedent: 4
+
+Manage Sinks
+~~~~~~~~~~~~~~
+
+To list all sinks for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START client_list_sinks]
+ :end-before: [END client_list_sinks]
+ :dedent: 4
+
+To refresh local information about a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_reload]
+ :end-before: [END sink_reload]
+ :dedent: 4
+
+To update a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_update]
+ :end-before: [END sink_update]
+ :dedent: 4
+
+To delete a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START sink_delete]
+ :end-before: [END sink_delete]
+ :dedent: 4
diff --git a/packages/google-cloud-logging/docs/entries.rst b/packages/google-cloud-logging/docs/entries.rst
index 9d473f3c1fe8..dc257e4c9ceb 100644
--- a/packages/google-cloud-logging/docs/entries.rst
+++ b/packages/google-cloud-logging/docs/entries.rst
@@ -1,5 +1,5 @@
-Entries
-=======
+Log Entries
+===========
 
 .. automodule:: google.cloud.logging_v2.entries
    :members:
diff --git a/packages/google-cloud-logging/docs/grpc-vs-http.rst b/packages/google-cloud-logging/docs/grpc-vs-http.rst
new file mode 100644
index 000000000000..e6891420ce55
--- /dev/null
+++ b/packages/google-cloud-logging/docs/grpc-vs-http.rst
@@ -0,0 +1,14 @@
+gRPC vs HTTP
+====================
+
+:mod:`google-cloud-logging` supports two different protocols for sending logs over the network:
+gRPC and HTTP. Both implementations conform to the same API, and the difference should be invisible to the end user.
+
+gRPC is enabled by default. You can switch to HTTP mode by either:
+
+- setting the `DISABLE_GRPC` environment variable to `TRUE`, or
+- passing `_use_grpc=False` when :ref:`initializing a Client`
+
+We recommend using gRPC whenever possible, but you may want to try the HTTP
+implementation if you have network issues when using gRPC.
diff --git a/packages/google-cloud-logging/docs/handlers-app-engine.rst b/packages/google-cloud-logging/docs/handlers-app-engine.rst
index f25223a20578..9f8a6c8dbb23 100644
--- a/packages/google-cloud-logging/docs/handlers-app-engine.rst
+++ b/packages/google-cloud-logging/docs/handlers-app-engine.rst
@@ -1,5 +1,8 @@
-Google App Engine flexible Log Handler
-======================================
+[DEPRECATED] App Engine Handler
+===================================================
+
+.. deprecated:: 3.0.0
+ Use :class:`CloudLoggingHandler` instead.
 
 .. automodule:: google.cloud.logging_v2.handlers.app_engine
    :members:
diff --git a/packages/google-cloud-logging/docs/handlers-cloud-logging.rst b/packages/google-cloud-logging/docs/handlers-cloud-logging.rst
new file mode 100644
index 000000000000..5ebaa51ff8fe
--- /dev/null
+++ b/packages/google-cloud-logging/docs/handlers-cloud-logging.rst
@@ -0,0 +1,6 @@
+Cloud Logging Handler
+==============================
+
+..
automodule:: google.cloud.logging_v2.handlers.handlers + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers-container-engine.rst b/packages/google-cloud-logging/docs/handlers-container-engine.rst index 981b41dcb105..0c074eb191e6 100644 --- a/packages/google-cloud-logging/docs/handlers-container-engine.rst +++ b/packages/google-cloud-logging/docs/handlers-container-engine.rst @@ -1,5 +1,8 @@ -Google Kubernetes Engine Log Handler -==================================== +[DEPRECATED] Kubernetes Engine Handler +================================================= + +.. deprecated:: 3.0.0 + Use :class:`StructuredLogHandler` instead. .. automodule:: google.cloud.logging_v2.handlers.container_engine :members: diff --git a/packages/google-cloud-logging/docs/handlers-structured-log.rst b/packages/google-cloud-logging/docs/handlers-structured-log.rst new file mode 100644 index 000000000000..337ad591d1d3 --- /dev/null +++ b/packages/google-cloud-logging/docs/handlers-structured-log.rst @@ -0,0 +1,6 @@ +Structured Log Handler +============================== + +.. automodule:: google.cloud.logging_v2.handlers.structured_log + :members: + :show-inheritance: diff --git a/packages/google-cloud-logging/docs/handlers.rst b/packages/google-cloud-logging/docs/handlers.rst index 9089170fbe7c..91475783468c 100644 --- a/packages/google-cloud-logging/docs/handlers.rst +++ b/packages/google-cloud-logging/docs/handlers.rst @@ -1,6 +1,9 @@ -Python Logging Module Handler -============================== +Handlers +---------------- +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.logging_v2.handlers.handlers - :members: - :show-inheritance: + handlers-cloud-logging + handlers-structured-log + handlers-app-engine + handlers-container-engine diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 64c2dcd1e37c..01d8e4eee753 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -1,17 +1,31 @@ .. include:: README.rst +Usage Guide +------------------- +.. toctree:: + :maxdepth: 2 + + usage + Documentation ------------------- .. toctree:: :maxdepth: 3 - v2 + client + logger + entries + metric + resource + sink + handlers + transport -Migration Guide ---------------- +Migration Guides +---------------- -See the guide below for instructions on migrating to the 2.x release of this library. +See the guide below for instructions on migrating between major releases of this library. .. toctree:: :maxdepth: 2 diff --git a/packages/google-cloud-logging/docs/logger.rst b/packages/google-cloud-logging/docs/logger.rst index 8aca18199333..13f8e0d7e13d 100644 --- a/packages/google-cloud-logging/docs/logger.rst +++ b/packages/google-cloud-logging/docs/logger.rst @@ -3,4 +3,5 @@ Logger .. automodule:: google.cloud.logging_v2.logger :members: + :undoc-members: :show-inheritance: diff --git a/packages/google-cloud-logging/docs/std-lib-integration.rst b/packages/google-cloud-logging/docs/std-lib-integration.rst new file mode 100644 index 000000000000..a485fce6d407 --- /dev/null +++ b/packages/google-cloud-logging/docs/std-lib-integration.rst @@ -0,0 +1,146 @@ +Integration with `logging` Standard Library +=========================================== + +We recommend that you use :mod:`google-cloud-logging` to integrate with +the Python :mod:`logging` standard library. This way, you can write logs using Python +standards, and still have your logs appear in Google Cloud Logging. 
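+
+For reference, a minimal end-to-end setup might look like this (a sketch,
+assuming default credentials are available in the environment):
+
+.. code-block:: python
+
+    import logging
+
+    import google.cloud.logging
+
+    client = google.cloud.logging.Client()
+    client.setup_logging()  # attach a Cloud Logging handler to the root logger
+
+    logging.warning("Hello from the standard library!")  # appears in Cloud Logging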
+
+Automatic Configuration
+-----------------------
+
+To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module,
+call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance.
+
+.. literalinclude:: ../samples/snippets/handler.py
+ :start-after: [START logging_handler_setup]
+ :end-before: [END logging_handler_setup]
+ :dedent: 4
+
+The :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configuration for the environment your
+code is running on. For more information, see the `Google Cloud Logging documentation `_.
+
+Manual Handler Configuration
+-----------------------------
+
+.. _Manual Handler:
+
+Automatic configuration determines the appropriate handler for the environment.
+To specify the handler yourself, construct an instance manually and pass it in
+as an argument to :meth:`~google.cloud.logging_v2.handlers.setup_logging`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START create_cloud_handler]
+ :end-before: [END create_cloud_handler]
+ :dedent: 4
+
+There are two supported handler classes to choose from:
+
+- :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`:
+    - Sends logs directly to Cloud Logging over the network (:doc:`gRPC or HTTP`)
+    - Logs are transmitted according to a :ref:`Transport ` class
+    - This is the default handler in most environments, including local development
+- :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`:
+    - Outputs logs as `structured JSON `_
+      to standard out, to be read and parsed by a GCP logging agent
+    - This is the default handler on Kubernetes Engine, Cloud Functions, and Cloud Run
+
+Standard Library
+---------------------------
+
+After you set up the Google Cloud Logging library with the Python :mod:`logging` standard library,
+you can send logs with the standard logging library as you normally would:
+
+.. literalinclude:: ../samples/snippets/handler.py
+ :start-after: [START logging_handler_usage]
+ :end-before: [END logging_handler_usage]
+ :dedent: 4
+
+For more information on using the Python :mod:`logging` standard library, see the `logging documentation `_.
+
+Logging JSON Payloads
+----------------------
+
+.. _JSON:
+
+Although the Python :mod:`logging` standard library `expects all logs to be strings `_,
+Google Cloud Logging allows `JSON payload data `_.
+
+To write JSON logs using the standard library integration, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_extra_json_fields]
+ :end-before: [END logging_extra_json_fields]
+ :dedent: 4
+
+2. Log a JSON-parsable string:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_json_dumps]
+ :end-before: [END logging_json_dumps]
+ :dedent: 4
+
+
+Automatic Metadata Detection
+----------------------------
+
+.. _Autodetection:
+
+The Google Cloud Logging library attempts to detect and attach additional
+`LogEntry fields `_.
+The following fields are currently supported:
+
+- labels
+- trace*
+- span_id*
+- trace_sampled*
+- http_request*
+- source_location
+- resource
+- :ref:`json_fields`
+
+.. note::
+    Fields marked with "*" require a supported Python web framework. The Google Cloud Logging
+    library currently supports `flask `_ and `django `_.
+
+Manual Metadata Using the `extra` Argument
+--------------------------------------------
+
+The Python :mod:`logging` standard library accepts `an "extra" argument `_ when
+writing logs. You can use this argument to populate LogRecord objects with user-defined
+key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional
+metadata to populate `LogEntry fields `_.
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_extras]
+ :end-before: [END logging_extras]
+ :dedent: 4
+
+All of the `LogEntry fields `_
+that can be :ref:`autodetected` can also be set manually through the `extra`
+argument. Fields sent explicitly through the `extra`
+argument override any :ref:`automatically detected` fields.
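+
+For reference, the pattern used in this snippet looks like this (a minimal
+sketch; the values shown are placeholders):
+
+.. code-block:: python
+
+    import logging
+
+    logging.info(
+        "hello",
+        extra={
+            "labels": {"foo": "bar"},
+            "http_request": {"requestUrl": "localhost"},
+            "trace": "01234",
+        },
+    )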
+
+CloudLoggingHandler Transports
+------------------------------
+
+.. _Transports:
+
+:doc:`Transport` classes define how the :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`
+transports logs over the network to Google Cloud. There are two Transport implementations
+(defined as subclasses of :class:`transports.base.Transport `):
+
+- :class:`~google.cloud.logging_v2.handlers.transports.background_thread.BackgroundThreadTransport`:
+    - sends logs in batches, using a background thread
+    - the default Transport class
+- :class:`~google.cloud.logging_v2.handlers.transports.sync.SyncTransport`:
+    - sends each log synchronously in a single API call
+
+You can set a Transport class by passing it as an argument when
+:ref:`initializing CloudLoggingHandler manually.`
+
+You can use both transport options over :doc:`gRPC or HTTP`.
+
+.. note::
+    :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`
+    prints logs as formatted JSON to standard output, and does not use a Transport class.
diff --git a/packages/google-cloud-logging/docs/stdlib-usage.rst b/packages/google-cloud-logging/docs/stdlib-usage.rst
deleted file mode 100644
index 375b41ddf3bf..000000000000
--- a/packages/google-cloud-logging/docs/stdlib-usage.rst
+++ /dev/null
@@ -1,70 +0,0 @@
-Integration with Python logging module
---------------------------------------
-
-
-It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it,
-create a :class:`CloudLoggingHandler ` instance from your
-Logging client.
-
-.. code-block:: python
-
-    >>> import logging
-    >>> import google.cloud.logging # Don't conflict with standard logging
-    >>> from google.cloud.logging.handlers import CloudLoggingHandler
-    >>> client = google.cloud.logging.Client()
-    >>> handler = CloudLoggingHandler(client)
-    >>> cloud_logger = logging.getLogger('cloudLogger')
-    >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN
-    >>> cloud_logger.addHandler(handler)
-    >>> cloud_logger.error('bad news')
-
-.. note::
-
-    This handler by default uses an asynchronous transport that sends log entries on a background
-    thread. However, the API call will still be made in the same process. For other transport
-    options, see the transports section.
-
-All logs will go to a single custom log, which defaults to "python". The name of the Python
-logger will be included in the structured log entry under the "python_logger" field. You can
-change it by providing a name to the handler:
-
-.. code-block:: python
-
-    >>> handler = CloudLoggingHandler(client, name="mycustomlog")
-
-It is also possible to attach the handler to the root Python logger, so that for example a plain
-`logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However,
-you must avoid infinite recursion from the logging calls the client itself makes. A helper
-method :meth:`setup_logging ` is provided to configure
-this automatically:
-
-.. code-block:: python
-
-    >>> import logging
-    >>> import google.cloud.logging # Don't conflict with standard logging
-    >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging
-    >>> client = google.cloud.logging.Client()
-    >>> handler = CloudLoggingHandler(client)
-    >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN
-    >>> setup_logging(handler)
-    >>> logging.error('bad news')
-
-You can also exclude certain loggers:
-
-.. code-block:: python
-
-    >>> setup_logging(handler, excluded_loggers=('werkzeug',))
-
-
-
-Python logging handler transports
-==================================
-
-The Python logging handler can use different transports. The default is
-:class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport`.
-
- 1. :class:`google.cloud.logging_V2.handlers.BackgroundThreadTransport` this is the default. It writes
-    entries on a background :class:`python.threading.Thread`.
-
- 1. :class:`google.cloud.logging_V2.handlers.SyncTransport` this handler does a direct API call on each
-    logging statement to write the entry.
diff --git a/packages/google-cloud-logging/docs/transport.rst b/packages/google-cloud-logging/docs/transport.rst
new file mode 100644
index 000000000000..9f4430103fd6
--- /dev/null
+++ b/packages/google-cloud-logging/docs/transport.rst
@@ -0,0 +1,25 @@
+Transports
+----------------
+
+These classes define how the :class:`CloudLoggingHandler `
+transports logs to GCP. More information can be found in the :ref:`User Guide`.
+
+Base Transport
+~~~~~~~~~~~~~~
+.. automodule:: google.cloud.logging_v2.handlers.transports.base
+    :members:
+    :show-inheritance:
+
+Background Thread Transport
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread
+    :members:
+    :show-inheritance:
+
+Synchronous Transport
+~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.logging_v2.handlers.transports.sync
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-logging/docs/transports-base.rst b/packages/google-cloud-logging/docs/transports-base.rst
deleted file mode 100644
index b28fb5ba6bc2..000000000000
--- a/packages/google-cloud-logging/docs/transports-base.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Python Logging Handler Sync Transport
-======================================
-
-.. automodule:: google.cloud.logging_v2.handlers.transports.base
-    :members:
-    :show-inheritance:
diff --git a/packages/google-cloud-logging/docs/transports-sync.rst b/packages/google-cloud-logging/docs/transports-sync.rst
deleted file mode 100644
index 32e6401cba05..000000000000
--- a/packages/google-cloud-logging/docs/transports-sync.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Python Logging Handler Sync Transport
-======================================
-
-..
automodule:: google.cloud.logging_v2.handlers.transports.sync - :members: - :show-inheritance: diff --git a/packages/google-cloud-logging/docs/transports-thread.rst b/packages/google-cloud-logging/docs/transports-thread.rst deleted file mode 100644 index 2899e6c480b8..000000000000 --- a/packages/google-cloud-logging/docs/transports-thread.rst +++ /dev/null @@ -1,7 +0,0 @@ -Python Logging Handler Threaded Transport -========================================= - - -.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread - :members: - :show-inheritance: diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 1fde3d8ea1fd..929ee9cefc47 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -1,356 +1,9 @@ Usage Guide -=========== +------------- +.. toctree:: + :maxdepth: 2 -Writing log entries -------------------- + std-lib-integration + direct-lib-usage + grpc-vs-http -To write log entries, first create a -:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with -which to associate the entries: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_create] - :end-before: [END logger_create] - :dedent: 4 - -Write a simple text entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_text] - :end-before: [END logger_log_text] - :dedent: 4 - -Write a dictionary entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_struct] - :end-before: [END logger_log_struct] - :dedent: 4 - -Write a simple text entry and resource to the logger. - -Supported Resource values are listed at `Monitored Resource Types`_ - -.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list - - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_resource_text] - :end-before: [END logger_log_resource_text] - :dedent: 4 - -Retrieving log entries ----------------------- - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_default] - :end-before: [END client_list_entries_default] - :dedent: 4 - -Entries returned by -:meth:`Client.list_entries ` -or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: - -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` - -Filter entries retrieved using the `Advanced Logs Filters`_ syntax - -.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_filter] - :end-before: [END client_list_entries_filter] - :dedent: 4 - -Sort entries in descending timestamp order. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_order_by] - :end-before: [END client_list_entries_order_by] - :dedent: 4 - -Retrieve entries for a single logger, sorting in descending timestamp order: - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_list_entries] - :end-before: [END logger_list_entries] - :dedent: 4 - -And as a practical example, retrieve all `GKE Admin Activity audit logs`_ -from the past 24 hours: - -.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logging_list_gke_audit_logs] - :end-before: [END logging_list_gke_audit_logs] - :dedent: 4 - -Delete all entries for a logger -------------------------------- - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_delete] - :end-before: [END logger_delete] - :dedent: 8 - - -Manage log metrics ------------------- - -Metrics are counters of entries which match a given filter. They can be -used within Cloud Monitoring to create charts and alerts. - -List all metrics for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_metrics] - :end-before: [END client_list_metrics] - :dedent: 4 - -Create a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_create] - :end-before: [END metric_create] - :dedent: 4 - -Refresh local information about a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_reload] - :end-before: [END metric_reload] - :dedent: 4 - -Update a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_update] - :end-before: [END metric_update] - :dedent: 4 - -Delete a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_delete] - :end-before: [END metric_delete] - :dedent: 4 - -Export log entries using sinks ------------------------------- - -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. - -Export to Cloud Storage -~~~~~~~~~~~~~~~~~~~~~~~ - -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See -`Setting permissions for Cloud Storage`_. - -.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage - -Add ``cloud-logs@google.com`` as the owner of the bucket: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bucket_permissions] - :end-before: [END sink_bucket_permissions] - :dedent: 4 - -Create a Cloud Storage sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_storage_create] - :end-before: [END sink_storage_create] - :dedent: 4 - - -Export to BigQuery -~~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a dataset. - -See: `Setting permissions for BigQuery`_ - -.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_dataset_permissions] - :end-before: [END sink_dataset_permissions] - :dedent: 4 - -Create a BigQuery sink: - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bigquery_create] - :end-before: [END sink_bigquery_create] - :dedent: 4 - - -Export to Pub/Sub -~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a topic. - -See: `Setting permissions for Pub/Sub`_ - -.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_topic_permissions] - :end-before: [END sink_topic_permissions] - :dedent: 4 - -Create a Cloud Pub/Sub sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_pubsub_create] - :end-before: [END sink_pubsub_create] - :dedent: 4 - -Manage Sinks -~~~~~~~~~~~~ - -List all sinks for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_sinks] - :end-before: [END client_list_sinks] - :dedent: 4 - -Refresh local information about a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_reload] - :end-before: [END sink_reload] - :dedent: 4 - -Update a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_update] - :end-before: [END sink_update] - :dedent: 4 - -Delete a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_delete] - :end-before: [END sink_delete] - :dedent: 4 - -Integration with Python logging module --------------------------------------- - -It's possible to tie the Python :mod:`logging` module directly into Google -Cloud Logging. There are different handler options to accomplish this. -To automatically pick the default for your current environment, use -:meth:`~google.cloud.logging.client.Client.get_default_handler`. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_default_handler] - :end-before: [END create_default_handler] - :dedent: 4 - -It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Cloud Logging, -as well as any other loggers created. A helper method -:meth:`~google.cloud.logging.client.Client.setup_logging` is provided -to configure this automatically. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] - :dedent: 4 - -.. note:: - - To reduce cost and quota usage, do not enable Cloud Logging - handlers while testing locally. - -You can also exclude certain loggers: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging_excludes] - :end-before: [END setup_logging_excludes] - :dedent: 4 - -Cloud Logging Handler -~~~~~~~~~~~~~~~~~~~~~ - -If you prefer not to use -:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can -directly create a -:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance -which will write directly to the API. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_cloud_handler] - :end-before: [END create_cloud_handler] - :dedent: 4 - -.. note:: - - This handler by default uses an asynchronous transport that sends log - entries on a background thread. However, the API call will still be made - in the same process. For other transport options, see the transports - section. 
- -All logs will go to a single custom log, which defaults to "python". The name -of the Python logger will be included in the structured log entry under the -"python_logger" field. You can change it by providing a name to the handler: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_named_handler] - :end-before: [END create_named_handler] - :dedent: 4 - -Cloud Logging Handler transports -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` -logging handler can use different transports. The default is -:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport` this is - the default. It writes entries on a background - :class:`python.threading.Thread`. - - 1. :class:`~google.cloud.logging.handlers.SyncTransport` this handler does a - direct API call on each logging statement to write the entry. - - -.. _Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine - -fluentd logging handlers -~~~~~~~~~~~~~~~~~~~~~~~~ - -Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, -which writes directly to the API, two other handlers are provided. -:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is -recommended when running on the Google App Engine Flexible vanilla runtimes -(i.e. your app.yaml contains ``runtime: python``), and -:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Kubernetes Engine`_ with the -Cloud Logging plugin enabled. - -:meth:`~google.cloud.logging.client.Client.get_default_handler` and -:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use -the environment to automatically detect whether the code is running in -these platforms and use the appropriate handler. - -In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Cloud Logging. The handlers -provided help set the correct metadata such as log level so that logs can be -filtered accordingly. diff --git a/packages/google-cloud-logging/docs/v2.rst b/packages/google-cloud-logging/docs/v2.rst deleted file mode 100644 index 823097bd73bd..000000000000 --- a/packages/google-cloud-logging/docs/v2.rst +++ /dev/null @@ -1,19 +0,0 @@ -v2 ----------------- -.. 
toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - resource - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 542e4d62905f..02ecb6905a5a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -158,7 +158,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): client.logging_api.write_entries([api_repr]) def log_empty(self, *, client=None, **kw): - """Log an empty message via a POST request + """Log an empty message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -173,7 +173,7 @@ def log_empty(self, *, client=None, **kw): self._do_log(client, LogEntry, **kw) def log_text(self, text, *, client=None, **kw): - """Log a text message via a POST request + """Log a text message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -189,7 +189,7 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a structured message via a POST request + """Log a dictionary message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -209,7 +209,7 @@ def log_struct(self, info, *, client=None, **kw): self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): - """Log a protobuf message via a POST request + """Log a protobuf message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -226,8 +226,7 @@ def log_proto(self, message, *, client=None, **kw): self._do_log(client, ProtobufEntry, message, **kw) def log(self, message=None, *, client=None, **kw): - """Log an arbitrary message via a POST request. - Type will be inferred based on the input message. + """Log an arbitrary message. Type will be inferred based on the input. 
See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index e519c75c1a35..fdbbe1211dc5 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -100,15 +100,65 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument @snippet -def logger_usage(client, to_delete): +def client_setup(client2, to_delete): + """Client setup.""" + + # [START usage_client_setup] + import google.cloud.logging + + # if project not given, it will be inferred from the environment + client = google.cloud.logging.Client(project="my-project") + # [END usage_client_setup] + to_delete.append(client) + + # [START usage_http_client_setup] + http_client = google.cloud.logging.Client(_use_grpc=False) + # [END usage_http_client_setup] + to_delete.append(http_client) + + +@snippet +def logger_usage(client_true, to_delete): """Logger usage.""" - log_name = "logger_usage_%d" % (_millis()) + import google.cloud.logging # [START logger_create] - logger = client.logger(log_name) + client = google.cloud.logging.Client(project="my-project") + logger = client.logger(name="log_id") + # logger will bind to logName "projects/my_project/logs/log_id" # [END logger_create] + client = client_true + + log_id = "logger_usage_%d" % (_millis()) + # [START logger_custom_labels] + custom_labels = {"my-key": "my-value"} + label_logger = client.logger(log_id, labels=custom_labels) + # [END logger_custom_labels] + to_delete.append(label_logger) + # [START logger_custom_resource] + from google.cloud.logging_v2.resource import Resource + + resource = Resource(type="global", labels={}) + global_logger = client.logger(log_id, resource=resource) + # [END logger_custom_resource] + to_delete.append(global_logger) + + logger = client_true.logger(log_id) to_delete.append(logger) + # [START logger_log_basic] + logger.log("A simple entry") # API call + # [END logger_log_basic] + + # [START logger_log_fields] + logger.log( + "an entry with fields set", + severity="ERROR", + insert_id="0123", + labels={"my-label": "my-value"}, + ) # API call + # [END logger_log_fields] + # [START logger_log_text] logger.log_text("A simple entry") # API call # [END logger_log_text] @@ -135,6 +185,20 @@ def logger_usage(client, to_delete): ) # [END logger_log_resource_text] + # [START logger_log_batch] + batch = logger.batch() + batch.log("first log") + batch.log("second log") + batch.commit() + # [END logger_log_batch] + + # [START logger_log_batch_context] + with logger.batch() as batch: + batch.log("first log") + # do work + batch.log("last log") + # [END logger_log_batch_context] + # [START logger_list_entries] from google.cloud.logging import DESCENDING @@ -357,12 +421,10 @@ def logging_handler(client): # [START create_cloud_handler] from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import setup_logging handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") + setup_logging(handler) # [END create_cloud_handler] # [START create_named_handler] @@ -370,6 +432,39 @@ def logging_handler(client): # [END create_named_handler] +@snippet +def logging_json(client): + # [START logging_json_dumps] + import logging + import json + + 
data_dict = {"hello": "world"} + logging.info(json.dumps(data_dict)) + # [END logging_json_dumps] + + # [START logging_extra_json_fields] + import logging + + data_dict = {"hello": "world"} + logging.info("message field", extra={"json_fields": data_dict}) + # [END logging_extra_json_fields] + + +@snippet +def using_extras(client): + import logging + + # [START logging_extras] + my_labels = {"foo": "bar"} + my_http = {"requestUrl": "localhost"} + my_trace = "01234" + + logging.info( + "hello", extra={"labels": my_labels, "http_request": my_http, "trace": my_trace} + ) + # [END logging_extras] + + @snippet def setup_logging(client): import logging From 25dc2de74654f36377d50860e7906d28c7a189cf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 Jan 2022 19:58:13 +0100 Subject: [PATCH 592/855] chore(deps): update dependency google-cloud-storage to v2.1.0 (#469) --- .../google-cloud-logging/samples/snippets/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index d75e274c2621..0ab529f1ccdb 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,5 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.32.0 -google-cloud-storage==2.0.0 +google-cloud-storage==2.0.0; python_version == '3.6' +google-cloud-storage==2.1.0; python_version >= '3.7' google-cloud-pubsub==2.9.0 From d8f5f0d34c655fc1b3a9ca07c773b7486e8cc8eb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Feb 2022 13:51:04 -0800 Subject: [PATCH 593/855] chore(main): release 3.0.0 (#473) --- packages/google-cloud-logging/CHANGELOG.md | 39 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 068ad3df2dc1..9bfce6bf1010 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,45 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.0.0](https://github.com/googleapis/python-logging/compare/v2.7.0...v3.0.0) (2022-01-27) + + +### ⚠ BREAKING CHANGES + +* make logging API more friendly to use (#422) +* api consistency between HTTP and Gapic layers (#375) +* support string-encoded json (#339) +* Infer default resource in logger (#315) +* support json logs (#316) +* deprecate AppEngineHandler and ContainerEngineHandler (#310) + +### Features + +* add api key support ([#472](https://github.com/googleapis/python-logging/issues/472)) ([81ca8c6](https://github.com/googleapis/python-logging/commit/81ca8c616acb988be1fbecfc2a0b1a5b39280149)) +* add json_fields extras argument for adding to jsonPayload ([#447](https://github.com/googleapis/python-logging/issues/447)) ([a760e02](https://github.com/googleapis/python-logging/commit/a760e02371a55d6262e42de9e0222fffa2c7192b)) +* avoid importing grpc when explicitly disabled ([#416](https://github.com/googleapis/python-logging/issues/416)) ([818213e](https://github.com/googleapis/python-logging/commit/818213e143d6a1941211a48e0b23069a426ac300)) +* Infer default resource in logger ([#315](https://github.com/googleapis/python-logging/issues/315)) 
([c632503](https://github.com/googleapis/python-logging/commit/c63250399fcd6e1317d341e98fab11095c443e5e)) +* make logging API more friendly to use ([#422](https://github.com/googleapis/python-logging/issues/422)) ([83d9ca8](https://github.com/googleapis/python-logging/commit/83d9ca8521fe7c470bb6755a48a97496515d7abc)) +* support json logs ([#316](https://github.com/googleapis/python-logging/issues/316)) ([5267152](https://github.com/googleapis/python-logging/commit/5267152574b2ee96eb6f5c536a762f58bd2f886e)) +* support string-encoded json ([#339](https://github.com/googleapis/python-logging/issues/339)) ([6fa1773](https://github.com/googleapis/python-logging/commit/6fa17735fe3edb45483ec5e3abd1f53c24ffa881)) +* trace improvements ([#450](https://github.com/googleapis/python-logging/issues/450)) ([e0c5fc0](https://github.com/googleapis/python-logging/commit/e0c5fc02160ae87faf4ba5c2b62be86de6b02cf3)) + + +### Bug Fixes + +* allow reading logs from non-project paths ([#444](https://github.com/googleapis/python-logging/issues/444)) ([97e32b6](https://github.com/googleapis/python-logging/commit/97e32b67603553fe350b6327455fc9f80b8aa6ce)) +* api consistency between HTTP and Gapic layers ([#375](https://github.com/googleapis/python-logging/issues/375)) ([e1506fa](https://github.com/googleapis/python-logging/commit/e1506fa9030776353878048ce562c53bf6ccf7bf)) + + +### Miscellaneous Chores + +* deprecate AppEngineHandler and ContainerEngineHandler ([#310](https://github.com/googleapis/python-logging/issues/310)) ([e3cac88](https://github.com/googleapis/python-logging/commit/e3cac888d40bf67af11e57b74615b0c3b8e8aa3e)) + + +### Documentation + +* update usage guide for v3.0.0 ([#456](https://github.com/googleapis/python-logging/issues/456)) ([8a67b73](https://github.com/googleapis/python-logging/commit/8a67b73cdfcb9da545671be6cf59c724360b1544)) + ## [2.7.0](https://www.github.com/googleapis/python-logging/compare/v2.6.0...v2.7.0) (2021-11-02) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index b31ae8f7800d..2cf113e34899 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.7.0" +version = "3.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 84cc1b4b6df227dee415c25a4575b2ea9ec890f9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 15:54:57 +0000 Subject: [PATCH 594/855] chore: use gapic-generator-python 0.62.1 (#478) - [ ] Regenerate this pull request now. 
fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../config_service_v2/async_client.py | 32 +++---- .../services/config_service_v2/client.py | 32 +++---- .../config_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../logging_service_v2/async_client.py | 8 +- .../services/logging_service_v2/client.py | 8 +- .../logging_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../metrics_service_v2/async_client.py | 10 +-- .../services/metrics_service_v2/client.py | 10 +-- .../metrics_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../cloud/logging_v2/types/logging_config.py | 3 +- .../logging_v2/test_config_service_v2.py | 84 +++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 85 ++++++++++++++++++- .../logging_v2/test_metrics_service_v2.py | 85 ++++++++++++++++++- 16 files changed, 320 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 664f10adae9f..81621a4e2f03 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -263,7 +263,7 @@ async def list_buckets( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -579,7 +579,7 @@ async def list_views( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -851,7 +851,7 @@ async def list_sinks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -953,7 +953,7 @@ async def get_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1064,7 +1064,7 @@ async def create_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1186,7 +1186,7 @@ async def update_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1277,7 +1277,7 @@ async def delete_sink( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1368,7 +1368,7 @@ async def list_exclusions( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1473,7 +1473,7 @@ async def get_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1585,7 +1585,7 @@ async def create_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -1700,7 +1700,7 @@ async def update_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -1778,7 +1778,7 @@ async def delete_exclusion( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1847,8 +1847,8 @@ async def get_cmek_settings( The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1923,8 +1923,8 @@ async def update_cmek_settings( The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. 
See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index f4a1be57c357..10bed9347736 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -499,7 +499,7 @@ def list_buckets( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -820,7 +820,7 @@ def list_views( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1096,7 +1096,7 @@ def list_sinks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1187,7 +1187,7 @@ def get_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1287,7 +1287,7 @@ def create_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1409,7 +1409,7 @@ def update_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1489,7 +1489,7 @@ def delete_sink( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1569,7 +1569,7 @@ def list_exclusions( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1663,7 +1663,7 @@ def get_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1764,7 +1764,7 @@ def create_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -1879,7 +1879,7 @@ def update_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -1957,7 +1957,7 @@ def delete_exclusion( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -2015,8 +2015,8 @@ def get_cmek_settings( The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2092,8 +2092,8 @@ def update_cmek_settings( The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index b34d0a12130b..39d9d4f93cf8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 1cf4f3121c57..b4228c690106 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index e14453424a78..dc8b56b8138f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -258,7 +258,7 @@ async def delete_log( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -426,7 +426,7 @@ async def write_log_entries( Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -558,7 +558,7 @@ async def list_log_entries( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -717,7 +717,7 @@ async def list_logs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 5815c8d1948f..b33821be54a4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -440,7 +440,7 @@ def delete_log( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -597,7 +597,7 @@ def write_log_entries( Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -717,7 +717,7 @@ def list_log_entries( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -857,7 +857,7 @@ def list_logs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 0379cbecff42..4f5c9b1caf3a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 16602c2b42fc..27b094831c8d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index eb7321ab7456..311806df2a37 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -245,7 +245,7 @@ async def list_log_metrics( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -344,7 +344,7 @@ async def get_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -451,7 +451,7 @@ async def create_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -546,7 +546,7 @@ async def update_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -628,7 +628,7 @@ async def delete_log_metric( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index ced653a5107c..ade883811a74 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -439,7 +439,7 @@ def list_log_metrics( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -527,7 +527,7 @@ def get_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -623,7 +623,7 @@ def create_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -718,7 +718,7 @@ def update_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -789,7 +789,7 @@ def delete_log_metric( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 194d341f32ef..7b72b756fbfe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 37cec4a63d68..889d7072e1e9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 3ea70506c6ca..f064f26b7bee 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -120,8 +120,7 @@ class LogView(proto.Message): name (str): The resource name of the view. For example - "projects/my-project-id/locations/my- - location/buckets/my-bucket-id/views/my-view + "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view description (str): Describes this view. create_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index efb46eaad5ea..e7d2ea7d16e1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -522,21 +522,28 @@ def test_config_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_config_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -572,6 +579,77 @@ def test_config_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_config_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 9f11a0210203..0b3b202eb113 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -529,21 +529,28 @@ def test_logging_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( 
LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_logging_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -579,6 +586,78 @@ def test_logging_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 97a2c4a99354..764a76121405 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -527,21 +527,28 @@ def test_metrics_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_metrics_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -577,6 +584,78 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metrics_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( From 0a19b51951f6387b19060984f53929eeeb30623d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Feb 2022 01:00:49 +0100 Subject: [PATCH 595/855] chore(deps): update all dependencies (#477) --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index fbe6c1c5cfc8..27df4634c3c3 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==6.2.5 +pytest==7.0.0 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0ab529f1ccdb..7e49254f48e4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ -google-cloud-logging==2.7.0 +google-cloud-logging==3.0.0 google-cloud-bigquery==2.32.0 -google-cloud-storage==2.0.0; python_version == '3.6' +google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' google-cloud-pubsub==2.9.0 From 975287ab8b2f20c1dd889fc8d6c6aeced7f96c36 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 10 Feb 2022 12:14:30 -0800 Subject: [PATCH 596/855] fix: fix system 
test for mtls (#485) --- .../tests/system/test_system.py | 80 ++++++++++++++++--- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 90b4059d6efe..84d0c9552270 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -110,6 +110,8 @@ def setUpModule(): # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. # Bigquery and storage use http which doesn't have mTLS support, pubsub doesn't # have mTLS fix released yet. +# We also need to skip HTTP client test cases because mTLS is only available for +# gRPC clients. skip_for_mtls = pytest.mark.skipif( Config.use_mtls == "always", reason="Skip the test case for mTLS testing" ) @@ -196,7 +198,12 @@ def test_list_entry_with_auditlog(self): gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: logger.log_proto(audit_struct) # retrieve log @@ -249,7 +256,12 @@ def test_list_entry_with_requestlog(self): gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") http_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}-http") - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: logger.log_proto(req_struct) # retrieve log @@ -301,7 +313,12 @@ def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" gapic_logger = Config.CLIENT.logger(self._logger_name("log_text")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -314,7 +331,12 @@ def test_log_text_with_timestamp(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) now = datetime.utcnow() - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text(text_payload, timestamp=now) entries = _list_entries(logger) @@ -329,7 +351,12 @@ def test_log_text_with_resource(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) now = datetime.utcnow() - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: resource = Resource( type="gae_app", labels={"module_id": "default", "version_id": "test", "zone": ""}, @@ -355,7 +382,12 @@ def test_log_text_w_metadata(self): REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) - for logger in [gapic_logger,
http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text( @@ -381,7 +413,12 @@ def test_log_text_w_metadata(self): def test_log_struct(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_struct(self.JSON_PAYLOAD) @@ -399,7 +436,12 @@ def test_log_struct_w_metadata(self): REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_struct( @@ -423,7 +465,12 @@ def test_log_w_text(self): TEXT_PAYLOAD = "System test: test_log_w_text" gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -433,7 +480,12 @@ def test_log_w_text(self): def test_log_w_struct(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log(self.JSON_PAYLOAD) @@ -446,7 +498,12 @@ def test_log_empty(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log() @@ -829,6 +886,7 @@ def test_update_sink(self): self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + @skip_for_mtls def test_api_equality_list_logs(self): unique_id = uuid.uuid1() gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") From 53d4599045e7ad7349341bf9cd5bd4e8175c51f2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 11 Feb 2022 11:18:37 -0800 Subject: [PATCH 597/855] fix: remove unnecessary detect_resource calls from CloudLoggingHandler (#484) --- .../google/cloud/logging_v2/_helpers.py | 5 +++-- .../google/cloud/logging_v2/handlers/handlers.py | 2 +- .../handlers/transports/background_thread.py | 7 ++++++- .../cloud/logging_v2/handlers/transports/base.py | 13 +++++++++++++ .../cloud/logging_v2/handlers/transports/sync.py | 13 +++++++++++-- .../tests/unit/handlers/test_handlers.py | 2 +- .../handlers/transports/test_background_thread.py | 7 ++++--- .../tests/unit/handlers/transports/test_base.py | 7 +++++-- 
.../tests/unit/handlers/transports/test_sync.py | 7 ++++--- 9 files changed, 48 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py index 51cc6486836b..75f84e50ccac 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_helpers.py @@ -89,7 +89,7 @@ def entry_from_resource(resource, client, loggers): return LogEntry.from_api_repr(resource, client, loggers=loggers) -def retrieve_metadata_server(metadata_key): +def retrieve_metadata_server(metadata_key, timeout=5): """Retrieve the metadata key in the metadata server. See: https://cloud.google.com/compute/docs/storing-retrieving-metadata @@ -99,6 +99,7 @@ def retrieve_metadata_server(metadata_key): Key of the metadata which will form the url. You can also supply query parameters after the metadata key. e.g. "tags?alt=json" + timeout (number): number of seconds to wait for the HTTP request Returns: str: The value of the metadata key returned by the metadata server. @@ -106,7 +107,7 @@ def retrieve_metadata_server(metadata_key): url = METADATA_URL + metadata_key try: - response = requests.get(url, headers=METADATA_HEADERS) + response = requests.get(url, headers=METADATA_HEADERS, timeout=timeout) if response.status_code == requests.codes.ok: return response.text diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 769146007007..f6fa90d71e5d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -179,7 +179,7 @@ def __init__( resource = detect_resource(client.project) self.name = name self.client = client - self.transport = transport(client, name) + self.transport = transport(client, name, resource=resource) self.project_id = client.project self.resource = resource self.labels = labels diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 1097830a8d55..f361e043cdf8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -29,6 +29,7 @@ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE _DEFAULT_GRACE_PERIOD = 5.0 # Seconds _DEFAULT_MAX_BATCH_SIZE = 10 @@ -260,6 +261,8 @@ def __init__( grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE, max_latency=_DEFAULT_MAX_LATENCY, + resource=_GLOBAL_RESOURCE, + **kwargs, ): """ Args: @@ -275,9 +278,11 @@ def __init__( than the grace_period. This means this is effectively the longest amount of time the background thread will hold onto log entries before sending them to the server. 
+ resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified """ self.client = client - logger = self.client.logger(name) + logger = self.client.logger(name, resource=resource) self.worker = _Worker( logger, grace_period=grace_period, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py index bd52b4e75dff..a0c9aafa4aaa 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py @@ -14,6 +14,8 @@ """Module containing base class for logging transport.""" +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + class Transport(object): """Base class for Google Cloud Logging handler transports. @@ -22,6 +24,17 @@ class Transport(object): client and name object, and must override :meth:`send`. """ + def __init__(self, client, name, resource=_GLOBAL_RESOURCE, **kwargs): + """ + Args: + client (~logging_v2.client.Client): + The Logging client. + name (str): The name of the logger. + resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified + """ + super().__init__() + def send(self, record, message, **kwargs): """Transport send to be implemented by subclasses. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index 796f0d2ff733..6f93b2e57003 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -18,6 +18,7 @@ """ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE class SyncTransport(Transport): @@ -26,8 +27,16 @@ class SyncTransport(Transport): Uses this library's Logging client to directly make the API call. - def __init__(self, client, name): - self.logger = client.logger(name) + def __init__(self, client, name, resource=_GLOBAL_RESOURCE, **kwargs): + """ + Args: + client (~logging_v2.client.Client): + The Logging client. + name (str): The name of the logger. + resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified + """ + self.logger = client.logger(name, resource=resource) def send(self, record, message, **kwargs): """Overrides transport.send().
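Taken together, the transport hunks above let a handler hand its already-detected monitored resource to the transport instead of each transport re-detecting it. A short usage sketch under that change; the client construction and log name are illustrative, and it assumes ambient Google Cloud credentials:

```python
# Sketch: the handler detects the monitored resource once and passes it to
# the transport via the new ``resource`` keyword threaded through above.
import logging

import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging_v2.handlers.transports import SyncTransport

client = google.cloud.logging.Client()
handler = CloudLoggingHandler(client, name="example-log", transport=SyncTransport)
logging.getLogger().addHandler(handler)
logging.warning("entry written with the handler's detected resource")
```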
diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index bbfacf59faa5..353e7d2f65f7 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -860,7 +860,7 @@ def __init__(self, project): class _Transport(object): - def __init__(self, client, name): + def __init__(self, client, name, resource=None): self.client = client self.name = name diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index f408de4769ca..0c547d736c88 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -509,11 +509,12 @@ def commit(self): class _Logger(object): - def __init__(self, name): + def __init__(self, name, resource=None): self.name = name self._batch_cls = _Batch self._batch = None self._num_batches = 0 + self.resource = resource def batch(self): self._batch = self._batch_cls() @@ -530,6 +531,6 @@ def __init__(self, project, _http=None, credentials=None): self._credentials = credentials self._connection = mock.Mock(credentials=credentials, spec=["credentials"]) - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) + def logger(self, name, resource=None): # pylint: disable=unused-argument + self._logger = _Logger(name, resource=resource) return self._logger diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index 4cbfab02e92e..71ef1366a9fd 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -29,10 +29,13 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_send_is_abstract(self): - target = self._make_one() + target = self._make_one("client", "name") with self.assertRaises(NotImplementedError): target.send(None, None, resource=None) + def test_resource_is_valid_argument(self): + self._make_one("client", "name", resource="resource") + def test_flush_is_abstract_and_optional(self): - target = self._make_one() + target = self._make_one("client", "name") target.flush() diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index cc8ffe284546..bdc78d89a463 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -91,8 +91,9 @@ def test_send_struct(self): class _Logger(object): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE - def __init__(self, name): + def __init__(self, name, resource=_GLOBAL_RESOURCE): self.name = name + self.resource = resource def log( self, @@ -119,8 +120,8 @@ class _Client(object): def __init__(self, project): self.project = project - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) + def logger(self, name, resource=None): # pylint: disable=unused-argument + self._logger = _Logger(name, resource=resource) return self._logger From
8932346b7924ffd85fba1fbb27983300e648efbe Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 14 Feb 2022 16:51:54 +0100 Subject: [PATCH 598/855] chore(deps): update dependency pytest to v7.0.1 (#490) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 27df4634c3c3..c531e813e29e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==7.0.0 +pytest==7.0.1 From f5fdec1e4ca5e4f013be1dc1f1912d7cdb5a2b1c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 7 Mar 2022 16:54:42 -0500 Subject: [PATCH 599/855] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#494) --- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/testing/constraints-3.6.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 2cf113e34899..2b39d7bbd804 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -32,7 +32,7 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index 250c505ff675..0aa016644404 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -5,6 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.28.0 +google-api-core==1.31.5 google-cloud-core==1.4.1 proto-plus==1.15.0 From acfa916b58223141a3bb31c99ee24f06a0d13054 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Mar 2022 01:00:22 +0000 Subject: [PATCH 600/855] feat: KMS configuration in settings (#489) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 chore: formatting changes PiperOrigin-RevId: 430243637 Source-Link: https://github.com/googleapis/googleapis/commit/95da686e8840cf3edf872ce3d095967e24e41bf6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a1f056b7689ccbe5aebc0bfdd318e9945ee7602a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTFmMDU2Yjc2ODljY2JlNWFlYmMwYmZkZDMxOGU5OTQ1ZWU3NjAyYSJ9 feat: Update Logging API with latest changes PiperOrigin-RevId: 429289471 Source-Link: https://github.com/googleapis/googleapis/commit/acd5f89b8addd2ff54f41a7d43ff9b122bb43337 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8a12622536ae2e9a8978198a151e89234b839b20 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGExMjYyMjUzNmFlMmU5YTg5NzgxOThhMTUxZTg5MjM0YjgzOWIyMCJ9 chore: use gapic-generator-python 0.63.2 docs: add generated snippets chore: update copyright year to 2022 PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 --- .../cloud/logging_v2/gapic_metadata.json | 30 + .../cloud/logging_v2/services/__init__.py | 2 +- .../services/config_service_v2/__init__.py | 2 +- .../config_service_v2/async_client.py | 1089 +++++- .../services/config_service_v2/client.py | 1099 +++++- .../services/config_service_v2/pagers.py | 2 +- .../config_service_v2/transports/__init__.py | 2 +- .../config_service_v2/transports/base.py | 45 +- .../config_service_v2/transports/grpc.py | 221 +- .../transports/grpc_asyncio.py | 229 +- .../services/logging_service_v2/__init__.py | 2 +- .../logging_service_v2/async_client.py | 215 +- .../services/logging_service_v2/client.py | 215 +- .../services/logging_service_v2/pagers.py | 2 +- .../logging_service_v2/transports/__init__.py | 2 +- .../logging_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/grpc.py | 12 +- .../transports/grpc_asyncio.py | 12 +- .../services/metrics_service_v2/__init__.py | 2 +- .../metrics_service_v2/async_client.py | 105 +- .../services/metrics_service_v2/client.py | 105 +- .../services/metrics_service_v2/pagers.py | 2 +- .../metrics_service_v2/transports/__init__.py | 2 +- .../metrics_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../google/cloud/logging_v2/types/__init__.py | 18 +- .../cloud/logging_v2/types/log_entry.py | 57 +- .../google/cloud/logging_v2/types/logging.py | 103 +- .../cloud/logging_v2/types/logging_config.py | 606 ++- 
.../cloud/logging_v2/types/logging_metrics.py | 18 +- ...onfig_service_v2_copy_log_entries_async.py | 50 + ...config_service_v2_copy_log_entries_sync.py | 50 + ...d_config_service_v2_create_bucket_async.py | 46 + ...ed_config_service_v2_create_bucket_sync.py | 46 + ...onfig_service_v2_create_exclusion_async.py | 50 + ...config_service_v2_create_exclusion_sync.py | 50 + ...ted_config_service_v2_create_sink_async.py | 50 + ...ated_config_service_v2_create_sink_sync.py | 50 + ...ted_config_service_v2_create_view_async.py | 46 + ...ated_config_service_v2_create_view_sync.py | 46 + ...d_config_service_v2_delete_bucket_async.py | 43 + ...ed_config_service_v2_delete_bucket_sync.py | 43 + ...onfig_service_v2_delete_exclusion_async.py | 43 + ...config_service_v2_delete_exclusion_sync.py | 43 + ...ted_config_service_v2_delete_sink_async.py | 43 + ...ated_config_service_v2_delete_sink_sync.py | 43 + ...ted_config_service_v2_delete_view_async.py | 43 + ...ated_config_service_v2_delete_view_sync.py | 43 + ...ated_config_service_v2_get_bucket_async.py | 45 + ...rated_config_service_v2_get_bucket_sync.py | 45 + ...nfig_service_v2_get_cmek_settings_async.py | 45 + ...onfig_service_v2_get_cmek_settings_sync.py | 45 + ...d_config_service_v2_get_exclusion_async.py | 45 + ...ed_config_service_v2_get_exclusion_sync.py | 45 + ...ed_config_service_v2_get_settings_async.py | 45 + ...ted_config_service_v2_get_settings_sync.py | 45 + ...erated_config_service_v2_get_sink_async.py | 45 + ...nerated_config_service_v2_get_sink_sync.py | 45 + ...erated_config_service_v2_get_view_async.py | 45 + ...nerated_config_service_v2_get_view_sync.py | 45 + ...ed_config_service_v2_list_buckets_async.py | 46 + ...ted_config_service_v2_list_buckets_sync.py | 46 + ...config_service_v2_list_exclusions_async.py | 46 + ..._config_service_v2_list_exclusions_sync.py | 46 + ...ated_config_service_v2_list_sinks_async.py | 46 + ...rated_config_service_v2_list_sinks_sync.py | 46 + ...ated_config_service_v2_list_views_async.py | 46 + ...rated_config_service_v2_list_views_sync.py | 46 + ...config_service_v2_undelete_bucket_async.py | 43 + ..._config_service_v2_undelete_bucket_sync.py | 43 + ...d_config_service_v2_update_bucket_async.py | 45 + ...ed_config_service_v2_update_bucket_sync.py | 45 + ...g_service_v2_update_cmek_settings_async.py | 45 + ...ig_service_v2_update_cmek_settings_sync.py | 45 + ...onfig_service_v2_update_exclusion_async.py | 50 + ...config_service_v2_update_exclusion_sync.py | 50 + ...config_service_v2_update_settings_async.py | 45 + ..._config_service_v2_update_settings_sync.py | 45 + ...ted_config_service_v2_update_sink_async.py | 50 + ...ated_config_service_v2_update_sink_sync.py | 50 + ...ted_config_service_v2_update_view_async.py | 45 + ...ated_config_service_v2_update_view_sync.py | 45 + ...ted_logging_service_v2_delete_log_async.py | 43 + ...ated_logging_service_v2_delete_log_sync.py | 43 + ...gging_service_v2_list_log_entries_async.py | 46 + ...ogging_service_v2_list_log_entries_sync.py | 46 + ...ated_logging_service_v2_list_logs_async.py | 46 + ...rated_logging_service_v2_list_logs_sync.py | 46 + ...st_monitored_resource_descriptors_async.py | 45 + ...ist_monitored_resource_descriptors_sync.py | 45 + ...gging_service_v2_tail_log_entries_async.py | 56 + ...ogging_service_v2_tail_log_entries_sync.py | 56 + ...ging_service_v2_write_log_entries_async.py | 48 + ...gging_service_v2_write_log_entries_sync.py | 48 + ...rics_service_v2_create_log_metric_async.py | 50 + ...trics_service_v2_create_log_metric_sync.py | 50 + 
...rics_service_v2_delete_log_metric_async.py | 43 + ...trics_service_v2_delete_log_metric_sync.py | 43 + ...metrics_service_v2_get_log_metric_async.py | 45 + ..._metrics_service_v2_get_log_metric_sync.py | 45 + ...trics_service_v2_list_log_metrics_async.py | 46 + ...etrics_service_v2_list_log_metrics_sync.py | 46 + ...rics_service_v2_update_log_metric_async.py | 50 + ...trics_service_v2_update_log_metric_sync.py | 50 + .../snippet_metadata_logging_v2.json | 3269 +++++++++++++++++ .../google-cloud-logging/tests/__init__.py | 2 +- .../tests/unit/__init__.py | 2 +- .../tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/__init__.py | 2 +- .../logging_v2/test_config_service_v2.py | 627 +++- .../logging_v2/test_logging_service_v2.py | 2 +- .../logging_v2/test_metrics_service_v2.py | 14 +- 113 files changed, 10854 insertions(+), 691 deletions(-) create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py create mode 100644 
packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py create mode 100644 
packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py create mode 100644 
packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json index da4eefd477fc..a629e5a50d4c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConfigServiceV2Client", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -65,6 +70,11 @@ "get_exclusion" ] }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -115,6 +125,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -130,6 +145,11 @@ "grpc-async": { "libraryClient": "ConfigServiceV2AsyncClient", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -185,6 +205,11 @@ "get_exclusion" ] }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -235,6 +260,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index e7f6042801dd..6eb3681ce414 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
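The gapic_metadata.json hunk above registers the new ``CopyLogEntries``, ``GetSettings`` and ``UpdateSettings`` RPCs, mapping each RPC name to the generated client method that implements it, per transport. A minimal sketch of resolving one of these entries to a bound method follows; it assumes the standard GAPIC metadata layout (a top-level ``services`` -> service name -> ``clients`` -> transport nesting above the ``rpcs`` key shown in the hunk), and the metadata path and helper name are illustrative, not part of this patch:

.. code-block:: python

    import json

    from google.cloud import logging_v2

    def resolve_rpc_method(metadata_path, service, transport, rpc):
        # Load the gapic_metadata.json shipped with the package.
        with open(metadata_path) as fp:
            metadata = json.load(fp)

        # e.g. rpcs["CopyLogEntries"]["methods"] == ["copy_log_entries"],
        # exactly as added in the hunk above.
        client_entry = metadata["services"][service]["clients"][transport]
        method_name = client_entry["rpcs"][rpc]["methods"][0]

        # "libraryClient" names the class, e.g. "ConfigServiceV2Client".
        client_cls = getattr(logging_v2, client_entry["libraryClient"])
        return getattr(client_cls, method_name)

    # resolve_rpc_method("gapic_metadata.json", "ConfigServiceV2", "grpc",
    #                    "CopyLogEntries") -> ConfigServiceV2Client.copy_log_entries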
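The client.py hunk later in this patch also adds ``settings_path`` and ``parse_settings_path`` helpers for the new ``Settings`` resource. A short sketch of the round trip they implement, taken from that hunk; the project ID is made up:

.. code-block:: python

    from google.cloud import logging_v2

    # Build the fully-qualified resource name for a project's settings.
    path = logging_v2.ConfigServiceV2Client.settings_path(project="my-project")
    assert path == "projects/my-project/settings"

    # Parse it back into its component segments.
    assert logging_v2.ConfigServiceV2Client.parse_settings_path(path) == {
        "project": "my-project"
    }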
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 81621a4e2f03..de3a6bbb751d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -31,6 +31,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -62,6 +64,8 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) + settings_path = staticmethod(ConfigServiceV2Client.settings_path) + parse_settings_path = staticmethod(ConfigServiceV2Client.parse_settings_path) common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path ) @@ -225,7 +229,27 @@ async def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets. + r"""Lists log buckets. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): @@ -313,7 +337,26 @@ async def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): @@ -326,7 +369,9 @@ async def get_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.GetBucketRequest(request) @@ -359,9 +404,30 @@ async def create_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. 
+ r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): @@ -374,7 +440,9 @@ async def create_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.CreateBucketRequest(request) @@ -407,17 +475,38 @@ async def update_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + r"""Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + .. code-block:: python - A buckets region may not be modified after it is created. + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): @@ -430,7 +519,9 @@ async def update_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.UpdateBucketRequest(request) @@ -463,9 +554,29 @@ async def delete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -506,8 +617,26 @@ async def undelete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -549,7 +678,27 @@ async def list_views( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): @@ -629,7 +778,26 @@ async def get_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): @@ -642,8 +810,8 @@ async def get_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -677,8 +845,29 @@ async def create_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): @@ -691,8 +880,8 @@ async def create_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -726,8 +915,31 @@ async def update_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): @@ -740,8 +952,8 @@ async def update_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -775,7 +987,27 @@ async def delete_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -819,6 +1051,26 @@ async def list_sinks( ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. @@ -915,6 +1167,25 @@ async def get_sink( ) -> logging_config.LogSink: r"""Gets a sink. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. @@ -928,7 +1199,9 @@ async def get_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -944,12 +1217,12 @@ async def get_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1018,6 +1291,31 @@ async def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1031,8 +1329,9 @@ async def create_sink( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1055,12 +1354,12 @@ async def create_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1120,6 +1419,31 @@ async def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. @@ -1134,7 +1458,9 @@ async def update_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1152,16 +1478,18 @@ async def update_sink( overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1177,12 +1505,12 @@ async def update_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1251,6 +1579,23 @@ async def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1265,7 +1610,9 @@ async def delete_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1334,7 +1681,29 @@ async def list_exclusions( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions in a parent resource. + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): @@ -1430,7 +1799,26 @@ async def get_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): @@ -1445,8 +1833,9 @@ async def get_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1459,17 +1848,13 @@ async def get_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1530,10 +1915,34 @@ async def create_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. 
You can have up to 10 exclusions in a resource. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): @@ -1549,8 +1958,10 @@ async def create_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1571,17 +1982,13 @@ async def create_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1634,8 +2041,33 @@ async def update_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): @@ -1650,8 +2082,9 @@ async def update_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1686,17 +2119,13 @@ async def update_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. 
If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1749,7 +2178,23 @@ async def delete_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an exclusion. + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -1765,8 +2210,9 @@ async def delete_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1832,21 +2278,42 @@ async def get_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + r"""Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1861,12 +2328,12 @@ async def get_cmek_settings( a project, folder, organization, billing account, or flexible resource. 
- Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -1902,11 +2369,11 @@ async def update_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + r"""Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -1914,15 +2381,35 @@ async def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1937,12 +2424,12 @@ async def update_cmek_settings( a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -1970,6 +2457,340 @@ async def update_cmek_settings( # Done; return the response. return response + async def get_settings( + self, + request: Union[logging_config.GetSettingsRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. 
+
+        Note: Settings for the Log Router can be retrieved for Google Cloud
+        projects, folders, organizations and billing accounts. Currently
+        it can only be configured for organizations. Once configured for
+        an organization, it applies to all projects and folders in the
+        Google Cloud organization.
+
+        See `Enabling CMEK for Log
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+
+        .. code-block:: python
+
+            from google.cloud import logging_v2
+
+            def sample_get_settings():
+                # Create a client
+                client = logging_v2.ConfigServiceV2Client()
+
+                # Initialize request argument(s)
+                request = logging_v2.GetSettingsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_settings(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]):
+                The request object. The parameters to
+                [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings].
+                See [Enabling CMEK for Log
+                Router](https://cloud.google.com/logging/docs/routing/managed-encryption)
+                for more information.
+            name (:class:`str`):
+                Required. The resource for which to retrieve settings.
+
+                ::
+
+                    "projects/[PROJECT_ID]/settings"
+                    "organizations/[ORGANIZATION_ID]/settings"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]/settings"
+                    "folders/[FOLDER_ID]/settings"
+
+                For example:
+
+                ``"organizations/12345/settings"``
+
+                Note: Settings for the Log Router can be retrieved for Google
+                Cloud projects, folders, organizations and billing
+                accounts. Currently it can only be configured for
+                organizations. Once configured for an organization, it
+                applies to all projects and folders in the Google Cloud
+                organization.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.Settings:
+                Describes the settings associated
+                with a project, folder, organization,
+                billing account, or flexible resource.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = logging_config.GetSettingsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def update_settings(
+        self,
+        request: Union[logging_config.UpdateSettingsRequest, dict] = None,
+        *,
+        settings: logging_config.Settings = None,
+        update_mask: field_mask_pb2.FieldMask = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_config.Settings:
+        r"""Updates the Log Router settings for the given resource.
+
+        Note: Settings for the Log Router can currently only be
+        configured for Google Cloud organizations. Once configured, it
+        applies to all projects and folders in the Google Cloud
+        organization.
+
+        [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]
+        will fail if 1) ``kms_key_name`` is invalid, 2) the
+        associated service account does not have the required
+        ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+        the key, 3) access to the key is disabled, 4) ``location_id`` is
+        not supported by Logging, or 5) ``location_id`` violates
+        OrgPolicy.
+
+        See `Enabling CMEK for Log
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+
+        .. code-block:: python
+
+            from google.cloud import logging_v2
+
+            def sample_update_settings():
+                # Create a client
+                client = logging_v2.ConfigServiceV2Client()
+
+                # Initialize request argument(s)
+                request = logging_v2.UpdateSettingsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.update_settings(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]):
+                The request object. The parameters to
+                [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings].
+                See [Enabling CMEK for Log
+                Router](https://cloud.google.com/logging/docs/routing/managed-encryption)
+                for more information.
+            settings (:class:`google.cloud.logging_v2.types.Settings`):
+                Required. The settings to update.
+
+                See `Enabling CMEK for Log
+                Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+                for more information.
+
+                This corresponds to the ``settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Optional. Field mask identifying which fields from
+                ``settings`` should be updated. A field will be
+                overwritten if and only if it is in the update mask.
+                Output only fields cannot be updated.
+
+                See [FieldMask][google.protobuf.FieldMask] for more
+                information.
+
+                For example: ``"updateMask=kmsKeyName"``
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.Settings:
+                Describes the settings associated
+                with a project, folder, organization,
+                billing account, or flexible resource.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([settings, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+ ) + + request = logging_config.UpdateSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def copy_log_entries( + self, + request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.copy_log_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 10bed9347736..041b1c838d82 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,6 +34,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -228,6 +230,17 @@ def parse_log_view_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def settings_path(project: str,) -> str: + """Returns a fully-qualified settings string.""" + return "projects/{project}/settings".format(project=project,) + + @staticmethod + def parse_settings_path(path: str) -> Dict[str, str]: + """Parses a settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/settings$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Returns a fully-qualified billing_account string.""" @@ -461,7 +474,27 @@ def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets. + r"""Lists log buckets. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): @@ -549,7 +582,26 @@ def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): @@ -562,7 +614,9 @@ def get_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. 
# Minor optimization to avoid making a copy if the user passes @@ -596,9 +650,30 @@ def create_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): @@ -611,7 +686,9 @@ def create_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -645,17 +722,38 @@ def update_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + r"""Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + .. code-block:: python - A buckets region may not be modified after it is created. + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): @@ -668,7 +766,9 @@ def update_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -702,9 +802,29 @@ def delete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -746,8 +866,26 @@ def undelete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -790,7 +928,27 @@ def list_views( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): @@ -870,7 +1028,26 @@ def get_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): @@ -883,8 +1060,8 @@ def get_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -919,8 +1096,29 @@ def create_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + + ..
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): @@ -933,8 +1131,8 @@ def create_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -969,8 +1167,31 @@ def update_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that the system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): @@ -983,8 +1204,8 @@ def update_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1019,7 +1240,27 @@ def delete_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that the system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1064,6 +1305,26 @@ def list_sinks( ) -> pagers.ListSinksPager: r"""Lists sinks. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. @@ -1149,6 +1410,25 @@ def get_sink( ) -> logging_config.LogSink: r"""Gets a sink. + ..
code-block:: python + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. @@ -1162,7 +1442,9 @@ def get_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1178,12 +1460,12 @@ def get_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1241,6 +1523,31 @@ def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1254,8 +1561,9 @@ def create_sink( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1278,12 +1586,12 @@ def create_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1343,6 +1651,31 @@ def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. @@ -1357,7 +1690,9 @@ def update_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1375,16 +1710,18 @@ def update_sink( overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1400,12 +1737,12 @@ def update_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1463,6 +1800,23 @@ def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1477,7 +1831,9 @@ def delete_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
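Because an empty ``updateMask`` currently falls back to ``destination,filter,includeChildren`` (and, per the docstring above, will eventually become an error), callers updating a sink are better off passing the mask explicitly. A hedged sketch using the flattened arguments documented above; all values are placeholders, not taken from the patch:

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    sink = logging_v2.LogSink()
    sink.name = "name_value"
    sink.destination = "destination_value"
    sink.filter = "severity>=ERROR"

    # Only ``filter`` is overwritten; other mutable fields are left alone.
    response = client.update_sink(
        sink_name="sink_name_value",
        sink=sink,
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )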
+ For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1535,7 +1891,29 @@ def list_exclusions( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions in a parent resource. + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): @@ -1620,7 +1998,26 @@ def get_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): @@ -1635,8 +2032,9 @@ def get_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1649,17 +2047,13 @@ def get_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1709,10 +2103,34 @@ def create_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): @@ -1728,8 +2146,10 @@ def create_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1750,17 +2170,13 @@ def create_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1813,8 +2229,33 @@ def update_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): @@ -1829,8 +2270,9 @@ def update_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1865,17 +2307,13 @@ def update_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. 
Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1928,7 +2366,23 @@ def delete_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an exclusion. + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -1944,8 +2398,9 @@ def delete_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2000,21 +2455,42 @@ def get_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + r"""Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2029,12 +2505,12 @@ def get_cmek_settings( a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. 
Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -2071,11 +2547,11 @@ def update_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + r"""Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -2083,15 +2559,35 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.logging_v2.types.CmekSettings: Describes the customer-managed encryption key (CMEK) settings associated with a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -2140,6 +2636,341 @@ def update_cmek_settings( # Done; return the response. return response + def get_settings( + self, + request: Union[logging_config.GetSettingsRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be retrieved for Google Cloud + projects, folders, organizations and billing accounts.
Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be retrieved for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response.
+ return response + + def update_settings( + self, + request: Union[logging_config.UpdateSettingsRequest, dict] = None, + *, + settings: logging_config.Settings = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violates + OrgPolicy. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): + The request object. The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateSettingsRequest.
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def copy_log_entries( + self, + request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CopyLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self): return self diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index b0be053e43e5..3c5ce7754242 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index b1e24fc64213..93a29df099b2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 90e3054882ac..6dfc1fd2fd55 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
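The ``copy_log_entries`` method added above is the first RPC on this client that returns a long-running operation, so the returned ``google.api_core.operation.Operation`` has to be resolved explicitly. A hedged sketch of consuming it; the timeout is illustrative, the ``log_entries_copied_count`` field is assumed from the v2 ``CopyLogEntriesResponse`` proto, and all request values are placeholders:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    operation = client.copy_log_entries(
        request=logging_v2.CopyLogEntriesRequest(
            name="name_value",
            destination="destination_value",
        )
    )

    # Block until the server-side copy finishes; raises on failure.
    response = operation.result(timeout=300)
    print(response.log_entries_copied_count)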
@@ -22,10 +22,12 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore try: @@ -286,6 +288,15 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, default_timeout=None, client_info=client_info, + ), + self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, default_timeout=None, client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method.wrap_method( + self.copy_log_entries, default_timeout=None, client_info=client_info, + ), } def close(self): @@ -297,6 +308,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_buckets( self, @@ -516,5 +532,32 @@ def update_cmek_settings( ]: raise NotImplementedError() + @property + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: + raise NotImplementedError() + + @property + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: + raise NotImplementedError() + + @property + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + __all__ = ("ConfigServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 39d9d4f93cf8..301334f806c0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
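The ``_prep_wrapped_messages`` additions above register each new RPC through ``gapic_v1.method.wrap_method``, which layers default retry/timeout handling and client-info metadata over the raw transport callable. A toy sketch of that wrapping, assuming only public ``api_core`` behaviour; ``fake_rpc`` is an illustrative stand-in, not part of the patch:

.. code-block:: python

    from google.api_core import gapic_v1

    def fake_rpc(request, metadata=()):
        # Stand-in for a transport stub such as ``self.get_settings``.
        return request

    wrapped = gapic_v1.method.wrap_method(
        fake_rpc,
        default_timeout=None,
        client_info=gapic_v1.client_info.ClientInfo(),
    )

    # The wrapper injects x-goog-api-client metadata before delegating.
    print(wrapped("request"))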
@@ -17,6 +17,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers +from google.api_core import operations_v1 from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -25,6 +26,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -111,6 +113,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -229,6 +232,20 @@ def grpc_channel(self) -> grpc.Channel: """ return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def list_buckets( self, @@ -237,7 +254,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -263,7 +280,7 @@ def get_bucket( ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -289,9 +306,9 @@ def create_bucket( ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -317,17 +334,18 @@ def update_bucket( ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. 
Returns: Callable[[~.UpdateBucketRequest], @@ -353,9 +371,12 @@ def delete_bucket( ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -381,8 +402,9 @@ def undelete_bucket( ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -408,7 +430,7 @@ def list_views( ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -434,7 +456,7 @@ def get_view( ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket.. Returns: Callable[[~.GetViewRequest], @@ -460,8 +482,8 @@ def create_view( ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -487,8 +509,11 @@ def update_view( ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. Returns: Callable[[~.UpdateViewRequest], @@ -514,7 +539,10 @@ def delete_view( ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -682,7 +710,8 @@ def list_exclusions( ]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. 
Returns: Callable[[~.ListExclusionsRequest], @@ -708,7 +737,7 @@ def get_exclusion( ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -734,10 +763,9 @@ def create_exclusion( ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -763,8 +791,8 @@ def update_exclusion( ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. Returns: Callable[[~.UpdateExclusionRequest], @@ -790,7 +818,7 @@ def delete_exclusion( ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -816,13 +844,14 @@ def get_cmek_settings( ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -852,11 +881,11 @@ def update_cmek_settings( ]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -864,7 +893,7 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. 
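Both transports cache their expensive objects on first use: the ``operations_client`` property added earlier in this file, and the ``_stubs`` dictionary consulted by the stub properties in the next hunk. A minimal sketch of that lazy-caching idiom with illustrative names only, not the real transport class:

.. code-block:: python

    from typing import Optional

    class TransportSketch:
        """Illustrative reduction of the transport's caching pattern."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}
            self._operations_client: Optional[object] = None

        @property
        def operations_client(self):
            # Built once on first access, then reused; the real code
            # constructs operations_v1.OperationsClient(self.grpc_channel).
            if self._operations_client is None:
                self._operations_client = object()
            return self._operations_client

        def stub(self, method, request_serializer, response_deserializer):
            # Mirrors the ``if "name" not in self._stubs`` guard used by
            # each stub property: one unary-unary callable per RPC name.
            if method not in self._stubs:
                self._stubs[method] = self._channel.unary_unary(
                    method,
                    request_serializer=request_serializer,
                    response_deserializer=response_deserializer,
                )
            return self._stubs[method]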
@@ -886,6 +915,112 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + @property + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["update_settings"] + + @property + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["copy_log_entries"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index b4228c690106..86e67253cae0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -25,6 +26,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -157,6 +159,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -231,6 +234,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_buckets( self, @@ -240,7 +259,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -268,7 +287,7 @@ def get_bucket( ]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -296,9 +315,9 @@ def create_bucket( ]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. 
After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -326,17 +345,18 @@ def update_bucket( ]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -362,9 +382,12 @@ def delete_bucket( ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -390,8 +413,9 @@ def undelete_bucket( ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -419,7 +443,7 @@ def list_views( ]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -445,7 +469,7 @@ def get_view( ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket. Returns: Callable[[~.GetViewRequest], @@ -473,8 +497,8 @@ def create_view( ]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -502,8 +526,11 @@ def update_view( ]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that the system is not in a state where it can update the + view. If this occurs, please try again in a few minutes.
Returns: Callable[[~.UpdateViewRequest], @@ -529,7 +556,10 @@ def delete_view( ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that the system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -704,7 +734,8 @@ def list_exclusions( ]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -732,7 +763,7 @@ def get_exclusion( ]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -760,10 +791,9 @@ def create_exclusion( ]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -791,8 +821,8 @@ def update_exclusion( ]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. Returns: Callable[[~.UpdateExclusionRequest], @@ -818,7 +848,7 @@ def delete_exclusion( ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -846,13 +876,14 @@ def get_cmek_settings( ]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -883,11 +914,11 @@ def update_cmek_settings( ]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization.
[UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -895,7 +926,7 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -917,6 +948,118 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + @property + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], Awaitable[logging_config.Settings] + ]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be retrieved for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings] + ]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled, 4) ``location_id`` + is not supported by Logging, or 5) ``location_id`` violates + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["update_settings"] + + @property + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["copy_log_entries"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index bd7a79820348..41b2a2d15530 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index dc8b56b8138f..c89da25a5164 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -223,11 +223,28 @@ async def delete_log( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -235,16 +252,15 @@ async def delete_log( log_name (:class:`str`): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -324,6 +340,29 @@ async def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -332,19 +371,17 @@ async def write_log_entries( to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -400,17 +437,17 @@ async def write_log_entries( Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. 
To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -490,6 +527,27 @@ async def list_log_entries( For ways to export log entries, see `Exporting Logs `__. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -497,18 +555,17 @@ async def list_log_entries( Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -620,6 +677,26 @@ async def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -686,18 +763,37 @@ async def list_logs( or billing accounts. Only logs that have entries are listed. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (:class:`str`): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -782,6 +878,37 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to `TailLogEntries`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index b33821be54a4..3eae59704dc1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -405,11 +405,28 @@ def delete_log( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -417,16 +434,15 @@ def delete_log( log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -495,6 +511,29 @@ def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -503,19 +542,17 @@ def write_log_entries( to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -571,17 +608,17 @@ def write_log_entries( Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. 
+ limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -649,6 +686,27 @@ def list_log_entries( For ways to export log entries, see `Exporting Logs `__. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -656,18 +714,17 @@ def list_log_entries( Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -768,6 +825,26 @@ def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -826,18 +903,37 @@ def list_logs( or billing accounts. Only logs that have entries are listed. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -911,6 +1007,37 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): The request object iterator. The parameters to `TailLogEntries`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index ca4d01fac494..e1e7188cd167 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 65e713121f22..4e0163fe6542 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
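The new ``CopyLogEntries`` stubs wired into the config transports above return a ``google.longrunning.Operation``, which is why this patch also adds the cached ``operations_client`` property. A minimal usage sketch, assuming the matching ``ConfigServiceV2Client.copy_log_entries`` method is generated elsewhere in this patch; the project, bucket, and Cloud Storage destination names below are hypothetical:

.. code-block:: python

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import CopyLogEntriesRequest

    def sample_copy_log_entries():
        # Create a client
        client = ConfigServiceV2Client()

        # Initialize request argument(s); all resource names are hypothetical
        request = CopyLogEntriesRequest(
            name="projects/my-project/locations/global/buckets/my-bucket",
            filter='timestamp < "2022-01-01T00:00:00Z"',
            destination="storage.googleapis.com/my-gcs-bucket",
        )

        # Make the request; copy_log_entries returns a long-running operation
        operation = client.copy_log_entries(request=request)

        # Block until the copy completes (polled via the operations client)
        response = operation.result()
        print(response)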
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 6fe2e9e8a46c..5f474f006db5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4f5c9b1caf3a..76b562d7ee61 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -233,11 +233,11 @@ def grpc_channel(self) -> grpc.Channel: def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. Returns: Callable[[~.DeleteLogRequest], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 27b094831c8d..1ef7198fdc7b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -237,11 +237,11 @@ def delete_log( ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. 
Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. Returns: Callable[[~.DeleteLogRequest], diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index f37e39314d1d..fc0615f19919 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 311806df2a37..e3bf4c51a712 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -216,6 +216,26 @@ async def list_log_metrics( ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -309,6 +329,25 @@ async def get_log_metric( ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. @@ -405,6 +444,30 @@ async def create_log_metric( ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -501,6 +564,30 @@ async def update_log_metric( ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -608,6 +695,22 @@ async def delete_log_metric( ) -> None: r"""Deletes a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index ade883811a74..5ab25db207bd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -410,6 +410,26 @@ def list_log_metrics( ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -492,6 +512,25 @@ def get_log_metric( ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. @@ -577,6 +616,30 @@ def create_log_metric( ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -673,6 +736,30 @@ def update_log_metric( ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -769,6 +856,22 @@ def delete_log_metric( ) -> None: r"""Deletes a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 7026e3858c12..2c647cda1810 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
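The metrics samples above iterate results item by item; the ``ListLogMetricsPager`` touched in the ``pagers.py`` hunk also supports page-level iteration. A short sketch, with a hypothetical project name:

.. code-block:: python

    from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
    from google.cloud.logging_v2.types import ListLogMetricsRequest

    def sample_iterate_metric_pages():
        # Create a client
        client = MetricsServiceV2Client()

        # Initialize request argument(s); the parent project is hypothetical
        request = ListLogMetricsRequest(
            parent="projects/my-project",
            page_size=50,
        )

        # Make the request
        pager = client.list_log_metrics(request=request)

        # pager.pages lazily fetches one ListLogMetricsResponse at a time;
        # each page carries the raw `metrics` list for that response.
        for page in pager.pages:
            for metric in page.metrics:
                print(metric.name)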
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 10ccb830c7e3..e28f020df0c7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index fef40f239565..b3d9bab57245 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 7b72b756fbfe..d0241fdd2857 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 889d7072e1e9..28ff48f5cddb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 7d1cdd99e4be..43b5674dd412 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
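The export hunk that follows makes the new ``Settings`` types importable from ``logging_v2.types``. As a sketch of how the ``GetSettings`` RPC wired into the config transports above might be called, assuming the matching client method is generated elsewhere in this patch (the organization ID is hypothetical):

.. code-block:: python

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import GetSettingsRequest

    def sample_get_settings():
        # Create a client
        client = ConfigServiceV2Client()

        # Initialize request argument(s); Settings can be read for projects,
        # folders, organizations, and billing accounts (organization shown here)
        request = GetSettingsRequest(
            name="organizations/123/settings",
        )

        # Make the request
        response = client.get_settings(request=request)

        # Handle the response
        print(response.kms_key_name)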
@@ -17,6 +17,7 @@ LogEntry, LogEntryOperation, LogEntrySourceLocation, + LogSplit, ) from .logging import ( DeleteLogRequest, @@ -35,6 +36,9 @@ from .logging_config import ( BigQueryOptions, CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, CreateSinkRequest, @@ -46,6 +50,7 @@ GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetSettingsRequest, GetSinkRequest, GetViewRequest, ListBucketsRequest, @@ -60,13 +65,16 @@ LogExclusion, LogSink, LogView, + Settings, UndeleteBucketRequest, UpdateBucketRequest, UpdateCmekSettingsRequest, UpdateExclusionRequest, + UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, LifecycleState, + OperationState, ) from .logging_metrics import ( CreateLogMetricRequest, @@ -82,6 +90,7 @@ "LogEntry", "LogEntryOperation", "LogEntrySourceLocation", + "LogSplit", "DeleteLogRequest", "ListLogEntriesRequest", "ListLogEntriesResponse", @@ -96,6 +105,9 @@ "WriteLogEntriesResponse", "BigQueryOptions", "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", "CreateBucketRequest", "CreateExclusionRequest", "CreateSinkRequest", @@ -107,6 +119,7 @@ "GetBucketRequest", "GetCmekSettingsRequest", "GetExclusionRequest", + "GetSettingsRequest", "GetSinkRequest", "GetViewRequest", "ListBucketsRequest", @@ -121,13 +134,16 @@ "LogExclusion", "LogSink", "LogView", + "Settings", "UndeleteBucketRequest", "UpdateBucketRequest", "UpdateCmekSettingsRequest", "UpdateExclusionRequest", + "UpdateSettingsRequest", "UpdateSinkRequest", "UpdateViewRequest", "LifecycleState", + "OperationState", "CreateLogMetricRequest", "DeleteLogMetricRequest", "GetLogMetricRequest", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 93e428622180..1bc7a3ea405b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ __protobuf__ = proto.module( package="google.logging.v2", - manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation",}, + manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation", "LogSplit",}, ) @@ -59,6 +59,7 @@ class LogEntry(proto.Message): ``[LOG_ID]`` must be URL-encoded within ``log_name``. Example: ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can only include the following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, @@ -66,7 +67,7 @@ class LogEntry(proto.Message): For backward compatibility, if ``log_name`` begins with a forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. + is ingested as usual, but the forward-slash is removed. Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. @@ -139,9 +140,22 @@ class LogEntry(proto.Message): Optional. Information about the HTTP request associated with this log entry, if applicable. 
labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): - Optional. A set of user-defined (key, value) - data that provides additional information about - the log entry. + Optional. A map of key, value pairs that provides additional + information about the log entry. The labels can be + user-defined or system-defined. + + User-defined labels are arbitrary key, value pairs that you + can use to classify logs. + + System-defined labels are defined by GCP services for + platform logs. They have two components - a service + namespace component and the attribute name. For example: + ``compute.googleapis.com/resource_name``. + + Cloud Logging truncates label keys that exceed 512 B and + label values that exceed 64 KB upon their associated log + entry being written. The truncation is indicated by an + ellipsis at the end of the character string. operation (google.cloud.logging_v2.types.LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. @@ -171,6 +185,10 @@ class LogEntry(proto.Message): source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. + split (google.cloud.logging_v2.types.LogSplit): + Optional. Information indicating this + LogEntry is part of a sequence of multiple log + entries split from a single LogEntry. """ log_name = proto.Field(proto.STRING, number=12,) @@ -201,6 +219,7 @@ class LogEntry(proto.Message): source_location = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) + split = proto.Field(proto.MESSAGE, number=35, message="LogSplit",) class LogEntryOperation(proto.Message): @@ -258,4 +277,30 @@ class LogEntrySourceLocation(proto.Message): function = proto.Field(proto.STRING, number=3,) +class LogSplit(proto.Message): + r"""Additional information used to correlate multiple log + entries. Used when a single LogEntry would exceed the Google + Cloud Logging size limit and is split across multiple log + entries. + + Attributes: + uid (str): + A globally unique identifier for all log entries in a + sequence of split log entries. All log entries with the same + \|LogSplit.uid\| are assumed to be part of the same sequence + of split log entries. + index (int): + The index of this LogEntry in the sequence of split log + entries. Log entries are given \|index\| values 0, 1, ..., + n-1 for a sequence of n log entries. + total_splits (int): + The total number of log entries that the + original LogEntry was split into. + """ + + uid = proto.Field(proto.STRING, number=1,) + index = proto.Field(proto.INT32, number=2,) + total_splits = proto.Field(proto.INT32, number=3,) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 8477c2a49ad2..76d86e34f2e4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -47,16 +47,15 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. 
The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. """ @@ -73,19 +72,17 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or folder that @@ -128,17 +125,17 @@ class WriteLogEntriesRequest(proto.Message): Log entries with timestamps that are more than the `logs retention - period `__ in - the past or more than 24 hours in the future will not be + period `__ in the + past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ for - calls to ``entries.write``, you should try to include - several log entries in this list, rather than calling this - method for each individual log entry. + limit `__ for calls + to ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for + each individual log entry. partial_success (bool): Optional. Whether valid entries should be written even if some other entries fail due to INVALID_ARGUMENT or @@ -197,18 +194,17 @@ class ListLogEntriesRequest(proto.Message): Required. 
Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -338,12 +334,10 @@ class ListLogsRequest(proto.Message): parent (str): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of @@ -357,14 +351,18 @@ class ListLogsRequest(proto.Message): should be identical to those in the previous call. resource_names (Sequence[str]): Optional. The resource name that owns the logs: - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` To support legacy queries, it could also be: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` """ parent = proto.Field(proto.STRING, number=1,) @@ -404,18 +402,17 @@ class TailLogEntriesRequest(proto.Message): Required. 
Name of a parent resource from which to retrieve log entries: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index f064f26b7bee..3dab7a14301e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ package="google.logging.v2", manifest={ "LifecycleState", + "OperationState", "LogBucket", "LogView", "LogSink", @@ -56,6 +57,12 @@ "GetCmekSettingsRequest", "UpdateCmekSettingsRequest", "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", }, ) @@ -67,18 +74,42 @@ class LifecycleState(proto.Enum): DELETE_REQUESTED = 2 +class OperationState(proto.Enum): + r"""List of different operation states. + High level state of the operation. This is used to report the + job's current state to the user. Once a long running operation + is created, the current state of the operation can be queried + even before the operation is finished and the final result is + available. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_SCHEDULED = 1 + OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 + OPERATION_STATE_RUNNING = 3 + OPERATION_STATE_SUCCEEDED = 4 + OPERATION_STATE_FAILED = 5 + OPERATION_STATE_CANCELLED = 6 + + class LogBucket(proto.Message): - r"""Describes a repository of logs. + r"""Describes a repository in which log entries are stored. Attributes: name (str): - The resource name of the bucket. For example: - "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" + Output only. The resource name of the bucket. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + For a list of supported locations, see `Supported + Regions `__ - For the location of ``global`` it is unspecified where logs - are actually stored. Once a bucket has been created, the - location can not be changed. 
+ For the location of ``global`` it is unspecified where log + entries are actually stored. + + After a bucket has been created, the location cannot be + changed. description (str): Describes this bucket. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -96,12 +127,30 @@ class LogBucket(proto.Message): bucket creation time, the default time of 30 days will be used. locked (bool): - Whether the bucket has been locked. - The retention period on a locked bucket may not + Whether the bucket is locked. + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + restricted_fields (Sequence[str]): + Log entry field paths that are denied access in this bucket. + + The following fields and their children are eligible: + ``textPayload``, ``jsonPayload``, ``protoPayload``, + ``httpRequest``, ``labels``, ``sourceLocation``. + + Restricting a repeated field will restrict all values. + Adding a parent will block all child fields. (e.g. + ``foo.bar`` will block ``foo.bar.baz``) + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + The CMEK settings of the log bucket. If + present, new log entries written to this log + bucket are encrypted using the CMEK key provided + in this configuration. If a log bucket has CMEK + settings, the CMEK settings cannot be disabled + later by updating the log bucket. Changing the + KMS key is allowed. """ name = proto.Field(proto.STRING, number=1,) @@ -111,16 +160,20 @@ class LogBucket(proto.Message): retention_days = proto.Field(proto.INT32, number=11,) locked = proto.Field(proto.BOOL, number=9,) lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) + restricted_fields = proto.RepeatedField(proto.STRING, number=15,) + cmek_settings = proto.Field(proto.MESSAGE, number=19, message="CmekSettings",) class LogView(proto.Message): - r"""Describes a view over logs in a bucket. + r"""Describes a view over log entries in a bucket. Attributes: name (str): The resource name of the view. - For example - "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket/views/my-view`` description (str): Describes this view. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -131,11 +184,19 @@ class LogView(proto.Message): view. filter (str): Filter that restricts which log entries in a bucket are - visible in this view. Filters are restricted to be a logical - AND of ==/!= of any of the following: originating - project/folder/organization/billing account. resource type - log id Example: SOURCE("projects/myproject") AND - resource.type = "gce_instance" AND LOG_ID("stdout") + visible in this view. + + Filters are restricted to be a logical AND of ==/!= of any + of the following: + + - originating project/folder/organization/billing account. + - resource type + - log id + + For example: + + SOURCE("projects/myproject") AND resource.type = + "gce_instance" AND LOG_ID("stdout") """ name = proto.Field(proto.STRING, number=1,) @@ -148,10 +209,10 @@ class LogView(proto.Message): class LogSink(proto.Message): r"""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. 
The sink must be - created within a project, organization, billing account, or - folder. + BigQuery dataset, a Pub/Sub topic or a Cloud Logging log bucket. + A logs filter controls which log entries are exported. The sink + must be created within a project, organization, billing account, + or folder. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -159,7 +220,9 @@ class LogSink(proto.Message): Attributes: name (str): Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + the project. + + For example: ``"my-syslog-errors-to-pubsub"``. Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods. @@ -182,30 +245,30 @@ class LogSink(proto.Message): Optional. An `advanced logs filter `__. The only exported log entries are those that are in the - resource owning the sink and that match the filter. For - example: + resource owning the sink and that match the filter. - :: + For example: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. The maximum length of the description is 8000 characters. disabled (bool): - Optional. If set to True, then this sink is + Optional. If set to true, then this sink is disabled and it does not export any log entries. exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): - Optional. Log entries that match any of the exclusion - filters will not be exported. If a log entry is matched by - both ``filter`` and one of ``exclusion_filters`` it will not - be exported. + Optional. Log entries that match any of these exclusion + filters will not be exported. + + If a log entry is matched by both ``filter`` and one of + ``exclusion_filters`` it will not be exported. output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): Output only. An IAM identity—a service account or - group—under which Logging writes the exported log entries to - the sink's destination. This field is set by + group—under which Cloud Logging writes the exported log + entries to the sink's destination. This field is set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -218,25 +281,30 @@ class LogSink(proto.Message): Resource `__. Consult the destination service's documentation to determine the appropriate IAM roles to assign to the identity. + + Sinks that have a destination that is a log bucket in the + same project as the sink do not have a writer_identity and + no additional permissions are required. include_children (bool): Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's parent resource - are available for export. If the field is true, then logs - from all the projects, folders, and billing accounts + are available for export. If the field is true, then log + entries from all the projects, folders, and billing accounts contained in the sink's parent resource are also available for export. Whether a particular log entry from the children - is exported depends on the sink's filter expression. 
For - example, if this field is true, then the filter + is exported depends on the sink's filter expression. + + For example, if this field is true, then the filter ``resource.type=gce_instance`` would export all Compute Engine VM instance log entries from all projects in the - sink's parent. To only export entries from certain child - projects, filter on the project part of the log name: + sink's parent. - :: + To only export entries from certain child projects, filter + on the project part of the log name: - logName:("projects/test-project1/" OR "projects/test-project2/") AND - resource.type=gce_instance + logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. @@ -286,18 +354,20 @@ class BigQueryOptions(proto.Message): use_partitioned_tables (bool): Optional. Whether to use `BigQuery's partition tables `__. - By default, Logging creates dated tables based on the log - entries' timestamps, e.g. syslog_20170523. With partitioned - tables the date suffix is no longer present and `special - query + By default, Cloud Logging creates dated tables based on the + log entries' timestamps, e.g. syslog_20170523. With + partitioned tables the date suffix is no longer present and + `special query syntax `__ has to be used instead. In both cases, tables are sharded based on UTC timezone. uses_timestamp_column_partitioning (bool): Output only. True if new timestamp column based partitioning is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will - use timestamp column based partitioning. If + use. + + All new sinks will have this field set true and will use + timestamp column based partitioning. If use_partitioned_tables is false, this value has no meaning and will be false. Legacy sinks using partitioned tables will have this field set to false. @@ -369,13 +439,15 @@ class CreateBucketRequest(proto.Message): Attributes: parent (str): - Required. The resource in which to create the bucket: + Required. The resource in which to create the log bucket: :: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - Example: ``"projects/my-logging-project/locations/global"`` + For example: + + ``"projects/my-project/locations/global"`` bucket_id (str): Required. A client-assigned identifier such as ``"my-bucket"``. Identifiers are limited to 100 characters @@ -407,11 +479,9 @@ class UpdateBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - Also requires permission - "resourcemanager.projects.updateLiens" to set the locked - property + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` bucket (google.cloud.logging_v2.types.LogBucket): Required. The updated bucket. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -420,10 +490,10 @@ class UpdateBucketRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - For a detailed ``FieldMask`` definition, see + For a detailed ``FieldMask`` definition, see: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=retention_days``. 
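A minimal sketch of the ``UpdateBucketRequest`` flow above, assuming the standard protobuf ``FieldMask`` type; all resource names are placeholders::

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    def sample_update_bucket_retention():
        client = logging_v2.ConfigServiceV2Client()

        # Only the fields named in update_mask are overwritten on the bucket.
        request = logging_v2.UpdateBucketRequest(
            name="projects/my-project/locations/global/buckets/my-bucket",
            bucket=logging_v2.LogBucket(retention_days=60),
            update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
        )
        response = client.update_bucket(request=request)
        print(response)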
+ For example: ``updateMask=retention_days`` """ name = proto.Field(proto.STRING, number=1,) @@ -447,8 +517,9 @@ class GetBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -468,8 +539,9 @@ class DeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -489,8 +561,9 @@ class UndeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -514,7 +587,9 @@ class ListViewsRequest(proto.Message): should be identical to those in the previous call. page_size (int): Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of + request. + + Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. """ @@ -554,10 +629,11 @@ class CreateViewRequest(proto.Message): :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` - Example: - ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` view_id (str): Required. The id to use for this view. view (google.cloud.logging_v2.types.LogView): @@ -580,8 +656,9 @@ class UpdateViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` view (google.cloud.logging_v2.types.LogView): Required. The updated view. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -593,7 +670,7 @@ class UpdateViewRequest(proto.Message): For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ name = proto.Field(proto.STRING, number=1,) @@ -614,8 +691,9 @@ class GetViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
+ For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ name = proto.Field(proto.STRING, number=1,) @@ -632,8 +710,11 @@ class DeleteViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + :: + + `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ name = proto.Field(proto.STRING, number=1,) @@ -705,7 +786,9 @@ class GetSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name = proto.Field(proto.STRING, number=1,) @@ -725,8 +808,9 @@ class CreateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For example: + + ``"projects/my-project"`` ``"organizations/123456789"`` sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. @@ -735,9 +819,10 @@ class CreateSinkRequest(proto.Message): ``writer_identity`` in the new sink. If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. + the same group or service account used by Cloud Logging + before the addition of writer identities to this API. The + sink's destination must be in the same project as the sink + itself. If this field is set to true, or if the sink is owned by a non-project resource such as an organization, then the value @@ -767,7 +852,9 @@ class UpdateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``.
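A sketch of requesting a unique writer identity at sink creation, per the ``CreateSinkRequest`` description above (all resource names are placeholders; the destination must exist, and the returned identity must be granted write access to it before the sink can export)::

    from google.cloud import logging_v2

    def sample_create_sink_with_unique_writer():
        client = logging_v2.ConfigServiceV2Client()

        sink = logging_v2.LogSink(
            name="my-sink",
            destination="pubsub.googleapis.com/projects/my-project/topics/my-topic",
            filter="severity>=ERROR",
        )
        request = logging_v2.CreateSinkRequest(
            parent="projects/my-project",
            sink=sink,
            unique_writer_identity=True,
        )
        response = client.create_sink(request=request)

        # Grant this service account write permission on the destination.
        print(response.writer_identity)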
+ For example: + + ``"projects/my-project/sinks/my-sink"`` sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. @@ -793,16 +880,18 @@ class UpdateSinkRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the + An empty ``updateMask`` is temporarily treated as using the following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + + ``destination,filter,includeChildren`` + + At some point in the future, this behavior will be removed and + specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ sink_name = proto.Field(proto.STRING, number=1,) @@ -828,20 +917,21 @@ class DeleteSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name = proto.Field(proto.STRING, number=1,) class LogExclusion(proto.Message): - r"""Specifies a set of log entries that are not to be stored in - Logging. If your GCP resource receives a large volume of logs, - you can use exclusions to reduce your chargeable logs. - Exclusions are processed after log sinks, so you can export log - entries before they are excluded. Note that organization-level - and folder-level exclusions don't apply to child resources, and - that you can't exclude audit log entries. + r"""Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of log entries, + you can use exclusions to reduce your chargeable logs. Note that + exclusions on organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify the \_Required + sink or exclude logs from it. Attributes: name (str): @@ -859,10 +949,11 @@ class LogExclusion(proto.Message): `sample function `__, you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity log entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity`__ for more information. @@ -1069,11 +1165,14 @@ class GetCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google + Cloud projects, folders, organizations and billing accounts. + Once configured for an organization, it applies to all + projects and folders in the Google Cloud organization. """ name = proto.Field(proto.STRING, number=1,) @@ -1083,7 +1182,7 @@ class UpdateCmekSettingsRequest(proto.Message): r"""The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1098,15 +1197,18 @@ class UpdateCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. cmek_settings (google.cloud.logging_v2.types.CmekSettings): Required. The CMEK settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -1118,7 +1220,7 @@ class UpdateCmekSettingsRequest(proto.Message): See [FieldMask][google.protobuf.FieldMask] for more information.
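Given the ``UpdateSinkRequest`` guidance above that an empty ``updateMask`` will eventually be an error, a hedged sketch that always passes an explicit mask (resource names are placeholders)::

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    def sample_update_sink_filter():
        client = logging_v2.ConfigServiceV2Client()

        request = logging_v2.UpdateSinkRequest(
            sink_name="projects/my-project/sinks/my-sink",
            sink=logging_v2.LogSink(name="my-sink", filter="severity>=WARNING"),
            # Explicit mask: only `filter` is overwritten on the sink.
            update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
        )
        response = client.update_sink(request=request)
        print(response)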
- Example: ``"updateMask=kmsKeyName"`` + For example: ``"updateMask=kmsKeyName"`` """ name = proto.Field(proto.STRING, number=1,) @@ -1133,11 +1235,11 @@ class CmekSettings(proto.Message): associated with a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be configured for - GCP organizations. Once configured, it applies to all projects and - folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured for + Google Cloud organizations. Once configured, it applies to all + projects and folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1149,14 +1251,163 @@ class CmekSettings(proto.Message): The resource name for the configured Cloud KMS key. KMS key name format: - "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" For example: - ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` - To enable CMEK for the Logs Router, set this field to a - valid ``kms_key_name`` for which the associated service - account has the required + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required cloudkms.cryptoKeyEncrypterDecrypter roles + assigned for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name or + disabled by setting the key name to an empty string. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. + + Before enabling CMEK for Log Router, you must first assign + the cloudkms.cryptoKeyEncrypterDecrypter role to the service + account that the Log Router will use to access your Cloud + KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + """ + + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + service_account_id = proto.Field(proto.STRING, number=3,) + + +class GetSettingsRequest(proto.Message): + r"""The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing accounts. + Currently it can only be configured for organizations. 
Once + configured for an organization, it applies to all projects + and folders in the Google Cloud organization. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class UpdateSettingsRequest(proto.Message): + r"""The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the settings to update. + + :: + + "organizations/[ORGANIZATION_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be overwritten + if and only if it is in the update mask. Output only fields + cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name = proto.Field(proto.STRING, number=1,) + settings = proto.Field(proto.MESSAGE, number=2, message="Settings",) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) + + +class Settings(proto.Message): + r"""Describes the settings associated with a project, folder, + organization, billing account, or flexible resource. + + Attributes: + name (str): + Output only. The resource name of the + settings. + kms_key_name (str): + Optional. The resource name for the configured Cloud KMS + key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key. @@ -1168,31 +1419,118 @@ class CmekSettings(proto.Message): the time of encryption unless access to that key has been revoked. - To disable CMEK for the Logs Router, set this field to an + To disable CMEK for the Log Router, set this field to an empty string. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. - service_account_id (str): + kms_service_account_id (str): Output only. The service account that will be used by the - Logs Router to access your Cloud KMS key. + Log Router to access your Cloud KMS key. - Before enabling CMEK for Logs Router, you must first assign + Before enabling CMEK for Log Router, you must first assign the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to - the service account that the Logs Router will use to access + the service account that the Log Router will use to access your Cloud KMS key. Use - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings] to obtain the service account ID. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + storage_location (str): + Optional. 
The Cloud region that will be used for \_Default + and \_Required log buckets for newly created projects and + folders. For example ``europe-west1``. This setting does not + affect the location of custom log buckets. + disable_default_sink (bool): + Optional. If set to true, the \_Default sink in newly + created projects and folders will be created in a disabled + state. This can be used to automatically disable log + ingestion if there is already an aggregated sink configured + in the hierarchy. The \_Default sink can be re-enabled + manually if needed. + """ + + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + kms_service_account_id = proto.Field(proto.STRING, number=3,) + storage_location = proto.Field(proto.STRING, number=4,) + disable_default_sink = proto.Field(proto.BOOL, number=5,) + + +class CopyLogEntriesRequest(proto.Message): + r"""The parameters to CopyLogEntries. + + Attributes: + name (str): + Required. Log bucket from which to copy log entries. + + For example: + + ``"projects/my-project/locations/global/buckets/my-source-bucket"`` + filter (str): + Optional. A filter specifying which log + entries to copy. The filter must be no more than + 20k characters. An empty filter matches all log + entries. + destination (str): + Required. Destination to which to copy log + entries. + """ + + name = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=3,) + destination = proto.Field(proto.STRING, number=4,) + + +class CopyLogEntriesMetadata(proto.Message): + r"""Metadata for CopyLogEntries long running operations. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + cancellation_requested (bool): + Identifies whether the user has requested + cancellation of the operation. + request (google.cloud.logging_v2.types.CopyLogEntriesRequest): + CopyLogEntries RPC request. + progress (int): + Estimated progress of the operation (0 - + 100%). + writer_identity (str): + The IAM identity of a service account that must be granted + access to the destination. + + If the service account is not granted permission to the + destination within an hour, the operation will be cancelled. + + For example: ``"serviceAccount:foo@bar.com"`` + """ + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=3, enum="OperationState",) + cancellation_requested = proto.Field(proto.BOOL, number=4,) + request = proto.Field(proto.MESSAGE, number=5, message="CopyLogEntriesRequest",) + progress = proto.Field(proto.INT32, number=6,) + writer_identity = proto.Field(proto.STRING, number=7,) + + +class CopyLogEntriesResponse(proto.Message): + r"""Response type for CopyLogEntries long running operations. + + Attributes: + log_entries_copied_count (int): + Number of log entries copied.
+ """ + + log_entries_copied_count = proto.Field(proto.INT64, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 26d855680694..af1f2f548c24 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -55,12 +55,12 @@ class LogMetric(proto.Message): forward-slash character (``/``) denotes a hierarchy of name pieces, and it cannot be the first character of the name. - The metric identifier in this field must not be - `URL-encoded `__. - However, when the metric identifier appears as the - ``[METRIC_ID]`` part of a ``metric_name`` API parameter, - then the metric identifier must be URL-encoded. Example: - ``"projects/my-project/metrics/nginx%2Frequests"``. + This field is the ``[METRIC_ID]`` part of a metric resource + name in the format + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". Example: If the + resource name of a metric is + ``"projects/my-project/metrics/nginx%2Frequests"``, this + field's value is ``"nginx/requests"``. description (str): Optional. A description of this metric, which is used in documentation. The maximum length of @@ -75,6 +75,9 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. + disabled (bool): + Optional. If set to True, then this metric is + disabled and it does not generate any points. metric_descriptor (google.api.metric_pb2.MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric @@ -170,6 +173,7 @@ class ApiVersion(proto.Enum): name = proto.Field(proto.STRING, number=1,) description = proto.Field(proto.STRING, number=2,) filter = proto.Field(proto.STRING, number=3,) + disabled = proto.Field(proto.BOOL, number=12,) metric_descriptor = proto.Field( proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py new file mode 100644 index 000000000000..abe149bd66c2 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py new file mode 100644 index 000000000000..90eb5354e226 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] +from google.cloud import logging_v2 + + +def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py new file mode 100644 index 000000000000..1c5c329c802c --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] +from google.cloud import logging_v2 + + +async def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = await client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py new file mode 100644 index 000000000000..9b309322914f --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] +from google.cloud import logging_v2 + + +def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py new file mode 100644 index 000000000000..5be1a9ad3da5 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py new file mode 100644 index 000000000000..3b57560f3cec --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] +from google.cloud import logging_v2 + + +def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py new file mode 100644 index 000000000000..789598d4c7f2 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateSink_async] +from google.cloud import logging_v2 + + +async def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = await client.create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateSink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py new file mode 100644 index 000000000000..e22bc60555c5 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateSink_sync] +from google.cloud import logging_v2 + + +def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateSink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py new file mode 100644 index 000000000000..499d4eeba920 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateView_async] +from google.cloud import logging_v2 + + +async def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = await client.create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateView_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py new file mode 100644 index 000000000000..8e6425d712b1 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateView_sync] +from google.cloud import logging_v2 + + +def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateView_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py new file mode 100644 index 000000000000..def3e5abcb4a --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
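+#
+# This file defines a coroutine. As a hypothetical runner (not part of the
+# generated snippet), it could be executed with the standard event loop:
+#
+#   import asyncio
+#   asyncio.run(sample_delete_bucket())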
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py new file mode 100644 index 000000000000..64c95c992455 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] +from google.cloud import logging_v2 + + +def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py new file mode 100644 index 000000000000..4c042c3bec23 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] +from google.cloud import logging_v2 + + +async def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client.delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py new file mode 100644 index 000000000000..dc313658435a --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] +from google.cloud import logging_v2 + + +def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py new file mode 100644 index 000000000000..fe5acb523f2d --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_async] +from google.cloud import logging_v2 + + +async def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + await client.delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py new file mode 100644 index 000000000000..d9ddc66a02a4 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
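+#
+# "sink_name_value" below is a placeholder; an actual call passes the sink's
+# full resource name, for example:
+#
+#   sink_name = "projects/[PROJECT_ID]/sinks/[SINK_ID]"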
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync] +from google.cloud import logging_v2 + + +def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py new file mode 100644 index 000000000000..fd1eee969886 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteView_async] +from google.cloud import logging_v2 + + +async def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + await client.delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteView_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py new file mode 100644 index 000000000000..1169b400046e --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteView_sync] +from google.cloud import logging_v2 + + +def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteView_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py new file mode 100644 index 000000000000..4b964aa7435d --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_async] +from google.cloud import logging_v2 + + +async def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py new file mode 100644 index 000000000000..1b299dd569ac --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] +from google.cloud import logging_v2 + + +def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py new file mode 100644 index 000000000000..356f0db9fbf1 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
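+#
+# "name_value" below is a placeholder; CMEK settings are addressed by a
+# resource name such as (one documented form, shown only as an example):
+#
+#   organizations/[ORGANIZATION_ID]/cmekSettings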
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py new file mode 100644 index 000000000000..83dfc8d2c997 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] +from google.cloud import logging_v2 + + +def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py new file mode 100644 index 000000000000..27a7644458dc --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_async] +from google.cloud import logging_v2 + + +async def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py new file mode 100644 index 000000000000..980914dac1cb --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
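+#
+# The client created below picks up Application Default Credentials; when
+# running locally, one common setup (an assumption about your environment) is:
+#
+#   export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service-account-key.json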
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync] +from google.cloud import logging_v2 + + +def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py new file mode 100644 index 000000000000..0da6e2a7ec06 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_async] +from google.cloud import logging_v2 + + +async def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py new file mode 100644 index 000000000000..ccbc05d502b3 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_sync] +from google.cloud import logging_v2 + + +def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py new file mode 100644 index 000000000000..fa3d7cf7f5b6 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSink_async] +from google.cloud import logging_v2 + + +async def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = await client.get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py new file mode 100644 index 000000000000..48581e4706bc --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSink_sync] +from google.cloud import logging_v2 + + +def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py new file mode 100644 index 000000000000..9f26a54178fa --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
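+#
+# "name_value" below is a placeholder; a LogView resource name follows the
+# documented pattern:
+#
+#   projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]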
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetView_async] +from google.cloud import logging_v2 + + +async def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetView_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py new file mode 100644 index 000000000000..f88c15d2e30f --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetView_sync] +from google.cloud import logging_v2 + + +def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetView_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py new file mode 100644 index 000000000000..4e3bfea5582b --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBuckets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListBuckets_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_buckets():
+    # Create a client
+    client = logging_v2.ConfigServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListBucketsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async pager must be awaited before iteration)
+    page_result = await client.list_buckets(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListBuckets_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py
new file mode 100644
index 000000000000..3522c4c8979d
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBuckets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_buckets():
+    # Create a client
+    client = logging_v2.ConfigServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListBucketsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_buckets(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py
new file mode 100644
index 000000000000..788436d6ad15
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListExclusions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListExclusions_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_exclusions():
+    # Create a client
+    client = logging_v2.ConfigServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListExclusionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async pager must be awaited before iteration)
+    page_result = await client.list_exclusions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListExclusions_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py
new file mode 100644
index 000000000000..8ea9407a1066
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListExclusions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
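+#
+# The list call below returns a pager that fetches further pages lazily as it
+# is iterated. If whole pages are needed instead, the generated pager also
+# exposes them, e.g. (illustrative only):
+#
+#   for page in page_result.pages:
+#       print(page.exclusions)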
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_exclusions():
+    # Create a client
+    client = logging_v2.ConfigServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListExclusionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_exclusions(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py
new file mode 100644
index 000000000000..b43b5682a28e
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSinks
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListSinks_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_sinks():
+    # Create a client
+    client = logging_v2.ConfigServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListSinksRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async pager must be awaited before iteration)
+    page_result = await client.list_sinks(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListSinks_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py
new file mode 100644
index 000000000000..235395e6d593
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListSinks_sync] +from google.cloud import logging_v2 + + +def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListSinks_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py new file mode 100644 index 000000000000..27910c9f7cfa --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListViews_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_views():
+    # Create a client
+    client = logging_v2.ConfigServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListViewsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async pager must be awaited before iteration)
+    page_result = await client.list_views(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListViews_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
new file mode 100644
index 000000000000..2e5b6e53b3d1
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListViews
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListViews_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_views():
+    # Create a client
+    client = logging_v2.ConfigServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListViewsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_views(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListViews_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py
new file mode 100644
index 000000000000..020866b755b9
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py new file mode 100644 index 000000000000..0dfb39a11a07 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] +from google.cloud import logging_v2 + + +def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py new file mode 100644 index 000000000000..78245abfc6f0 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] +from google.cloud import logging_v2 + + +async def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py new file mode 100644 index 000000000000..c285fd542862 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
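+#
+# "name_value" is a placeholder; a real bucket name has the form
+# projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]. As a
+# sketch (illustrative values; fields taken from UpdateBucketRequest), a
+# request that changes the retention period would also carry the new bucket
+# state and an update mask:
+#
+#     from google.protobuf import field_mask_pb2
+#
+#     request = logging_v2.UpdateBucketRequest(
+#         name="projects/my-project/locations/global/buckets/my-bucket",
+#         bucket=logging_v2.LogBucket(retention_days=30),
+#         update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
+#     )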
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] +from google.cloud import logging_v2 + + +def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py new file mode 100644 index 000000000000..8d49b85e7b8c --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py new file mode 100644 index 000000000000..7b04208d4c58 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] +from google.cloud import logging_v2 + + +def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py new file mode 100644 index 000000000000..d06cf80d4f63 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
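+#
+# "filter_value" below is a placeholder; exclusion filters use the Logging
+# query language, e.g. (illustrative):
+#
+#     exclusion.filter = 'resource.type="gce_instance" AND severity<ERROR'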
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py new file mode 100644 index 000000000000..c0dba34ccb9e --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] +from google.cloud import logging_v2 + + +def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py new file mode 100644 index 000000000000..dba1d4e8eb90 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async] +from google.cloud import logging_v2 + + +async def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py new file mode 100644 index 000000000000..f70f520361f1 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
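+#
+# "name_value" is a placeholder; settings are addressed by a resource name
+# such as "organizations/[ORGANIZATION_ID]/settings" (illustrative):
+#
+#     request = logging_v2.UpdateSettingsRequest(
+#         name="organizations/123456789/settings",
+#     )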
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] +from google.cloud import logging_v2 + + +def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py new file mode 100644 index 000000000000..c46b9ab427d2 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_async] +from google.cloud import logging_v2 + + +async def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = await client.update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py new file mode 100644 index 000000000000..9639ece285e4 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync] +from google.cloud import logging_v2 + + +def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py new file mode 100644 index 000000000000..250d3f9dc881 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
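+#
+# "name_value" is a placeholder; views live under a bucket, e.g.
+# projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID].
+# A sketch that also narrows the view's filter (illustrative values; the
+# `view` field is assumed from UpdateViewRequest):
+#
+#     request = logging_v2.UpdateViewRequest(
+#         name="projects/my-project/locations/global/buckets/my-bucket/views/my-view",
+#         view=logging_v2.LogView(filter='resource.type="gce_instance"'),
+#     )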
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateView_async] +from google.cloud import logging_v2 + + +async def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = await client.update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateView_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py new file mode 100644 index 000000000000..1397848800fe --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateView_sync] +from google.cloud import logging_v2 + + +def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateView_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py new file mode 100644 index 000000000000..6338b9abcbd0 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] +from google.cloud import logging_v2 + + +async def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + await client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py new file mode 100644 index 000000000000..36280057bfe7 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] +from google.cloud import logging_v2 + + +def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py new file mode 100644 index 000000000000..4a8692b04e47 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogEntriesRequest(
+        resource_names=['resource_names_value_1', 'resource_names_value_2'],
+    )
+
+    # Make the request
+    page_result = await client.list_log_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
new file mode 100644
index 000000000000..062075af9091
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
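+#
+# The resource_names values below are placeholders; in practice they look
+# like "projects/[PROJECT_ID]". The request can also be filtered and
+# ordered, e.g. (illustrative values):
+#
+#     request = logging_v2.ListLogEntriesRequest(
+#         resource_names=["projects/my-project"],
+#         filter="severity>=ERROR",
+#         order_by="timestamp desc",
+#     )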
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogEntriesRequest(
+        resource_names=['resource_names_value_1', 'resource_names_value_2'],
+    )
+
+    # Make the request
+    page_result = client.list_log_entries(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
new file mode 100644
index 000000000000..fb0106199bd0
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogs_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_logs():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_logs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogs_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
new file mode 100644
index 000000000000..0f775572f57b
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListLogs_sync] +from google.cloud import logging_v2 + + +def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListLogs_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py new file mode 100644 index 000000000000..b8f3397012de --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
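+#
+# The pager returned below yields individual descriptors; whole pages can
+# be inspected instead through the pager's `pages` attribute (illustrative):
+#
+#     async for page in page_result.pages:
+#         print(len(page.resource_descriptors))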
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_monitored_resource_descriptors():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListMonitoredResourceDescriptorsRequest(
+    )
+
+    # Make the request
+    page_result = await client.list_monitored_resource_descriptors(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
new file mode 100644
index 000000000000..736d64d614d1
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMonitoredResourceDescriptors
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_monitored_resource_descriptors():
+    # Create a client
+    client = logging_v2.LoggingServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListMonitoredResourceDescriptorsRequest(
+    )
+
+    # Make the request
+    page_result = client.list_monitored_resource_descriptors(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py
new file mode 100644
index 000000000000..3e77920f81e1
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.tail_log_entries(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py new file mode 100644 index 000000000000..ee1108b33f02 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
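+#
+# The resource_names values below are placeholders ("projects/[PROJECT_ID]"
+# in practice), and the stream can be narrowed with a filter, e.g.
+# (illustrative values):
+#
+#     request = logging_v2.TailLogEntriesRequest(
+#         resource_names=["projects/my-project"],
+#         filter="severity>=WARNING",
+#     )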
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] +from google.cloud import logging_v2 + + +def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py new file mode 100644 index 000000000000..28025d777037 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
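+#
+# `entries` is a repeated field, so a request can carry several entries at
+# once; e.g. (illustrative values, list form for the repeated field):
+#
+#     entry = logging_v2.LogEntry(
+#         log_name="projects/my-project/logs/my-log",
+#         text_payload="hello world",
+#     )
+#     request = logging_v2.WriteLogEntriesRequest(entries=[entry])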
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async]
+from google.cloud import logging_v2
+
+
+async def sample_write_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    entries = logging_v2.LogEntry()
+    entries.log_name = "log_name_value"
+
+    request = logging_v2.WriteLogEntriesRequest(
+        entries=[entries],
+    )
+
+    # Make the request
+    response = await client.write_log_entries(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_async]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py
new file mode 100644
index 000000000000..31569811c655
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for WriteLogEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync]
+from google.cloud import logging_v2
+
+
+def sample_write_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2Client()
+
+    # Initialize request argument(s)
+    entries = logging_v2.LogEntry()
+    entries.log_name = "log_name_value"
+
+    request = logging_v2.WriteLogEntriesRequest(
+        entries=[entries],
+    )
+
+    # Make the request
+    response = client.write_log_entries(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync]
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py
new file mode 100644
index 000000000000..96690c2c36b4
--- /dev/null
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = await client.create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py new file mode 100644 index 000000000000..051694d31470 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
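+#
+# "parent_value" and "filter_value" are placeholders; metrics are created
+# under a project and count the entries matching a Logging query, e.g.
+# (illustrative values):
+#
+#     request = logging_v2.CreateLogMetricRequest(
+#         parent="projects/my-project",
+#         metric=logging_v2.LogMetric(
+#             name="error_count",
+#             filter="severity>=ERROR",
+#         ),
+#     )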
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py new file mode 100644 index 000000000000..bf2ee5e4abc2 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + await client.delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py new file mode 100644 index 000000000000..eae109200224 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py new file mode 100644 index 000000000000..cea94a356e29 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
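+#
+# sample_get_log_metric below is a coroutine, so it must be run under an
+# event loop. A minimal driver, illustrative only and not part of the
+# generated snippet, would be:
+#
+#   import asyncio
+#   asyncio.run(sample_get_log_metric())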
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = await client.get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py new file mode 100644 index 000000000000..eea36222a80b --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py new file mode 100644 index 000000000000..9dac7793736b --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogMetrics
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_log_metrics():
+    # Create a client
+    client = logging_v2.MetricsServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogMetricsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (await the coroutine to obtain the async pager)
+    page_result = await client.list_log_metrics(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py new file mode 100644 index 000000000000..97b3c2f1364b --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogMetrics
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
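+#
+# The for-loop below iterates a pager that fetches further result pages on
+# demand. To bound the size of each page, the request message also accepts a
+# page_size field; an illustrative variant with assumed values:
+#
+#   request = logging_v2.ListLogMetricsRequest(
+#       parent="projects/my-project",
+#       page_size=50,
+#   )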
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] +from google.cloud import logging_v2 + + +def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py new file mode 100644 index 000000000000..c94c70e76264 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = await client.update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py new file mode 100644 index 000000000000..bcdff32693bc --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json new file mode 100644 index 000000000000..b6ad799b168a --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -0,0 +1,3269 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CopyLogEntries" + } + }, + "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CopyLogEntries" + } + }, + "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + "segments": [ + { + "end": 45, + "start": 27, + 
"type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, 
+ { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": 
"ConfigServiceV2" + }, + "shortName": "GetBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": 
"logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetView_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "segments": [ + { + "end": 45, + 
"start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + 
{ + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + 
"start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] 
+ }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 41, + "type": "RESPONSE_HANDLING" + } 
+ ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 41, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + 
"type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": 
{ + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git 
a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-logging/tests/__init__.py +++ b/packages/google-cloud-logging/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-logging/tests/unit/__init__.py +++ b/packages/google-cloud-logging/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index e7d2ea7d16e1..401394debb62 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -25,9 +25,13 @@ from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -38,6 +42,7 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1018,6 +1023,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1033,6 +1039,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_empty_call(): @@ -1072,6 +1079,7 @@ async def test_get_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.get_bucket(request) @@ -1088,6 +1096,7 @@ async def test_get_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1167,6 +1176,7 @@ def test_create_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -1182,6 +1192,7 @@ def test_create_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_empty_call(): @@ -1221,6 +1232,7 @@ async def test_create_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.create_bucket(request) @@ -1237,6 +1249,7 @@ async def test_create_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1316,6 +1329,7 @@ def test_update_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -1331,6 +1345,7 @@ def 
test_update_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_empty_call(): @@ -1370,6 +1385,7 @@ async def test_update_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.update_bucket(request) @@ -1386,6 +1402,7 @@ async def test_update_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -5433,6 +5450,538 @@ async def test_update_cmek_settings_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] +@pytest.mark.parametrize("request_type", [logging_config.GetSettingsRequest, dict,]) +def test_get_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) + + +def test_get_settings_field_headers(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_settings_flattened(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_settings_flattened_error(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_settings( + logging_config.GetSettingsRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [logging_config.UpdateSettingsRequest, dict,]) +def test_update_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +def test_update_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) + + +def test_update_settings_field_headers(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_settings_flattened(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_settings_flattened_error(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize("request_type", [logging_config.CopyLogEntriesRequest, dict,]) +def test_copy_log_entries(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_copy_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( @@ -5570,6 +6119,9 @@ def test_config_service_v2_base_transport(): "delete_exclusion", "get_cmek_settings", "update_cmek_settings", + "get_settings", + "update_settings", + "copy_log_entries", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5578,6 +6130,11 @@ with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -5886,6 +6443,32 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_config_service_v2_grpc_lro_client(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_config_service_v2_grpc_lro_async_client(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + def test_cmek_settings_path(): project = "squid" expected = "projects/{project}/cmekSettings".format(project=project,) @@ -5996,8 +6579,26 @@ def test_parse_log_view_path(): assert expected == actual +def test_settings_path(): + project = "squid" + expected = "projects/{project}/settings".format(project=project,) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible.
+ actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6007,7 +6608,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = ConfigServiceV2Client.common_billing_account_path(**expected) @@ -6017,7 +6618,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format(folder=folder,) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6025,7 +6626,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = ConfigServiceV2Client.common_folder_path(**expected) @@ -6035,7 +6636,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6043,7 +6644,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = ConfigServiceV2Client.common_organization_path(**expected) @@ -6053,7 +6654,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6061,7 +6662,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = ConfigServiceV2Client.common_project_path(**expected) @@ -6071,8 +6672,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -6082,8 +6683,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = ConfigServiceV2Client.common_location_path(**expected) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0b3b202eb113..e87e1c26dc4a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 764a76121405..e6883889dda6 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1024,6 +1024,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1039,6 +1040,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1078,6 +1080,7 @@ async def test_get_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1094,6 +1097,7 @@ async def test_get_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1249,6 +1253,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1264,6 +1269,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1307,6 +1313,7 @@ async def test_create_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1323,6 +1330,7 @@ async def test_create_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1500,6 +1508,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1515,6 +1524,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description 
== "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1558,6 +1568,7 @@ async def test_update_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1574,6 +1585,7 @@ async def test_update_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 From 69209028ceeef8533e7432c40fd645d8e1637b18 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Mar 2022 12:28:34 +0100 Subject: [PATCH 601/855] chore(deps): update all dependencies (#492) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.32.0` -> `==2.34.2` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/compatibility-slim/2.32.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/confidence-slim/2.32.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.9.0` -> `==2.10.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/compatibility-slim/2.9.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/confidence-slim/2.9.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery ### [`v2.34.2`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#​2342-httpsgithubcomgoogleapispython-bigquerycomparev2341v2342-2022-03-05) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.34.1...v2.34.2) ### [`v2.34.1`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#​2341-httpsgithubcomgoogleapispython-bigquerycomparev2340v2341-2022-03-02) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.34.0...v2.34.1) ### [`v2.34.0`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#​2340-httpsgithubcomgoogleapispython-bigquerycomparev2330v2340-2022-02-18) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.33.0...v2.34.0) ##### Features - support BI Engine statistics in query job ([#​1144](https://togithub.com/googleapis/python-bigquery/issues/1144)) ([7482549](https://togithub.com/googleapis/python-bigquery/commit/7482549cb42ed5302634ab4fb7b4efcd97b35c68)) ### [`v2.33.0`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#​2330-httpsgithubcomgoogleapispython-bigquerycomparev2320v2330-2022-02-16) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.32.0...v2.33.0) ##### Features - add `--no_query_cache` option to `%%bigquery` magics to disable query cache ([#​1141](https://togithub.com/googleapis/python-bigquery/issues/1141)) ([7dd30af](https://togithub.com/googleapis/python-bigquery/commit/7dd30af41b8a595b96176c964ba14aa41645ef0d)) ##### Bug Fixes - return 403 when VPC-SC violation happens ([#​1131](https://togithub.com/googleapis/python-bigquery/issues/1131)) ([f5daa9b](https://togithub.com/googleapis/python-bigquery/commit/f5daa9b41377a58cb3220bb2ab7c72adc6462196)) ##### Documentation - reference BigQuery REST API defaults in `LoadJobConfig` descrip… ([#​1132](https://togithub.com/googleapis/python-bigquery/issues/1132)) ([18d9580](https://togithub.com/googleapis/python-bigquery/commit/18d958062721d6be81e7bd7a5bd66f277344a864)) - show common job properties in `get_job` and `cancel_job` samples ([#​1137](https://togithub.com/googleapis/python-bigquery/issues/1137)) ([8edc10d](https://togithub.com/googleapis/python-bigquery/commit/8edc10d019bd96defebc4f92a47774901e9b956f))
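The `--no_query_cache` option called out above is the notebook-magic counterpart of the library's existing `use_query_cache` job-config flag. A minimal sketch of the equivalent outside `%%bigquery` magics, assuming application-default credentials, a default project, and an illustrative query:

```python
# Minimal sketch: bypass BigQuery's query cache without the %%bigquery magic,
# via the standard QueryJobConfig flag. Assumes application-default
# credentials and a default project; the query itself is a placeholder.
from google.cloud import bigquery

client = bigquery.Client()
job_config = bigquery.QueryJobConfig(use_query_cache=False)

query_job = client.query("SELECT 1 AS answer", job_config=job_config)
for row in query_job.result():
    print(row.answer)
```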
googleapis/python-pubsub ### [`v2.10.0`](https://togithub.com/googleapis/python-pubsub/blob/HEAD/CHANGELOG.md#​2100-httpsgithubcomgoogleapispython-pubsubcomparev290v2100-2022-03-04) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.9.0...v2.10.0) ##### Features - add api key support ([#​571](https://togithub.com/googleapis/python-pubsub/issues/571)) ([cdda762](https://togithub.com/googleapis/python-pubsub/commit/cdda762f6d15d96f5e2d7fac975f3494dc49eaa9)) - add exactly once delivery flag ([#​577](https://togithub.com/googleapis/python-pubsub/issues/577)) ([d6614e2](https://togithub.com/googleapis/python-pubsub/commit/d6614e274328c58449e67dfc788e2e7986c0c10b)) - add support for exactly once delivery ([#​578](https://togithub.com/googleapis/python-pubsub/issues/578)) ([95a86fa](https://togithub.com/googleapis/python-pubsub/commit/95a86fa5f528701b760064f0cece0efa4e60cd44)) - exactly-once delivery support ([#​550](https://togithub.com/googleapis/python-pubsub/issues/550)) ([2fb6e15](https://togithub.com/googleapis/python-pubsub/commit/2fb6e1533192ae81dceee5c71283169a0a85a015)) ##### Bug Fixes - **deps:** move libcst to extras ([#​585](https://togithub.com/googleapis/python-pubsub/issues/585)) ([0846762](https://togithub.com/googleapis/python-pubsub/commit/084676243ca4afd54cda601e589b80883f9703a3)) - refactor client classes for safer type checking ([#​552](https://togithub.com/googleapis/python-pubsub/issues/552)) ([7f705be](https://togithub.com/googleapis/python-pubsub/commit/7f705beb927383f14b9d56f0341ee0de101f7c05)) - resolve DuplicateCredentialArgs error when using credentials_file ([8ca8cf2](https://togithub.com/googleapis/python-pubsub/commit/8ca8cf27333baf823a1dffd081e63079f1a12625)) ##### Samples - samples: create subscription with filtering enabled [#​580](https://togithub.com/googleapis/python-pubsub/pull/580) - samples: handle empty response in sync pull samples [#​586](https://togithub.com/googleapis/python-pubsub/pull/586) - samples: sample for receiving messages with exactly-once delivery enabled [#​588](https://togithub.com/googleapis/python-pubsub/pull/588) - samples: create subscription with exactly once delivery [#​592](https://togithub.com/googleapis/python-pubsub/pull/592) [https://github.com/googleapis/python-pubsub/pull/588](https://togithub.com/googleapis/python-pubsub/pull/588)/588 ##### Documentation - add autogenerated code snippets ([aa3754c](https://togithub.com/googleapis/python-pubsub/commit/aa3754cf432bd02be2734a23a32d5b36cd216aee)) - Docs have inconsistent default values for max_latency and max_bytes ([#​572](https://togithub.com/googleapis/python-pubsub/issues/572)) ([d136dfd](https://togithub.com/googleapis/python-pubsub/commit/d136dfdb69ebeebd1411a1415f863b94d07078f0))
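For the exactly-once delivery support highlighted in these notes, a rough consumption sketch: the project and subscription IDs are placeholders, the subscription is assumed to already have exactly-once delivery enabled, and `ack_with_response()` is the acknowledgement API these notes introduce.

```python
# Rough sketch of pulling from a subscription with exactly-once delivery
# enabled (google-cloud-pubsub >= 2.10.0). IDs below are placeholders.
from concurrent.futures import TimeoutError
from google.cloud import pubsub_v1
from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-subscription")

def callback(message):
    try:
        # With exactly-once delivery, ack_with_response() returns a future
        # that resolves only once the ack is durably recorded by the service.
        message.ack_with_response().result()
    except sub_exceptions.AcknowledgeError as e:
        print(f"Ack failed: {e.error_code}")

streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
with subscriber:
    try:
        streaming_pull_future.result(timeout=30.0)
    except TimeoutError:
        streaming_pull_future.cancel()
        streaming_pull_future.result()
```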
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 7e49254f48e4..b44576c1ba9f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.32.0 +google-cloud-bigquery==2.34.2 google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' -google-cloud-pubsub==2.9.0 +google-cloud-pubsub==2.10.0 From ea16b77253f3bac6ece52b58a03d6b4276fe4966 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Mar 2022 00:52:13 +0000 Subject: [PATCH 602/855] chore: Adding support for pytest-xdist and pytest-parallel (#496) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- .../.github/.OwlBot.lock.yaml | 3 +- .../samples/snippets/noxfile.py | 80 +++++++++++-------- 2 files changed, 47 insertions(+), 36 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index fa15cb546774..44c78f7cc12d 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 20cdfc620138..4c808af73ea2 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 9c0748239bda1e1ef00fb427103dfdfaa2a96306 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 10 Mar 2022 11:44:16 +0100 Subject: [PATCH 603/855] chore(deps): update dependency google-cloud-pubsub to v2.11.0 (#499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.10.0` -> `==2.11.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/compatibility-slim/2.10.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/confidence-slim/2.10.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.11.0`](https://togithub.com/googleapis/python-pubsub/blob/HEAD/CHANGELOG.md#​2110-httpsgithubcomgoogleapispython-pubsubcomparev2100v2110-2022-03-09) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.10.0...v2.11.0) ##### Features - retry temporary GRPC statuses for ack/modack/nack when exactly-once delivery is enabled ([#​607](https://togithub.com/googleapis/python-pubsub/issues/607)) ([a91bed8](https://togithub.com/googleapis/python-pubsub/commit/a91bed829c9040fcc6c1e70b99b66188ac4ded40)) - return singleton success future for exactly-once methods in Message ([#​608](https://togithub.com/googleapis/python-pubsub/issues/608)) ([253ced2](https://togithub.com/googleapis/python-pubsub/commit/253ced28f308450c7a1a93cc38f6d101ecd7d4c0)) ##### Bug Fixes - **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#​600](https://togithub.com/googleapis/python-pubsub/issues/600)) ([1608b7f](https://togithub.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) - **deps:** require proto-plus>=1.15.0 ([1608b7f](https://togithub.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e))
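A minimal sketch of the acknowledgement flow the two exactly-once features above affect; `ack_with_response()` is the exactly-once-aware API in recent google-cloud-pubsub releases, and the project and subscription names here are placeholders (the subscription must have exactly-once delivery enabled):

```python
from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1
from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions

# Placeholder resource name, for illustration only.
subscription = "projects/my-project/subscriptions/my-sub"

def callback(message: pubsub_v1.subscriber.message.Message) -> None:
    # Under exactly-once delivery, ack_with_response() returns a future that
    # resolves only once the acknowledgement is durably recorded server-side.
    ack_future = message.ack_with_response()
    try:
        ack_future.result()
        print(f"acked {message.message_id}")
    except sub_exceptions.AcknowledgeError as e:
        # Permanent failures surface here with a status code.
        print(f"ack failed: {e.error_code}")

with pubsub_v1.SubscriberClient() as subscriber:
    streaming_pull = subscriber.subscribe(subscription, callback=callback)
    try:
        streaming_pull.result(timeout=30)  # listen briefly for this sketch
    except TimeoutError:
        streaming_pull.cancel()
```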
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b44576c1ba9f..a0c73323deb6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -2,4 +2,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' -google-cloud-pubsub==2.10.0 +google-cloud-pubsub==2.11.0 From 4866430cb146a20fe9b2353668ffa80892d0706f Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Mon, 14 Mar 2022 16:38:39 -0700 Subject: [PATCH 604/855] chore: Update environment tests submodule (#503) --- .../.github/sync-repo-settings.yaml | 21 +++++++++++++++++++ .../deployable/python/requirements.txt | 1 + .../tests/environment/noxfile.py | 2 +- tests/environment | 2 +- 4 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/.github/sync-repo-settings.yaml diff --git a/packages/google-cloud-logging/tests/environment/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/tests/environment/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..e7b31aedc604 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/.github/sync-repo-settings.yaml @@ -0,0 +1,21 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. 
+# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - 'cla/google' +permissionRules: + - team: actools-python + permission: admin + - team: actools + permission: admin + - team: yoshi-python + permission: push + - team: python-samples-owners + permission: push + - team: python-samples-reviewers + permission: push diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index e088494bddc1..a93899463b1a 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -3,3 +3,4 @@ google-cloud-pubsub>=2.8.0 click==7.1.2 pytz==2021.1 pandas>=1.1.5 +itsdangerous==2.0.1 diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index f3404fa839cf..666a9946bba2 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -69,7 +69,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: # We also need to specify the rules which are ignored by default: # ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -DEFAULT_PYTHON_VERSION = "3.7" +DEFAULT_PYTHON_VERSION = "3.9" BLACK_PATHS = ["./deployable/python"] BLACK_VERSION = "black==19.10b0" diff --git a/tests/environment b/tests/environment index 41c32ce34255..21f1ea63a567 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 41c32ce3425529680e32701549d3f682f9c82b63 +Subproject commit 21f1ea63a567dfd1b601f7cb8ee6177c77f82cc5 From 9bd2100e8b16c7ddfae51be41bb4de9e181c0e5e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 15 Mar 2022 16:54:35 +0100 Subject: [PATCH 605/855] chore(deps): update all dependencies (#502) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==2.1.0` -> `==2.2.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/compatibility-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/confidence-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==7.0.1` -> `==7.1.0` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/compatibility-slim/7.0.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/confidence-slim/7.0.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage ### [`v2.2.0`](https://togithub.com/googleapis/python-storage/blob/HEAD/CHANGELOG.md#​220-httpsgithubcomgoogleapispython-storagecomparev210v220-2022-03-14) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v2.1.0...v2.2.0) ##### Features - allow no project in client methods using storage emulator ([#​703](https://togithub.com/googleapis/python-storage/issues/703)) ([bcde0ec](https://togithub.com/googleapis/python-storage/commit/bcde0ec619d7d303892bcc0863b7f977c79f7649)) ##### Bug Fixes - add user agent in python-storage when calling resumable media ([c7bf615](https://togithub.com/googleapis/python-storage/commit/c7bf615909a04f3bab3efb1047a9f4ba659bba19)) - **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#​722](https://togithub.com/googleapis/python-storage/issues/722)) ([e9aab38](https://togithub.com/googleapis/python-storage/commit/e9aab389f868799d4425133954bad4f1cbb85786)) - Fix BlobReader handling of interleaved reads and seeks ([#​721](https://togithub.com/googleapis/python-storage/issues/721)) ([5d1cfd2](https://togithub.com/googleapis/python-storage/commit/5d1cfd2050321481a3bc4acbe80537ea666506fa)) - retry client side requests timeout ([#​727](https://togithub.com/googleapis/python-storage/issues/727)) ([e0b3b35](https://togithub.com/googleapis/python-storage/commit/e0b3b354d51e4be7c563d7f2f628a7139df842c0)) ##### Documentation - fixed download_blob_to_file example ([#​704](https://togithub.com/googleapis/python-storage/issues/704)) ([2c94d98](https://togithub.com/googleapis/python-storage/commit/2c94d98ed21cc768cfa54fac3d734254fc4d8480))
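The BlobReader fix above ([#721](https://togithub.com/googleapis/python-storage/issues/721)) concerns file-like access to an object. A minimal sketch of the interleaved read/seek pattern it repairs, with placeholder bucket and object names (the object is assumed to hold at least ~1 KiB):

```python
from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("data.bin")  # placeholders

# blob.open("rb") yields a file-like BlobReader; 2.2.0 fixes the handling of
# reads and seeks interleaved on the same reader.
with blob.open("rb") as reader:
    header = reader.read(16)   # read the first 16 bytes
    reader.seek(1024)          # jump forward ...
    chunk = reader.read(64)    # ... and read from the new offset
    reader.seek(0)             # then seek back to the start
    assert reader.read(16) == header
```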
pytest-dev/pytest ### [`v7.1.0`](https://togithub.com/pytest-dev/pytest/releases/7.1.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/7.0.1...7.1.0) # pytest 7.1.0 (2022-03-13) ## Breaking Changes - [#8838](https://togithub.com/pytest-dev/pytest/issues/8838): As per our policy, the following features have been deprecated in the 6.X series and are now removed: - `pytest._fillfuncargs` function. - `pytest_warning_captured` hook - use `pytest_warning_recorded` instead. - `-k -foobar` syntax - use `-k 'not foobar'` instead. - `-k foobar:` syntax. - `pytest.collect` module - import from `pytest` directly. For more information consult [Deprecations and Removals](https://docs.pytest.org/en/latest/deprecations.html) in the docs. - [#9437](https://togithub.com/pytest-dev/pytest/issues/9437): Dropped support for Python 3.6, which reached [end-of-life](https://devguide.python.org/#status-of-python-branches) at 2021-12-23. ## Improvements - [#5192](https://togithub.com/pytest-dev/pytest/issues/5192): Fixed test output for some data types where `-v` would show less information. Also, when showing diffs for sequences, `-q` would produce full diffs instead of the expected diff. - [#9362](https://togithub.com/pytest-dev/pytest/issues/9362): pytest now avoids specialized assert formatting when it is detected that the default `__eq__` is overridden in `attrs` or `dataclasses`. - [#9536](https://togithub.com/pytest-dev/pytest/issues/9536): When `-vv` is given on the command line, show skipping and xfail reasons in full instead of truncating them to fit the terminal width. - [#9644](https://togithub.com/pytest-dev/pytest/issues/9644): More information about the location of resources that led Python to raise `ResourceWarning` can now be obtained by enabling `tracemalloc`. See the resource-warnings docs for more information. - [#9678](https://togithub.com/pytest-dev/pytest/issues/9678): More types are now accepted in the `ids` argument to `@pytest.mark.parametrize`. Previously only `str`, `float`, `int` and `bool` were accepted; now `bytes`, `complex`, `re.Pattern`, `Enum` and anything with a `__name__` are also accepted. - [#9692](https://togithub.com/pytest-dev/pytest/issues/9692): `pytest.approx` now raises a `TypeError` when given an unordered sequence (such as `set`). Note that this implies that custom classes which only implement `__iter__` and `__len__` are no longer supported as they don't guarantee order. ## Bug Fixes - [#8242](https://togithub.com/pytest-dev/pytest/issues/8242): The deprecation of raising `unittest.SkipTest` to skip collection of tests during the pytest collection phase is reverted - this is now a supported feature again. - [#9493](https://togithub.com/pytest-dev/pytest/issues/9493): Symbolic link components are no longer resolved in conftest paths. This means that if a conftest appears twice in the collection tree via symlinks, it will be executed twice. For example, given tests/real/conftest.py, tests/real/test_it.py, and tests/link -> tests/real, running `pytest tests` now imports the conftest twice, once as `tests/real/conftest.py` and once as `tests/link/conftest.py`. This is a fix to match a similar change made to test collection itself in pytest 6.0 (see pull request #6523 for details). - [#9626](https://togithub.com/pytest-dev/pytest/issues/9626): Fixed count of selected tests on terminal collection summary when there were errors or skipped modules. If there were errors or skipped modules on collection, pytest would mistakenly subtract those from the selected count. - [#9645](https://togithub.com/pytest-dev/pytest/issues/9645): Fixed regression where `--import-mode=importlib` used together with `PYTHONPATH` or the `pythonpath` ini option would cause import errors in test suites. - [#9708](https://togithub.com/pytest-dev/pytest/issues/9708): The `pytester` fixture now requests a `monkeypatch` fixture instead of creating one internally. This solves some issues with tests that involve pytest environment variables. - [#9730](https://togithub.com/pytest-dev/pytest/issues/9730): Malformed `pyproject.toml` files now produce a clearer error message.
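A minimal sketch of the broadened `ids` support from [#9678](https://togithub.com/pytest-dev/pytest/issues/9678) above; the enum and test names are arbitrary examples:

```python
import enum

import pytest

class Mode(enum.Enum):
    FAST = 1
    SAFE = 2

# pytest 7.1 accepts bytes, complex, re.Pattern, Enum members, and anything
# with a __name__ in `ids`; previously only str, float, int, and bool worked.
@pytest.mark.parametrize(
    "value",
    [b"raw", 1 + 2j, Mode.FAST],
    ids=[b"raw", 1 + 2j, Mode.FAST],
)
def test_value_is_truthy(value):
    assert value
```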
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index c531e813e29e..c265ab7091f6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==7.0.1 +pytest==7.1.0 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index a0c73323deb6..4cc5419a5db3 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,5 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 -google-cloud-storage==2.1.0; python_version == '3.6' -google-cloud-storage==2.1.0; python_version >= '3.7' +google-cloud-storage==2.2.0 google-cloud-pubsub==2.11.0 From 7a5944de2f066fe26599a3e380ca45f833e8fefd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Mar 2022 11:57:13 +0100 Subject: [PATCH 606/855] chore(deps): update dependency google-cloud-storage to v2.2.1 (#506) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4cc5419a5db3..b234d190bc77 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 -google-cloud-storage==2.2.0 +google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From add84046375896effe8bfe82afad383044f7d364 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Thu, 17 Mar 2022 15:50:31 -0700 Subject: [PATCH 607/855] chore: Update blunderbuss (#508) --- packages/google-cloud-logging/.github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index 148ebf4e81cb..28438484fcbf 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - Daniel-Sanche + - arbrown assign_prs: - - Daniel-Sanche + - arbrown From 20ce90e56704f96dfa7f4b2eb6c0572d3dde5c0d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Mar 2022 05:09:41 +0100 Subject: [PATCH 608/855] chore(deps): update dependency pytest to 
v7.1.1 (#509) Co-authored-by: Drew Brown --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index c265ab7091f6..678dbc8efd1b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==7.1.0 +pytest==7.1.1 From 52a729d9c0bb877f1c901df2300ec150fd74e23a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 29 Mar 2022 00:00:18 +0000 Subject: [PATCH 609/855] chore(python): use black==22.3.0 (#513) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- .../.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/docs/conf.py | 5 +- .../google/cloud/logging_v2/_gapic.py | 31 +- .../google/cloud/logging_v2/_http.py | 22 +- .../google/cloud/logging_v2/client.py | 2 +- .../google/cloud/logging_v2/metric.py | 4 +- .../config_service_v2/async_client.py | 196 ++- .../services/config_service_v2/client.py | 293 +++- .../config_service_v2/transports/base.py | 82 +- .../config_service_v2/transports/grpc.py | 3 +- .../logging_service_v2/async_client.py | 59 +- .../services/logging_service_v2/client.py | 110 +- .../logging_service_v2/transports/base.py | 10 +- .../logging_service_v2/transports/grpc.py | 3 +- .../metrics_service_v2/async_client.py | 42 +- .../services/metrics_service_v2/client.py | 91 +- .../metrics_service_v2/transports/base.py | 14 +- .../metrics_service_v2/transports/grpc.py | 3 +- .../cloud/logging_v2/types/log_entry.py | 144 +- .../google/cloud/logging_v2/types/logging.py | 160 +- .../cloud/logging_v2/types/logging_config.py | 590 +++++-- .../cloud/logging_v2/types/logging_metrics.py | 113 +- packages/google-cloud-logging/noxfile.py | 9 +- .../samples/snippets/noxfile.py | 2 +- .../logging_v2/test_config_service_v2.py | 1424 +++++++++++++---- .../logging_v2/test_logging_service_v2.py | 475 ++++-- .../logging_v2/test_metrics_service_v2.py | 379 +++-- .../tests/unit/handlers/test_handlers.py | 62 +- .../unit/handlers/test_structured_log.py | 70 +- .../transports/test_background_thread.py | 14 +- .../tests/unit/test__gapic.py | 4 +- .../tests/unit/test_logger.py | 18 +- 32 files changed, 3471 insertions(+), 965 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 44c78f7cc12d..87dd00611576 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 743981d3a4c6..04f5d0ef5d00 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index 3661d3d09184..4b5429f2f871 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -299,7 +299,12 @@ def sink_get(self, sink_name): ) def sink_update( - self, sink_name, filter_, destination, *, unique_writer_identity=False, + self, + sink_name, + filter_, + destination, + *, + unique_writer_identity=False, ): """Update a sink resource. @@ -326,7 +331,11 @@ def sink_update( protobuf to a dictionary). """ name = sink_name.split("/")[-1] # parse name out of full resoure name - sink_pb = LogSink(name=name, filter=filter_, destination=destination,) + sink_pb = LogSink( + name=name, + filter=filter_, + destination=destination, + ) request = UpdateSinkRequest( sink_name=sink_name, @@ -362,7 +371,7 @@ def sink_delete(self, sink_name): class _MetricsAPI(object): - """Helper mapping sink-related APIs. """ + """Helper mapping sink-related APIs.""" def __init__(self, gapic_api, client): self._gapic_api = gapic_api @@ -389,7 +398,9 @@ def list_metrics( """ path = f"projects/{project}" request = ListLogMetricsRequest( - parent=path, page_size=page_size, page_token=page_token, + parent=path, + page_size=page_size, + page_token=page_token, ) response = self._gapic_api.list_log_metrics(request=request) metric_iter = iter(response) @@ -449,7 +460,11 @@ def metric_get(self, project, metric_name): ) def metric_update( - self, project, metric_name, filter_, description, + self, + project, + metric_name, + filter_, + description, ): """Update a metric resource. @@ -465,7 +480,11 @@ def metric_update( protobuf to a dictionary). 
""" path = f"projects/{project}/metrics/{metric_name}" - metric_pb = LogMetric(name=path, filter=filter_, description=description,) + metric_pb = LogMetric( + name=path, + filter=filter_, + description=description, + ) metric_pb = self._gapic_api.update_log_metric( metric_name=path, metric=metric_pb ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py index 21fb38606d0c..cb5fd61eb931 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -454,18 +454,18 @@ def metric_get(self, project, metric_name): def metric_update(self, project, metric_name, filter_, description): """Update a metric resource. - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - - Args: - project (str): ID of the project containing the metric. - metric_name (str): the name of the metric - filter_ (str): the advanced logs filter expression defining the - entries exported by the metric. - description (str): description of the metric. + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - Returns: - dict: The returned (updated) resource. + Args: + project (str): ID of the project containing the metric. + metric_name (str): the name of the metric + filter_ (str): the advanced logs filter expression defining the + entries exported by the metric. + description (str): description of the metric. + + Returns: + dict: The returned (updated) resource. """ target = f"/projects/{project}/metrics/{metric_name}" data = {"name": metric_name, "filter": filter_, "description": description} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 3d5ea24fc568..049737861b79 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -118,7 +118,7 @@ def __init__( client_options (Optional[Union[dict, google.api_core.client_options.ClientOptions]]): Client options used to set user options on the client. API Endpoint should be set through client_options. - """ + """ super(Client, self).__init__( project=project, credentials=credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/metric.py b/packages/google-cloud-logging/google/cloud/logging_v2/metric.py index 2959bacc2e5b..167165c973e4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/metric.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/metric.py @@ -20,8 +20,8 @@ class Metric(object): """Metrics represent named filters for log entries. - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics """ def __init__(self, name, *, filter_=None, client=None, description=""): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index de3a6bbb751d..916fbd18bed0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -318,12 +318,20 @@ def sample_list_buckets(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBucketsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -391,7 +399,12 @@ def sample_get_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -462,7 +475,12 @@ def sample_create_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -541,7 +559,12 @@ def sample_update_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -606,7 +629,10 @@ def sample_delete_bucket(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def undelete_bucket( @@ -666,7 +692,10 @@ def sample_undelete_bucket(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_views( @@ -759,12 +788,20 @@ def sample_list_views(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListViewsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -832,7 +869,12 @@ def sample_get_view(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -902,7 +944,12 @@ def sample_create_view(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -974,7 +1021,12 @@ def sample_update_view(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1037,7 +1089,10 @@ def sample_delete_view(): # Send the request. 
await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_sinks( @@ -1145,12 +1200,20 @@ def sample_list_sinks(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListSinksAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1270,7 +1333,12 @@ def sample_get_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1396,7 +1464,12 @@ def sample_create_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1562,7 +1635,12 @@ def sample_update_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1669,7 +1747,10 @@ def sample_delete_sink(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_exclusions( @@ -1779,12 +1860,20 @@ def sample_list_exclusions(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListExclusionsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1900,7 +1989,12 @@ def sample_get_exclusion(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2025,7 +2119,12 @@ def sample_create_exclusion(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2164,7 +2263,12 @@ def sample_update_exclusion(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2267,7 +2371,10 @@ def sample_delete_exclusion(): # Send the request. 
await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def get_cmek_settings( @@ -2356,7 +2463,12 @@ def sample_get_cmek_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2452,7 +2564,12 @@ def sample_update_cmek_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2574,7 +2691,12 @@ def sample_get_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2706,7 +2828,12 @@ def sample_update_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2778,7 +2905,12 @@ def sample_copy_log_entries(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -2800,7 +2932,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 041b1c838d82..d14ea70da2f8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -59,7 +59,10 @@ class ConfigServiceV2ClientMeta(type): _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. 
Args: @@ -164,9 +167,13 @@ def transport(self) -> ConfigServiceV2Transport: return self._transport @staticmethod - def cmek_settings_path(project: str,) -> str: + def cmek_settings_path( + project: str, + ) -> str: """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project,) + return "projects/{project}/cmekSettings".format( + project=project, + ) @staticmethod def parse_cmek_settings_path(path: str) -> Dict[str, str]: @@ -175,10 +182,16 @@ def parse_cmek_settings_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_bucket_path(project: str, location: str, bucket: str,) -> str: + def log_bucket_path( + project: str, + location: str, + bucket: str, + ) -> str: """Returns a fully-qualified log_bucket string.""" return "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, location=location, bucket=bucket, + project=project, + location=location, + bucket=bucket, ) @staticmethod @@ -191,10 +204,14 @@ def parse_log_bucket_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_exclusion_path(project: str, exclusion: str,) -> str: + def log_exclusion_path( + project: str, + exclusion: str, + ) -> str: """Returns a fully-qualified log_exclusion string.""" return "projects/{project}/exclusions/{exclusion}".format( - project=project, exclusion=exclusion, + project=project, + exclusion=exclusion, ) @staticmethod @@ -204,9 +221,15 @@ def parse_log_exclusion_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_sink_path(project: str, sink: str,) -> str: + def log_sink_path( + project: str, + sink: str, + ) -> str: """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + return "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) @staticmethod def parse_log_sink_path(path: str) -> Dict[str, str]: @@ -215,10 +238,18 @@ def parse_log_sink_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: + def log_view_path( + project: str, + location: str, + bucket: str, + view: str, + ) -> str: """Returns a fully-qualified log_view string.""" return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, location=location, bucket=bucket, view=view, + project=project, + location=location, + bucket=bucket, + view=view, ) @staticmethod @@ -231,9 +262,13 @@ def parse_log_view_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def settings_path(project: str,) -> str: + def settings_path( + project: str, + ) -> str: """Returns a fully-qualified settings string.""" - return "projects/{project}/settings".format(project=project,) + return "projects/{project}/settings".format( + project=project, + ) @staticmethod def parse_settings_path(path: str) -> Dict[str, str]: @@ -242,7 +277,9 @@ def parse_settings_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -255,9 +292,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, 
str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -266,9 +307,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -277,9 +322,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -288,10 +337,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -563,12 +616,20 @@ def sample_list_buckets(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBucketsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -637,7 +698,12 @@ def sample_get_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -709,7 +775,12 @@ def sample_create_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -789,7 +860,12 @@ def sample_update_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -855,7 +931,10 @@ def sample_delete_bucket(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def undelete_bucket( @@ -916,7 +995,10 @@ def sample_undelete_bucket(): # Send the request. 
rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_views( @@ -1009,12 +1091,20 @@ def sample_list_views(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListViewsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1083,7 +1173,12 @@ def sample_get_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1154,7 +1249,12 @@ def sample_create_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1227,7 +1327,12 @@ def sample_update_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1291,7 +1396,10 @@ def sample_delete_view(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_sinks( @@ -1388,12 +1496,20 @@ def sample_list_sinks(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListSinksPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1502,7 +1618,12 @@ def sample_get_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1628,7 +1749,12 @@ def sample_create_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1783,7 +1909,12 @@ def sample_update_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1879,7 +2010,10 @@ def sample_delete_sink(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_exclusions( @@ -1978,12 +2112,20 @@ def sample_list_exclusions(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListExclusionsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -2088,7 +2230,12 @@ def sample_get_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2213,7 +2360,12 @@ def sample_create_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2352,7 +2504,12 @@ def sample_update_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2444,7 +2601,10 @@ def sample_delete_exclusion(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def get_cmek_settings( @@ -2534,7 +2694,12 @@ def sample_get_cmek_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2631,7 +2796,12 @@ def sample_update_cmek_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2753,7 +2923,12 @@ def sample_get_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2885,7 +3060,12 @@ def sample_update_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2958,7 +3138,12 @@ def sample_copy_log_entries(): rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -2987,7 +3172,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 6dfc1fd2fd55..95de06d1a972 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -32,7 +32,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -127,37 +129,59 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.list_buckets: gapic_v1.method.wrap_method( - self.list_buckets, default_timeout=None, client_info=client_info, + self.list_buckets, + default_timeout=None, + client_info=client_info, ), self.get_bucket: gapic_v1.method.wrap_method( - self.get_bucket, default_timeout=None, client_info=client_info, + self.get_bucket, + default_timeout=None, + client_info=client_info, ), self.create_bucket: gapic_v1.method.wrap_method( - self.create_bucket, default_timeout=None, client_info=client_info, + self.create_bucket, + default_timeout=None, + client_info=client_info, ), self.update_bucket: gapic_v1.method.wrap_method( - self.update_bucket, default_timeout=None, client_info=client_info, + self.update_bucket, + default_timeout=None, + client_info=client_info, ), self.delete_bucket: gapic_v1.method.wrap_method( - self.delete_bucket, default_timeout=None, client_info=client_info, + self.delete_bucket, + default_timeout=None, + client_info=client_info, ), self.undelete_bucket: gapic_v1.method.wrap_method( - self.undelete_bucket, default_timeout=None, client_info=client_info, + self.undelete_bucket, + default_timeout=None, + client_info=client_info, ), self.list_views: gapic_v1.method.wrap_method( - self.list_views, default_timeout=None, client_info=client_info, + self.list_views, + default_timeout=None, + client_info=client_info, ), self.get_view: gapic_v1.method.wrap_method( - self.get_view, default_timeout=None, client_info=client_info, + self.get_view, + default_timeout=None, + client_info=client_info, ), self.create_view: gapic_v1.method.wrap_method( - self.create_view, default_timeout=None, client_info=client_info, + self.create_view, + default_timeout=None, + client_info=client_info, ), self.update_view: gapic_v1.method.wrap_method( - self.update_view, default_timeout=None, client_info=client_info, + self.update_view, + default_timeout=None, + client_info=client_info, ), self.delete_view: gapic_v1.method.wrap_method( - self.delete_view, default_timeout=None, client_info=client_info, + self.delete_view, + default_timeout=None, + client_info=client_info, ), self.list_sinks: gapic_v1.method.wrap_method( self.list_sinks, @@ -192,7 +216,9 @@ def 
_prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_sink: gapic_v1.method.wrap_method( - self.create_sink, default_timeout=120.0, client_info=client_info, + self.create_sink, + default_timeout=120.0, + client_info=client_info, ), self.update_sink: gapic_v1.method.wrap_method( self.update_sink, @@ -259,10 +285,14 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_exclusion: gapic_v1.method.wrap_method( - self.create_exclusion, default_timeout=120.0, client_info=client_info, + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, ), self.update_exclusion: gapic_v1.method.wrap_method( - self.update_exclusion, default_timeout=120.0, client_info=client_info, + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, ), self.delete_exclusion: gapic_v1.method.wrap_method( self.delete_exclusion, @@ -281,7 +311,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_cmek_settings: gapic_v1.method.wrap_method( - self.get_cmek_settings, default_timeout=None, client_info=client_info, + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, ), self.update_cmek_settings: gapic_v1.method.wrap_method( self.update_cmek_settings, @@ -289,22 +321,28 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_settings: gapic_v1.method.wrap_method( - self.get_settings, default_timeout=None, client_info=client_info, + self.get_settings, + default_timeout=None, + client_info=client_info, ), self.update_settings: gapic_v1.method.wrap_method( - self.update_settings, default_timeout=None, client_info=client_info, + self.update_settings, + default_timeout=None, + client_info=client_info, ), self.copy_log_entries: gapic_v1.method.wrap_method( - self.copy_log_entries, default_timeout=None, client_info=client_info, + self.copy_log_entries, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 301334f806c0..228f1c9a32cf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -228,8 +228,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index c89da25a5164..7973d4395117 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -317,7 +317,10 @@ def sample_delete_log(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def write_log_entries( @@ -506,7 +509,12 @@ def sample_write_log_entries(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -655,12 +663,20 @@ def sample_list_log_entries(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogEntriesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -739,12 +755,20 @@ def sample_list_monitored_resource_descriptors(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListMonitoredResourceDescriptorsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -855,12 +879,20 @@ def sample_list_logs(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -943,7 +975,12 @@ def request_generator(): ) # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -957,7 +994,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 3eae59704dc1..8638cfb10044 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -57,7 +57,10 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -162,9 +165,15 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str, log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log,) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod def parse_log_path(path: str) -> Dict[str, str]: @@ -173,7 +182,9 @@ def parse_log_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -186,9 +197,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -197,9 +212,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -208,9 +227,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) 
@staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -219,10 +242,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -488,7 +515,10 @@ def sample_delete_log(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def write_log_entries( @@ -665,7 +695,12 @@ def sample_write_log_entries(): rpc = self._transport._wrapped_methods[self._transport.write_log_entries] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -803,12 +838,20 @@ def sample_list_log_entries(): rpc = self._transport._wrapped_methods[self._transport.list_log_entries] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogEntriesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -879,12 +922,20 @@ def sample_list_monitored_resource_descriptors(): ] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListMonitoredResourceDescriptorsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -984,12 +1035,20 @@ def sample_list_logs(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1057,7 +1116,12 @@ def request_generator(): rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1078,7 +1142,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 5f474f006db5..716a2fbbc313 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -30,7 +30,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -226,9 +228,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 76b562d7ee61..176d4475f20f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index e3bf4c51a712..af6265e82c26 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -307,12 +307,20 @@ def sample_list_log_metrics(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogMetricsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -427,7 +435,12 @@ def sample_get_log_metric(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -547,7 +560,12 @@ def sample_create_log_metric(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -679,7 +697,12 @@ def sample_update_log_metric(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -776,7 +799,10 @@ def sample_delete_log_metric(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def __aenter__(self): @@ -788,7 +814,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 5ab25db207bd..bb2221b857b5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -58,7 +58,10 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. 
Args: @@ -163,10 +166,14 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str, metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" return "projects/{project}/metrics/{metric}".format( - project=project, metric=metric, + project=project, + metric=metric, ) @staticmethod @@ -176,7 +183,9 @@ def parse_log_metric_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -189,9 +198,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -200,9 +213,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -211,9 +228,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -222,10 +243,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -490,12 +515,20 @@ def sample_list_log_metrics(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogMetricsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -599,7 +632,12 @@ def sample_get_log_metric(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -719,7 +757,12 @@ def sample_create_log_metric(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -840,7 +883,12 @@ def sample_update_log_metric(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -926,7 +974,10 @@ def sample_delete_log_metric(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def __enter__(self): @@ -945,7 +996,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index b3d9bab57245..cc483aeff32b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -30,7 +30,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -158,7 +160,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_log_metric: gapic_v1.method.wrap_method( - self.create_log_metric, default_timeout=60.0, client_info=client_info, + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, ), self.update_log_metric: gapic_v1.method.wrap_method( self.update_log_metric, @@ -197,9 +201,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d0241fdd2857..6c1fd9b73082 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 1bc7a3ea405b..2bdea1b73fb1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -25,7 +25,12 @@ __protobuf__ = proto.module( package="google.logging.v2", - manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation", "LogSplit",}, + manifest={ + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + }, ) @@ -191,35 +196,88 @@ class LogEntry(proto.Message): entries split from a single LogEntry. """ - log_name = proto.Field(proto.STRING, number=12,) + log_name = proto.Field( + proto.STRING, + number=12, + ) resource = proto.Field( - proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, + proto.MESSAGE, + number=8, + message=monitored_resource_pb2.MonitoredResource, ) proto_payload = proto.Field( - proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, + proto.MESSAGE, + number=2, + oneof="payload", + message=any_pb2.Any, + ) + text_payload = proto.Field( + proto.STRING, + number=3, + oneof="payload", ) - text_payload = proto.Field(proto.STRING, number=3, oneof="payload",) json_payload = proto.Field( - proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, + proto.MESSAGE, + number=6, + oneof="payload", + message=struct_pb2.Struct, + ) + timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, ) - timestamp = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) receive_timestamp = proto.Field( - proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + severity = proto.Field( + proto.ENUM, + number=10, + enum=log_severity_pb2.LogSeverity, + ) + insert_id = proto.Field( + proto.STRING, + number=4, ) - severity = proto.Field(proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity,) - insert_id = proto.Field(proto.STRING, number=4,) http_request = proto.Field( - proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, + proto.MESSAGE, + number=7, + message=http_request_pb2.HttpRequest, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + operation = proto.Field( + proto.MESSAGE, + number=15, + message="LogEntryOperation", + ) + trace = proto.Field( + proto.STRING, + number=22, + ) + span_id = proto.Field( + proto.STRING, + number=27, + ) + trace_sampled = proto.Field( + proto.BOOL, + number=30, ) - labels = proto.MapField(proto.STRING, proto.STRING, number=11,) - operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) - 
trace = proto.Field(proto.STRING, number=22,) - span_id = proto.Field(proto.STRING, number=27,) - trace_sampled = proto.Field(proto.BOOL, number=30,) source_location = proto.Field( - proto.MESSAGE, number=23, message="LogEntrySourceLocation", + proto.MESSAGE, + number=23, + message="LogEntrySourceLocation", + ) + split = proto.Field( + proto.MESSAGE, + number=35, + message="LogSplit", ) - split = proto.Field(proto.MESSAGE, number=35, message="LogSplit",) class LogEntryOperation(proto.Message): @@ -244,10 +302,22 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field(proto.STRING, number=1,) - producer = proto.Field(proto.STRING, number=2,) - first = proto.Field(proto.BOOL, number=3,) - last = proto.Field(proto.BOOL, number=4,) + id = proto.Field( + proto.STRING, + number=1, + ) + producer = proto.Field( + proto.STRING, + number=2, + ) + first = proto.Field( + proto.BOOL, + number=3, + ) + last = proto.Field( + proto.BOOL, + number=4, + ) class LogEntrySourceLocation(proto.Message): @@ -272,9 +342,18 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). """ - file = proto.Field(proto.STRING, number=1,) - line = proto.Field(proto.INT64, number=2,) - function = proto.Field(proto.STRING, number=3,) + file = proto.Field( + proto.STRING, + number=1, + ) + line = proto.Field( + proto.INT64, + number=2, + ) + function = proto.Field( + proto.STRING, + number=3, + ) class LogSplit(proto.Message): @@ -298,9 +377,18 @@ class LogSplit(proto.Message): original LogEntry was split into. """ - uid = proto.Field(proto.STRING, number=1,) - index = proto.Field(proto.INT32, number=2,) - total_splits = proto.Field(proto.INT32, number=3,) + uid = proto.Field( + proto.STRING, + number=1, + ) + index = proto.Field( + proto.INT32, + number=2, + ) + total_splits = proto.Field( + proto.INT32, + number=3, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 76d86e34f2e4..383a4ef772a8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -60,7 +60,10 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field(proto.STRING, number=1,) + log_name = proto.Field( + proto.STRING, + number=1, + ) class WriteLogEntriesRequest(proto.Message): @@ -152,19 +155,37 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. 
""" - log_name = proto.Field(proto.STRING, number=1,) + log_name = proto.Field( + proto.STRING, + number=1, + ) resource = proto.Field( - proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + entries = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=log_entry.LogEntry, + ) + partial_success = proto.Field( + proto.BOOL, + number=5, + ) + dry_run = proto.Field( + proto.BOOL, + number=6, ) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) - partial_success = proto.Field(proto.BOOL, number=5,) - dry_run = proto.Field(proto.BOOL, number=6,) class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. - """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -182,7 +203,10 @@ class WriteLogEntriesPartialErrors(proto.Message): """ log_entry_errors = proto.MapField( - proto.INT32, proto.MESSAGE, number=1, message=status_pb2.Status, + proto.INT32, + proto.MESSAGE, + number=1, + message=status_pb2.Status, ) @@ -241,11 +265,26 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. """ - resource_names = proto.RepeatedField(proto.STRING, number=8,) - filter = proto.Field(proto.STRING, number=2,) - order_by = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + order_by = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ListLogEntriesResponse(proto.Message): @@ -277,8 +316,15 @@ class ListLogEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - next_page_token = proto.Field(proto.STRING, number=2,) + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListMonitoredResourceDescriptorsRequest(proto.Message): @@ -298,8 +344,14 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. 
""" - page_size = proto.Field(proto.INT32, number=1,) - page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field( + proto.INT32, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) class ListMonitoredResourceDescriptorsResponse(proto.Message): @@ -324,7 +376,10 @@ def raw_page(self): number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - next_page_token = proto.Field(proto.STRING, number=2,) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListLogsRequest(proto.Message): @@ -365,10 +420,22 @@ class ListLogsRequest(proto.Message): - ``folders/[FOLDER_ID]`` """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - resource_names = proto.RepeatedField(proto.STRING, number=8,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) class ListLogsResponse(proto.Message): @@ -390,8 +457,14 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField(proto.STRING, number=3,) - next_page_token = proto.Field(proto.STRING, number=2,) + log_names = proto.RepeatedField( + proto.STRING, + number=3, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class TailLogEntriesRequest(proto.Message): @@ -432,9 +505,19 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=2,) - buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) + resource_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + buffer_window = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) class TailLogEntriesResponse(proto.Message): @@ -476,13 +559,24 @@ class Reason(proto.Enum): NOT_CONSUMED = 2 reason = proto.Field( - proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", + proto.ENUM, + number=1, + enum="TailLogEntriesResponse.SuppressionInfo.Reason", + ) + suppressed_count = proto.Field( + proto.INT32, + number=2, ) - suppressed_count = proto.Field(proto.INT32, number=2,) - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) suppression_info = proto.RepeatedField( - proto.MESSAGE, number=2, message=SuppressionInfo, + proto.MESSAGE, + number=2, + message=SuppressionInfo, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 3dab7a14301e..c2fcf30bde70 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -153,15 +153,46 @@ class LogBucket(proto.Message): KMS key is allowed. 
""" - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=3,) - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - retention_days = proto.Field(proto.INT32, number=11,) - locked = proto.Field(proto.BOOL, number=9,) - lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) - restricted_fields = proto.RepeatedField(proto.STRING, number=15,) - cmek_settings = proto.Field(proto.MESSAGE, number=19, message="CmekSettings",) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + retention_days = proto.Field( + proto.INT32, + number=11, + ) + locked = proto.Field( + proto.BOOL, + number=9, + ) + lifecycle_state = proto.Field( + proto.ENUM, + number=12, + enum="LifecycleState", + ) + restricted_fields = proto.RepeatedField( + proto.STRING, + number=15, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=19, + message="CmekSettings", + ) class LogView(proto.Message): @@ -199,11 +230,28 @@ class LogView(proto.Message): "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=3,) - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - filter = proto.Field(proto.STRING, number=7,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + filter = proto.Field( + proto.STRING, + number=7, + ) class LogSink(proto.Message): @@ -326,23 +374,59 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field(proto.STRING, number=1,) - destination = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=5,) - description = proto.Field(proto.STRING, number=18,) - disabled = proto.Field(proto.BOOL, number=19,) - exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) - output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) - writer_identity = proto.Field(proto.STRING, number=8,) - include_children = proto.Field(proto.BOOL, number=9,) + name = proto.Field( + proto.STRING, + number=1, + ) + destination = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + description = proto.Field( + proto.STRING, + number=18, + ) + disabled = proto.Field( + proto.BOOL, + number=19, + ) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=16, + message="LogExclusion", + ) + output_version_format = proto.Field( + proto.ENUM, + number=6, + enum=VersionFormat, + ) + writer_identity = proto.Field( + proto.STRING, + number=8, + ) + include_children = proto.Field( + proto.BOOL, + number=9, + ) bigquery_options = proto.Field( - proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", + proto.MESSAGE, + number=12, + oneof="options", + message="BigQueryOptions", ) create_time = proto.Field( - 
proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, ) update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, ) @@ -373,8 +457,14 @@ class BigQueryOptions(proto.Message): will have this field set to false. """ - use_partitioned_tables = proto.Field(proto.BOOL, number=1,) - uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3,) + use_partitioned_tables = proto.Field( + proto.BOOL, + number=1, + ) + uses_timestamp_column_partitioning = proto.Field( + proto.BOOL, + number=3, + ) class ListBucketsRequest(proto.Message): @@ -408,9 +498,18 @@ class ListBucketsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListBucketsResponse(proto.Message): @@ -430,8 +529,15 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) - next_page_token = proto.Field(proto.STRING, number=2,) + buckets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogBucket", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class CreateBucketRequest(proto.Message): @@ -460,9 +566,19 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field(proto.STRING, number=1,) - bucket_id = proto.Field(proto.STRING, number=2,) - bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) + parent = proto.Field( + proto.STRING, + number=1, + ) + bucket_id = proto.Field( + proto.STRING, + number=2, + ) + bucket = proto.Field( + proto.MESSAGE, + number=3, + message="LogBucket", + ) class UpdateBucketRequest(proto.Message): @@ -496,10 +612,19 @@ class UpdateBucketRequest(proto.Message): For example: ``updateMask=retention_days`` """ - name = proto.Field(proto.STRING, number=1,) - bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) + name = proto.Field( + proto.STRING, + number=1, + ) + bucket = proto.Field( + proto.MESSAGE, + number=2, + message="LogBucket", + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -522,7 +647,10 @@ class GetBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteBucketRequest(proto.Message): @@ -544,7 +672,10 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UndeleteBucketRequest(proto.Message): @@ -566,7 +697,10 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ListViewsRequest(proto.Message): @@ -594,9 +728,18 @@ class ListViewsRequest(proto.Message): results might be available. 
""" - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListViewsResponse(proto.Message): @@ -616,8 +759,15 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) - next_page_token = proto.Field(proto.STRING, number=2,) + views = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogView", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class CreateViewRequest(proto.Message): @@ -640,9 +790,19 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field(proto.STRING, number=1,) - view_id = proto.Field(proto.STRING, number=2,) - view = proto.Field(proto.MESSAGE, number=3, message="LogView",) + parent = proto.Field( + proto.STRING, + number=1, + ) + view_id = proto.Field( + proto.STRING, + number=2, + ) + view = proto.Field( + proto.MESSAGE, + number=3, + message="LogView", + ) class UpdateViewRequest(proto.Message): @@ -673,10 +833,19 @@ class UpdateViewRequest(proto.Message): For example: ``updateMask=filter`` """ - name = proto.Field(proto.STRING, number=1,) - view = proto.Field(proto.MESSAGE, number=2, message="LogView",) + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.MESSAGE, + number=2, + message="LogView", + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -696,7 +865,10 @@ class GetViewRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteViewRequest(proto.Message): @@ -717,7 +889,10 @@ class DeleteViewRequest(proto.Message): `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ListSinksRequest(proto.Message): @@ -746,9 +921,18 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListSinksResponse(proto.Message): @@ -768,8 +952,15 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) - next_page_token = proto.Field(proto.STRING, number=2,) + sinks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogSink", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetSinkRequest(proto.Message): @@ -791,7 +982,10 @@ class GetSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field(proto.STRING, number=1,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) class CreateSinkRequest(proto.Message): @@ -832,9 +1026,19 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. 
""" - parent = proto.Field(proto.STRING, number=1,) - sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - unique_writer_identity = proto.Field(proto.BOOL, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message="LogSink", + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) class UpdateSinkRequest(proto.Message): @@ -894,11 +1098,23 @@ class UpdateSinkRequest(proto.Message): For example: ``updateMask=filter`` """ - sink_name = proto.Field(proto.STRING, number=1,) - sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - unique_writer_identity = proto.Field(proto.BOOL, number=3,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message="LogSink", + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -922,7 +1138,10 @@ class DeleteSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field(proto.STRING, number=1,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) class LogExclusion(proto.Message): @@ -971,12 +1190,32 @@ class LogExclusion(proto.Message): exclusions. """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) - filter = proto.Field(proto.STRING, number=3,) - disabled = proto.Field(proto.BOOL, number=4,) - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + disabled = proto.Field( + proto.BOOL, + number=4, + ) + create_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) class ListExclusionsRequest(proto.Message): @@ -1006,9 +1245,18 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListExclusionsResponse(proto.Message): @@ -1028,8 +1276,15 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions = proto.RepeatedField(proto.MESSAGE, number=1, message="LogExclusion",) - next_page_token = proto.Field(proto.STRING, number=2,) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogExclusion", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetExclusionRequest(proto.Message): @@ -1051,7 +1306,10 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class CreateExclusionRequest(proto.Message): @@ -1079,8 +1337,15 @@ class CreateExclusionRequest(proto.Message): resource. 
""" - parent = proto.Field(proto.STRING, number=1,) - exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) + parent = proto.Field( + proto.STRING, + number=1, + ) + exclusion = proto.Field( + proto.MESSAGE, + number=2, + message="LogExclusion", + ) class UpdateExclusionRequest(proto.Message): @@ -1116,10 +1381,19 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field(proto.STRING, number=1,) - exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) + name = proto.Field( + proto.STRING, + number=1, + ) + exclusion = proto.Field( + proto.MESSAGE, + number=2, + message="LogExclusion", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1143,7 +1417,10 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class GetCmekSettingsRequest(proto.Message): @@ -1175,7 +1452,10 @@ class GetCmekSettingsRequest(proto.Message): projects and folders in the Google Cloud organization. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UpdateCmekSettingsRequest(proto.Message): @@ -1223,10 +1503,19 @@ class UpdateCmekSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1,) - cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",) + name = proto.Field( + proto.STRING, + number=1, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=2, + message="CmekSettings", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1296,9 +1585,18 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field(proto.STRING, number=1,) - kms_key_name = proto.Field(proto.STRING, number=2,) - service_account_id = proto.Field(proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + service_account_id = proto.Field( + proto.STRING, + number=3, + ) class GetSettingsRequest(proto.Message): @@ -1331,7 +1629,10 @@ class GetSettingsRequest(proto.Message): and folders in the Google Cloud organization. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UpdateSettingsRequest(proto.Message): @@ -1376,10 +1677,19 @@ class UpdateSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1,) - settings = proto.Field(proto.MESSAGE, number=2, message="Settings",) + name = proto.Field( + proto.STRING, + number=1, + ) + settings = proto.Field( + proto.MESSAGE, + number=2, + message="Settings", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1453,11 +1763,26 @@ class Settings(proto.Message): manually if needed. 
""" - name = proto.Field(proto.STRING, number=1,) - kms_key_name = proto.Field(proto.STRING, number=2,) - kms_service_account_id = proto.Field(proto.STRING, number=3,) - storage_location = proto.Field(proto.STRING, number=4,) - disable_default_sink = proto.Field(proto.BOOL, number=5,) + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + kms_service_account_id = proto.Field( + proto.STRING, + number=3, + ) + storage_location = proto.Field( + proto.STRING, + number=4, + ) + disable_default_sink = proto.Field( + proto.BOOL, + number=5, + ) class CopyLogEntriesRequest(proto.Message): @@ -1480,9 +1805,18 @@ class CopyLogEntriesRequest(proto.Message): entries. """ - name = proto.Field(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=3,) - destination = proto.Field(proto.STRING, number=4,) + name = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + destination = proto.Field( + proto.STRING, + number=4, + ) class CopyLogEntriesMetadata(proto.Message): @@ -1513,13 +1847,38 @@ class CopyLogEntriesMetadata(proto.Message): For example: ``"serviceAccount:foo@bar.com"`` """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - state = proto.Field(proto.ENUM, number=3, enum="OperationState",) - cancellation_requested = proto.Field(proto.BOOL, number=4,) - request = proto.Field(proto.MESSAGE, number=5, message="CopyLogEntriesRequest",) - progress = proto.Field(proto.INT32, number=6,) - writer_identity = proto.Field(proto.STRING, number=7,) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + cancellation_requested = proto.Field( + proto.BOOL, + number=4, + ) + request = proto.Field( + proto.MESSAGE, + number=5, + message="CopyLogEntriesRequest", + ) + progress = proto.Field( + proto.INT32, + number=6, + ) + writer_identity = proto.Field( + proto.STRING, + number=7, + ) class CopyLogEntriesResponse(proto.Message): @@ -1530,7 +1889,10 @@ class CopyLogEntriesResponse(proto.Message): Number of log entries copied. 
""" - log_entries_copied_count = proto.Field(proto.INT64, number=1,) + log_entries_copied_count = proto.Field( + proto.INT64, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index af1f2f548c24..323599423adf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -170,23 +170,56 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) - filter = proto.Field(proto.STRING, number=3,) - disabled = proto.Field(proto.BOOL, number=12,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + disabled = proto.Field( + proto.BOOL, + number=12, + ) metric_descriptor = proto.Field( - proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, + proto.MESSAGE, + number=5, + message=metric_pb2.MetricDescriptor, + ) + value_extractor = proto.Field( + proto.STRING, + number=6, + ) + label_extractors = proto.MapField( + proto.STRING, + proto.STRING, + number=7, ) - value_extractor = proto.Field(proto.STRING, number=6,) - label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7,) bucket_options = proto.Field( - proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, + proto.MESSAGE, + number=8, + message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, ) - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) update_time = proto.Field( - proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version = proto.Field( + proto.ENUM, + number=4, + enum=ApiVersion, ) - version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,) class ListLogMetricsRequest(proto.Message): @@ -212,9 +245,18 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListLogMetricsResponse(proto.Message): @@ -234,8 +276,15 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",) - next_page_token = proto.Field(proto.STRING, number=2,) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogMetric", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetLogMetricRequest(proto.Message): @@ -250,7 +299,10 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) class CreateLogMetricRequest(proto.Message): @@ -271,8 +323,15 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. 
""" - parent = proto.Field(proto.STRING, number=1,) - metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + parent = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message="LogMetric", + ) class UpdateLogMetricRequest(proto.Message): @@ -294,8 +353,15 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field(proto.STRING, number=1,) - metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + metric_name = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message="LogMetric", + ) class DeleteLogMetricRequest(proto.Message): @@ -310,7 +376,10 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 96ef7ee7e4ea..d2f8f0e56276 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -57,7 +57,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +69,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 4c808af73ea2..949e0fde9ae1 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. 
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 401394debb62..75227b5b35c1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -94,7 +94,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] + "client_class", + [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, + ], ) def test_config_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -136,7 +140,11 @@ def test_config_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] + "client_class", + [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, + ], ) def test_config_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -510,7 +518,9 @@ def test_config_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -655,10 +665,17 @@ def test_config_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListBucketsRequest, + dict, + ], +) def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -687,7 +704,8 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -703,7 +721,8 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -714,7 +733,9 @@ async def test_list_buckets_async( with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_buckets(request) @@ -734,7 +755,9 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -754,7 +777,10 @@ def test_list_buckets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -783,11 +809,16 @@ async def test_list_buckets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_buckets_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -795,7 +826,9 @@ def test_list_buckets_flattened(): call.return_value = logging_config.ListBucketsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_buckets(parent="parent_value",) + client.list_buckets( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -807,13 +840,16 @@ def test_list_buckets_flattened(): def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_buckets( - logging_config.ListBucketsRequest(), parent="parent_value", + logging_config.ListBucketsRequest(), + parent="parent_value", ) @@ -833,7 +869,9 @@ async def test_list_buckets_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_buckets(parent="parent_value",) + response = await client.list_buckets( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -854,13 +892,15 @@ async def test_list_buckets_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_buckets( - logging_config.ListBucketsRequest(), parent="parent_value", + logging_config.ListBucketsRequest(), + parent="parent_value", ) def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -875,12 +915,21 @@ def test_list_buckets_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -900,7 +949,8 @@ def test_list_buckets_pager(transport_name: str = "grpc"): def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -915,12 +965,21 @@ def test_list_buckets_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -949,16 +1008,27 @@ async def test_list_buckets_async_pager(): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) - async_pager = await client.list_buckets(request={},) + async_pager = await client.list_buckets( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -988,12 +1058,21 @@ async def test_list_buckets_async_pages(): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + 
buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -1004,10 +1083,17 @@ async def test_list_buckets_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetBucketRequest, + dict, + ], +) def test_get_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1046,7 +1132,8 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1062,7 +1149,8 @@ async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1105,7 +1193,9 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1125,7 +1215,10 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1247,23 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.CreateBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1199,7 +1302,8 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1215,7 +1319,8 @@ async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1258,7 +1363,9 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1278,7 +1385,10 @@ def test_create_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1307,13 +1417,23 @@ async def test_create_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UpdateBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1352,7 +1472,8 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1368,7 +1489,8 @@ async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1411,7 +1533,9 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1431,7 +1555,10 @@ def test_update_bucket_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1460,13 +1587,23 @@ async def test_update_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.DeleteBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteBucketRequest, + dict, + ], +) def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1492,7 +1629,8 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1508,7 +1646,8 @@ async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1536,7 +1675,9 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1556,7 +1697,10 @@ def test_delete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1583,13 +1727,23 @@ async def test_delete_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UndeleteBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1615,7 +1769,8 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1631,7 +1786,8 @@ async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1659,7 +1815,9 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1679,7 +1837,10 @@ def test_undelete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1706,13 +1867,23 @@ async def test_undelete_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.ListViewsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListViewsRequest, + dict, + ], +) def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1741,7 +1912,8 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1757,7 +1929,8 @@ async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1768,7 +1941,9 @@ async def test_list_views_async( with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse(next_page_token="next_page_token_value",) + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_views(request) @@ -1788,7 +1963,9 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1808,7 +1985,10 @@ def test_list_views_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1837,11 +2017,16 @@ async def test_list_views_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_views_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1849,7 +2034,9 @@ def test_list_views_flattened(): call.return_value = logging_config.ListViewsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_views(parent="parent_value",) + client.list_views( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1861,13 +2048,16 @@ def test_list_views_flattened(): def test_list_views_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_views( - logging_config.ListViewsRequest(), parent="parent_value", + logging_config.ListViewsRequest(), + parent="parent_value", ) @@ -1887,7 +2077,9 @@ async def test_list_views_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_views(parent="parent_value",) + response = await client.list_views( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1908,13 +2100,15 @@ async def test_list_views_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_views( - logging_config.ListViewsRequest(), parent="parent_value", + logging_config.ListViewsRequest(), + parent="parent_value", ) def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1929,12 +2123,21 @@ def test_list_views_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -1954,7 +2157,8 @@ def test_list_views_pager(transport_name: str = "grpc"): def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1969,12 +2173,21 @@ def test_list_views_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -2003,16 +2216,27 @@ async def test_list_views_async_pager(): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) - async_pager = await client.list_views(request={},) + async_pager = await client.list_views( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2042,12 +2266,21 @@ async def test_list_views_async_pages(): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -2058,10 +2291,17 @@ async def 
test_list_views_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetViewRequest, + dict, + ], +) def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2072,7 +2312,9 @@ def test_get_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.get_view(request) @@ -2092,7 +2334,8 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2108,7 +2351,8 @@ async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2145,7 +2389,9 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2165,7 +2411,10 @@ def test_get_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2194,13 +2443,23 @@ async def test_get_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.CreateViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateViewRequest, + dict, + ], +) def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2211,7 +2470,9 @@ def test_create_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.create_view(request) @@ -2231,7 +2492,8 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2247,7 +2509,8 @@ async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2284,7 +2547,9 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2304,7 +2569,10 @@ def test_create_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2333,13 +2601,23 @@ async def test_create_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UpdateViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateViewRequest, + dict, + ], +) def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2350,7 +2628,9 @@ def test_update_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.update_view(request) @@ -2370,7 +2650,8 @@ def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2386,7 +2667,8 @@ async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2423,7 +2705,9 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2443,7 +2727,10 @@ def test_update_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2472,13 +2759,23 @@ async def test_update_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.DeleteViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteViewRequest, + dict, + ], +) def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2504,7 +2801,8 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2520,7 +2818,8 @@ async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2548,7 +2847,9 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2568,7 +2869,10 @@ def test_delete_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2595,13 +2899,23 @@ async def test_delete_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.ListSinksRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListSinksRequest, + dict, + ], +) def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2630,7 +2944,8 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2646,7 +2961,8 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2657,7 +2973,9 @@ async def test_list_sinks_async( with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse(next_page_token="next_page_token_value",) + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_sinks(request) @@ -2677,7 +2995,9 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2697,7 +3017,10 @@ def test_list_sinks_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2726,11 +3049,16 @@ async def test_list_sinks_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_sinks_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2738,7 +3066,9 @@ def test_list_sinks_flattened(): call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_sinks(parent="parent_value",) + client.list_sinks( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2750,13 +3080,16 @@ def test_list_sinks_flattened(): def test_list_sinks_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_sinks( - logging_config.ListSinksRequest(), parent="parent_value", + logging_config.ListSinksRequest(), + parent="parent_value", ) @@ -2776,7 +3109,9 @@ async def test_list_sinks_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_sinks(parent="parent_value",) + response = await client.list_sinks( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2797,13 +3132,15 @@ async def test_list_sinks_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_sinks( - logging_config.ListSinksRequest(), parent="parent_value", + logging_config.ListSinksRequest(), + parent="parent_value", ) def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2818,12 +3155,21 @@ def test_list_sinks_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2843,7 +3189,8 @@ def test_list_sinks_pager(transport_name: str = "grpc"): def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2858,12 +3205,21 @@ def test_list_sinks_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2892,16 +3248,27 @@ async def test_list_sinks_async_pager(): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) - async_pager = await client.list_sinks(request={},) + async_pager = await client.list_sinks( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2931,12 +3298,21 @@ async def test_list_sinks_async_pages(): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2947,10 +3323,17 @@ async def test_list_sinks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSinkRequest, + dict, + ], +) def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2996,7 +3379,8 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3012,7 +3396,8 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3059,7 +3444,9 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3079,7 +3466,10 @@ def test_get_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3108,11 +3498,16 @@ async def test_get_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_get_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3120,7 +3515,9 @@ def test_get_sink_flattened(): call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_sink(sink_name="sink_name_value",) + client.get_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3132,13 +3529,16 @@ def test_get_sink_flattened(): def test_get_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_sink( - logging_config.GetSinkRequest(), sink_name="sink_name_value", + logging_config.GetSinkRequest(), + sink_name="sink_name_value", ) @@ -3158,7 +3558,9 @@ async def test_get_sink_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_sink(sink_name="sink_name_value",) + response = await client.get_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3179,14 +3581,22 @@ async def test_get_sink_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_sink( - logging_config.GetSinkRequest(), sink_name="sink_name_value", + logging_config.GetSinkRequest(), + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.CreateSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateSinkRequest, + dict, + ], +) def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3232,7 +3642,8 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3248,7 +3659,8 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3295,7 +3707,9 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3315,7 +3729,10 @@ def test_create_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3344,11 +3761,16 @@ async def test_create_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3357,7 +3779,8 @@ def test_create_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_sink( - parent="parent_value", sink=logging_config.LogSink(name="name_value"), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3373,7 +3796,9 @@ def test_create_sink_flattened(): def test_create_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3402,7 +3827,8 @@ async def test_create_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_sink( - parent="parent_value", sink=logging_config.LogSink(name="name_value"), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3433,10 +3859,17 @@ async def test_create_sink_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSinkRequest, + dict, + ], +) def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3482,7 +3915,8 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3498,7 +3932,8 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3545,7 +3980,9 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3565,7 +4002,10 @@ def test_update_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3594,11 +4034,16 @@ async def test_update_sink_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_update_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -3628,7 +4073,9 @@ def test_update_sink_flattened(): def test_update_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3695,10 +4142,17 @@ async def test_update_sink_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.DeleteSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3724,7 +4178,8 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3740,7 +4195,8 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3768,7 +4224,9 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3788,7 +4246,10 @@ def test_delete_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3815,11 +4276,16 @@ async def test_delete_sink_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_delete_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -3827,7 +4293,9 @@ def test_delete_sink_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_sink(sink_name="sink_name_value",) + client.delete_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3839,13 +4307,16 @@ def test_delete_sink_flattened(): def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_sink( - logging_config.DeleteSinkRequest(), sink_name="sink_name_value", + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) @@ -3863,7 +4334,9 @@ async def test_delete_sink_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_sink(sink_name="sink_name_value",) + response = await client.delete_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3884,14 +4357,22 @@ async def test_delete_sink_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_sink( - logging_config.DeleteSinkRequest(), sink_name="sink_name_value", + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.ListExclusionsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListExclusionsRequest, + dict, + ], +) def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3920,7 +4401,8 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3936,7 +4418,8 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3969,7 +4452,9 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3989,7 +4474,10 @@ def test_list_exclusions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4018,11 +4506,16 @@ async def test_list_exclusions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_exclusions_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4030,7 +4523,9 @@ def test_list_exclusions_flattened(): call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_exclusions(parent="parent_value",) + client.list_exclusions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4042,13 +4537,16 @@ def test_list_exclusions_flattened(): def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_exclusions( - logging_config.ListExclusionsRequest(), parent="parent_value", + logging_config.ListExclusionsRequest(), + parent="parent_value", ) @@ -4068,7 +4566,9 @@ async def test_list_exclusions_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_exclusions(parent="parent_value",) + response = await client.list_exclusions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4089,13 +4589,15 @@ async def test_list_exclusions_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_exclusions( - logging_config.ListExclusionsRequest(), parent="parent_value", + logging_config.ListExclusionsRequest(), + parent="parent_value", ) def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4111,10 +4613,14 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4140,7 +4646,8 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4156,10 +4663,14 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4195,10 +4706,14 @@ async def test_list_exclusions_async_pager(): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4208,7 +4723,9 @@ async def test_list_exclusions_async_pager(): ), RuntimeError, ) - async_pager = await client.list_exclusions(request={},) + async_pager = await client.list_exclusions( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -4239,10 +4756,14 @@ async def test_list_exclusions_async_pages(): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4259,10 +4780,17 @@ async def test_list_exclusions_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) def test_get_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4297,7 +4825,8 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4313,7 +4842,8 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4352,7 +4882,9 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4372,7 +4904,10 @@ def test_get_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4401,11 +4936,16 @@ async def test_get_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4413,7 +4953,9 @@ def test_get_exclusion_flattened(): call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_exclusion(name="name_value",) + client.get_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4425,13 +4967,16 @@ def test_get_exclusion_flattened(): def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_exclusion( - logging_config.GetExclusionRequest(), name="name_value", + logging_config.GetExclusionRequest(), + name="name_value", ) @@ -4451,7 +4996,9 @@ async def test_get_exclusion_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion(name="name_value",) + response = await client.get_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4472,14 +5019,22 @@ async def test_get_exclusion_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_exclusion( - logging_config.GetExclusionRequest(), name="name_value", + logging_config.GetExclusionRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.CreateExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) def test_create_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4514,7 +5069,8 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4530,7 +5086,8 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4569,7 +5126,9 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4589,7 +5148,10 @@ def test_create_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4618,11 +5180,16 @@ async def test_create_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -4648,7 +5215,9 @@ def test_create_exclusion_flattened(): def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4709,10 +5278,17 @@ async def test_create_exclusion_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) def test_update_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4747,7 +5323,8 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4763,7 +5340,8 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4802,7 +5380,9 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4822,7 +5402,10 @@ def test_update_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4851,11 +5434,16 @@ async def test_update_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -4885,7 +5473,9 @@ def test_update_exclusion_flattened(): def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4952,10 +5542,17 @@ async def test_update_exclusion_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.DeleteExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) def test_delete_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4981,7 +5578,8 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4997,7 +5595,8 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5025,7 +5624,9 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5045,7 +5646,10 @@ def test_delete_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5072,11 +5676,16 @@ async def test_delete_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5084,7 +5693,9 @@ def test_delete_exclusion_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_exclusion(name="name_value",) + client.delete_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5096,13 +5707,16 @@ def test_delete_exclusion_flattened(): def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_exclusion( - logging_config.DeleteExclusionRequest(), name="name_value", + logging_config.DeleteExclusionRequest(), + name="name_value", ) @@ -5120,7 +5734,9 @@ async def test_delete_exclusion_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_exclusion(name="name_value",) + response = await client.delete_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5141,14 +5757,22 @@ async def test_delete_exclusion_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), name="name_value", + logging_config.DeleteExclusionRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.GetCmekSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) def test_get_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5183,7 +5807,8 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5201,7 +5826,8 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5240,7 +5866,9 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5262,7 +5890,10 @@ def test_get_cmek_settings_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5293,15 +5924,23 @@ async def test_get_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [logging_config.UpdateCmekSettingsRequest, dict,] + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], ) def test_update_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5336,7 +5975,8 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5355,7 +5995,8 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5394,7 +6035,9 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5416,7 +6059,10 @@ def test_update_cmek_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5447,13 +6093,23 @@ async def test_update_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.GetSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) def test_get_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5490,7 +6146,8 @@ def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5506,7 +6163,8 @@ async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5547,7 +6205,9 @@ async def test_get_settings_async_from_dict(): def test_get_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5567,7 +6227,10 @@ def test_get_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5596,11 +6259,16 @@ async def test_get_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_settings_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -5608,7 +6276,9 @@ def test_get_settings_flattened(): call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings(name="name_value",) + client.get_settings( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5620,13 +6290,16 @@ def test_get_settings_flattened(): def test_get_settings_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_settings( - logging_config.GetSettingsRequest(), name="name_value", + logging_config.GetSettingsRequest(), + name="name_value", ) @@ -5646,7 +6319,9 @@ async def test_get_settings_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings(name="name_value",) + response = await client.get_settings( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5667,14 +6342,22 @@ async def test_get_settings_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_settings( - logging_config.GetSettingsRequest(), name="name_value", + logging_config.GetSettingsRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSettingsRequest, + dict, + ], +) def test_update_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5711,7 +6394,8 @@ def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5727,7 +6411,8 @@ async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5768,7 +6453,9 @@ async def test_update_settings_async_from_dict(): def test_update_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5788,7 +6475,10 @@ def test_update_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5817,11 +6507,16 @@ async def test_update_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_settings_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -5847,7 +6542,9 @@ def test_update_settings_flattened(): def test_update_settings_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5908,10 +6605,17 @@ async def test_update_settings_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.CopyLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CopyLogEntriesRequest, + dict, + ], +) def test_copy_log_entries(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5937,7 +6641,8 @@ def test_copy_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5953,7 +6658,8 @@ async def test_copy_log_entries_async( transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5989,7 +6695,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -6009,7 +6716,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = ConfigServiceV2Client(client_options=options, transport=transport,) + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -6025,7 +6735,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -6070,8 +6781,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfigServiceV2GrpcTransport, + ) def test_config_service_v2_base_transport_error(): @@ -6146,7 +6862,8 @@ def test_config_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -6324,7 +7041,8 @@ def test_config_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6336,7 +7054,8 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6445,12 +7164,16 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6458,12 +7181,16 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -6471,7 +7198,9 @@ def test_config_service_v2_grpc_lro_async_client(): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project,) + expected = "projects/{project}/cmekSettings".format( + project=project, + ) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -6492,7 +7221,9 @@ def test_log_bucket_path(): location = "octopus" bucket = "oyster" expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, location=location, bucket=bucket, + project=project, + location=location, + bucket=bucket, ) actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) assert expected == actual @@ -6515,7 +7246,8 @@ def test_log_exclusion_path(): project = "winkle" exclusion = "nautilus" expected = "projects/{project}/exclusions/{exclusion}".format( - project=project, exclusion=exclusion, + project=project, + exclusion=exclusion, ) actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) assert expected == actual @@ -6536,7 +7268,10 @@ def test_parse_log_exclusion_path(): def test_log_sink_path(): project = "squid" sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -6558,8 +7293,13 @@ def test_log_view_path(): location = "nudibranch" bucket = "cuttlefish" view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, location=location, bucket=bucket, view=view, + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) ) actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) assert expected == actual @@ -6581,7 +7321,9 @@ def test_parse_log_view_path(): def test_settings_path(): project = "squid" - expected = "projects/{project}/settings".format(project=project,) + expected = "projects/{project}/settings".format( + project=project, + ) actual = ConfigServiceV2Client.settings_path(project) assert expected == actual @@ -6619,7 +7361,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6637,7 +7381,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6655,7 +7401,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6675,7 +7423,8 @@ def test_common_location_path(): project = "scallop" location = "abalone" expected = 
"projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual @@ -6700,7 +7449,8 @@ def test_client_with_default_client_info(): transports.ConfigServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6709,7 +7459,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6717,7 +7468,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e87e1c26dc4a..09cff71ee85b 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -96,7 +96,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] + "client_class", + [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, + ], ) def test_logging_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -138,7 +142,11 @@ def test_logging_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] + "client_class", + [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, + ], ) def test_logging_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -512,7 +520,9 @@ def test_logging_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -658,10 +668,17 @@ def test_logging_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.DeleteLogRequest, + dict, + ], +) def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -687,7 +704,8 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -703,7 +721,8 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -731,7 +750,9 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -751,7 +772,10 @@ def test_delete_log_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "log_name=log_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -778,11 +802,16 @@ async def test_delete_log_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "log_name=log_name/value", + ) in kw["metadata"] def test_delete_log_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -790,7 +819,9 @@ def test_delete_log_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_log(log_name="log_name_value",) + client.delete_log( + log_name="log_name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -802,13 +833,16 @@ def test_delete_log_flattened(): def test_delete_log_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_log( - logging.DeleteLogRequest(), log_name="log_name_value", + logging.DeleteLogRequest(), + log_name="log_name_value", ) @@ -826,7 +860,9 @@ async def test_delete_log_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_log(log_name="log_name_value",) + response = await client.delete_log( + log_name="log_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -847,14 +883,22 @@ async def test_delete_log_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_log( - logging.DeleteLogRequest(), log_name="log_name_value", + logging.DeleteLogRequest(), + log_name="log_name_value", ) -@pytest.mark.parametrize("request_type", [logging.WriteLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.WriteLogEntriesRequest, + dict, + ], +) def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -882,7 +926,8 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -900,7 +945,8 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -932,7 +978,9 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -968,7 +1016,9 @@ def test_write_log_entries_flattened(): def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1043,10 +1093,17 @@ async def test_write_log_entries_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging.ListLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogEntriesRequest, + dict, + ], +) def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1075,7 +1132,8 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1091,7 +1149,8 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1102,7 +1161,9 @@ async def test_list_log_entries_async( with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_log_entries(request) @@ -1122,7 +1183,9 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1152,7 +1215,9 @@ def test_list_log_entries_flattened(): def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1221,7 +1286,8 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
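The *_flattened and *_flattened_error hunks above all pin down the same generated calling convention. As a reading aid, a minimal usage sketch of that convention, using only the client and request types that appear in these tests (this block is illustrative, not part of the patch):

    # Either pass a request object...
    client.delete_log(request=logging.DeleteLogRequest(log_name="log_name_value"))
    # ...or pass the flattened keyword fields and let the client build it...
    client.delete_log(log_name="log_name_value")
    # ...but combining a positional request with flattened kwargs is rejected,
    # which is exactly what the *_flattened_error tests assert:
    with pytest.raises(ValueError):
        client.delete_log(logging.DeleteLogRequest(), log_name="log_name_value")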
@@ -1236,12 +1302,21 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1258,7 +1333,8 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1273,12 +1349,21 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1307,16 +1392,27 @@ async def test_list_log_entries_async_pager(): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) - async_pager = await client.list_log_entries(request={},) + async_pager = await client.list_log_entries( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1346,12 +1442,21 @@ async def test_list_log_entries_async_pages(): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1363,11 +1468,16 @@ async def test_list_log_entries_async_pages(): @pytest.mark.parametrize( - "request_type", [logging.ListMonitoredResourceDescriptorsRequest, dict,] + "request_type", + [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, + ], ) def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the 
runtime is concerned, @@ -1398,7 +1508,8 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1528,8 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1453,7 +1565,8 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1471,7 +1584,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1503,7 +1617,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
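The pager hunks in these files all stage the same four-page sequence (tokens "abc", "def", "ghi", then none, followed by RuntimeError). A sketch of how such a pager is consumed, assuming only the iteration surface the tests themselves use (`.pages` and `raw_page.next_page_token`; `handle` is a placeholder, not part of the tests):

    pager = client.list_log_entries(request={})
    for entry in pager:  # iterates items, transparently crossing page breaks
        handle(entry)
    for page in client.list_log_entries(request={}).pages:  # one page at a time
        token = page.raw_page.next_page_token  # "abc", "def", "ghi", then empty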
@@ -1521,7 +1636,8 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1565,7 +1681,8 @@ async def test_list_monitored_resource_descriptors_async_pager(): next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1581,7 +1698,9 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), RuntimeError, ) - async_pager = await client.list_monitored_resource_descriptors(request={},) + async_pager = await client.list_monitored_resource_descriptors( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1617,7 +1736,8 @@ async def test_list_monitored_resource_descriptors_async_pages(): next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1642,10 +1762,17 @@ async def test_list_monitored_resource_descriptors_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging.ListLogsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogsRequest, + dict, + ], +) def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1656,7 +1783,8 @@ def test_list_logs(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=["log_names_value"], next_page_token="next_page_token_value", + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -1675,7 +1803,8 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1691,7 +1820,8 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1703,7 +1833,8 @@ async def test_list_logs_async( # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogsResponse( - log_names=["log_names_value"], next_page_token="next_page_token_value", + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) ) response = await client.list_logs(request) @@ -1725,7 +1856,9 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1745,7 +1878,10 @@ def test_list_logs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1774,11 +1910,16 @@ async def test_list_logs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_logs_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1786,7 +1927,9 @@ def test_list_logs_flattened(): call.return_value = logging.ListLogsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_logs(parent="parent_value",) + client.list_logs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1798,13 +1941,16 @@ def test_list_logs_flattened(): def test_list_logs_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_logs( - logging.ListLogsRequest(), parent="parent_value", + logging.ListLogsRequest(), + parent="parent_value", ) @@ -1824,7 +1970,9 @@ async def test_list_logs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_logs(parent="parent_value",) + response = await client.list_logs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1845,13 +1993,15 @@ async def test_list_logs_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_logs( - logging.ListLogsRequest(), parent="parent_value", + logging.ListLogsRequest(), + parent="parent_value", ) def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
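The *_field_headers hunks assert that request routing information also travels out-of-band. Distilled from the assertions above (a transcription of the tests, not new behavior): any request field that forms part of the resource URI is mirrored into gRPC metadata as an `x-goog-request-params` entry.

    request = logging.ListLogsRequest(parent="parent/value")
    client.list_logs(request)
    _, _, kw = call.mock_calls[0]  # kwargs of the stubbed transport call
    assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]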
@@ -1859,11 +2009,29 @@ def test_list_logs_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) @@ -1882,7 +2050,8 @@ def test_list_logs_pager(transport_name: str = "grpc"): def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1890,11 +2059,29 @@ def test_list_logs_pages(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) pages = list(client.list_logs(request={}).pages) @@ -1915,14 +2102,34 @@ async def test_list_logs_async_pager(): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) - async_pager = await client.list_logs(request={},) + async_pager = await client.list_logs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1945,11 +2152,29 @@ async def test_list_logs_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) pages = [] @@ -1959,10 +2184,17 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging.TailLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.TailLogEntriesRequest, + dict, + ], +) def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1991,7 +2223,8 @@ async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2030,7 +2263,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2050,7 +2284,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = LoggingServiceV2Client(client_options=options, transport=transport,) + client = LoggingServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -2066,7 +2303,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2111,8 +2349,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) def test_logging_service_v2_base_transport_error(): @@ -2162,7 +2405,8 @@ def test_logging_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2344,7 +2588,8 @@ def test_logging_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2356,7 +2601,8 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2466,7 +2712,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log,) + expected = "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -2505,7 +2754,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2523,7 +2774,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2541,7 +2794,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -2561,7 +2816,8 @@ def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = LoggingServiceV2Client.common_location_path(project, location) assert expected == actual @@ -2586,7 +2842,8 @@ def test_client_with_default_client_info(): transports.LoggingServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), 
client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2595,7 +2852,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2603,7 +2861,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e6883889dda6..5ce917c6e7e5 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,7 +94,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] + "client_class", + [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, + ], ) def test_metrics_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -136,7 +140,11 @@ def test_metrics_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] + "client_class", + [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, + ], ) def test_metrics_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -510,7 +518,9 @@ def test_metrics_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -656,10 +666,17 @@ def test_metrics_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.ListLogMetricsRequest, + dict, + ], +) def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -688,7 +705,8 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -704,7 +722,8 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -737,7 +756,9 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -757,7 +778,10 @@ def test_list_log_metrics_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -786,11 +810,16 @@ async def test_list_log_metrics_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_log_metrics_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -798,7 +827,9 @@ def test_list_log_metrics_flattened(): call.return_value = logging_metrics.ListLogMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_log_metrics(parent="parent_value",) + client.list_log_metrics( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -810,13 +841,16 @@ def test_list_log_metrics_flattened(): def test_list_log_metrics_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), parent="parent_value", + logging_metrics.ListLogMetricsRequest(), + parent="parent_value", ) @@ -836,7 +870,9 @@ async def test_list_log_metrics_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_log_metrics(parent="parent_value",) + response = await client.list_log_metrics( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. 
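Every test in these files isolates the client with the same stubbing idiom, visible throughout the hunks: patch `__call__` on the transport's bound RPC so nothing leaves the process, then inspect the captured call. Spelled out once as a sketch:

    with mock.patch.object(
        type(client.transport.list_log_metrics), "__call__"
    ) as call:
        call.return_value = logging_metrics.ListLogMetricsResponse()
        client.list_log_metrics(parent="parent_value")
        # `call` now records the request object the client constructed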
@@ -857,13 +893,15 @@ async def test_list_log_metrics_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), parent="parent_value", + logging_metrics.ListLogMetricsRequest(), + parent="parent_value", ) def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -878,12 +916,21 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -903,7 +950,8 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -918,12 +966,21 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -952,16 +1009,27 @@ async def test_list_log_metrics_async_pager(): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) - async_pager = await client.list_log_metrics(request={},) + async_pager = await client.list_log_metrics( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -991,12 +1059,21 @@ async def test_list_log_metrics_async_pages(): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", 
), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -1007,10 +1084,17 @@ async def test_list_log_metrics_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_metrics.GetLogMetricRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.GetLogMetricRequest, + dict, + ], +) def test_get_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1049,7 +1133,8 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1065,7 +1150,8 @@ async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1108,7 +1194,9 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1128,7 +1216,10 @@ def test_get_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1157,11 +1248,16 @@ async def test_get_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_get_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1169,7 +1265,9 @@ def test_get_log_metric_flattened(): call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_log_metric(metric_name="metric_name_value",) + client.get_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1181,13 +1279,16 @@ def test_get_log_metric_flattened(): def test_get_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_log_metric( - logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.GetLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1207,7 +1308,9 @@ async def test_get_log_metric_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_log_metric(metric_name="metric_name_value",) + response = await client.get_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1228,16 +1331,22 @@ async def test_get_log_metric_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_log_metric( - logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.GetLogMetricRequest(), + metric_name="metric_name_value", ) @pytest.mark.parametrize( - "request_type", [logging_metrics.CreateLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.CreateLogMetricRequest, + dict, + ], ) def test_create_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1278,7 +1387,8 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1296,7 +1406,8 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1341,7 +1452,9 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1363,7 +1476,10 @@ def test_create_log_metric_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1394,11 +1510,16 @@ async def test_create_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1409,7 +1530,8 @@ def test_create_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_log_metric( - parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1425,7 +1547,9 @@ def test_create_log_metric_flattened(): def test_create_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1456,7 +1580,8 @@ async def test_create_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_log_metric( - parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1488,11 +1613,16 @@ async def test_create_log_metric_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [logging_metrics.UpdateLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.UpdateLogMetricRequest, + dict, + ], ) def test_update_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1533,7 +1663,8 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
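The *_empty_call hunks describe themselves as coverage failsafes; the behavior they pin down is that a call with neither a request object nor flattened fields still goes through. A sketch, assumed from the tests' own comments:

    client.create_log_metric()  # request == None, no flattened fields
    call.assert_called()        # a default request object was still sent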
@@ -1551,7 +1682,8 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1596,7 +1728,9 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1618,7 +1752,10 @@ def test_update_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1649,11 +1786,16 @@ async def test_update_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_update_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1681,7 +1823,9 @@ def test_update_log_metric_flattened(): def test_update_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1745,11 +1889,16 @@ async def test_update_log_metric_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [logging_metrics.DeleteLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.DeleteLogMetricRequest, + dict, + ], ) def test_delete_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1777,7 +1926,8 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1795,7 +1945,8 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1825,7 +1976,9 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1847,7 +2000,10 @@ def test_delete_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1876,11 +2032,16 @@ async def test_delete_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1890,7 +2051,9 @@ def test_delete_log_metric_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_log_metric(metric_name="metric_name_value",) + client.delete_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1902,13 +2065,16 @@ def test_delete_log_metric_flattened(): def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.DeleteLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1928,7 +2094,9 @@ async def test_delete_log_metric_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_log_metric(metric_name="metric_name_value",) + response = await client.delete_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1949,7 +2117,8 @@ async def test_delete_log_metric_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.DeleteLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1960,7 +2129,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1980,7 +2150,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = MetricsServiceV2Client(client_options=options, transport=transport,) + client = MetricsServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1996,7 +2169,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2041,8 +2215,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) def test_metrics_service_v2_base_transport_error(): @@ -2091,7 +2270,8 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2273,7 +2453,8 @@ def test_metrics_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2285,7 +2466,8 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2396,7 +2578,8 @@ def test_log_metric_path(): project = "squid" metric = "clam" expected = "projects/{project}/metrics/{metric}".format( - project=project, metric=metric, + project=project, + metric=metric, ) actual = MetricsServiceV2Client.log_metric_path(project, metric) assert expected == actual @@ -2436,7 +2619,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2454,7 +2639,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2472,7 +2659,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = MetricsServiceV2Client.common_project_path(project) assert expected == actual @@ -2492,7 +2681,8 @@ def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = MetricsServiceV2Client.common_location_path(project, location) assert expected == actual @@ -2517,7 +2707,8 @@ def test_client_with_default_client_info(): transports.MetricsServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2526,7 +2717,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2534,7 +2726,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 353e7d2f65f7..0bcde45de0b4 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -100,7 +100,15 @@ def test_minimal_record(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, 
None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None success = filter_obj.filter(record) @@ -128,7 +136,15 @@ def test_record_with_request(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_path = "http://testserver/123" @@ -170,7 +186,15 @@ def test_record_with_traceparent_request(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_path = "http://testserver/123" @@ -368,7 +392,17 @@ def test_emit_minimal(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, None, _GLOBAL_RESOURCE, None, None, None, False, None, None,), + ( + record, + None, + _GLOBAL_RESOURCE, + None, + None, + None, + False, + None, + None, + ), ) def test_emit_manual_field_override(self): @@ -437,7 +471,9 @@ def test_emit_with_custom_formatter(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") handler.setFormatter(logFormatter) @@ -473,7 +509,9 @@ def test_emit_dict(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = {"x": "test"} logname = "logname" @@ -506,7 +544,9 @@ def test_emit_w_json_extras(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = "message" json_fields = {"hello": "world"} @@ -541,7 +581,9 @@ def test_emit_with_encoded_json(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) logFormatter = logging.Formatter(fmt='{ "x" : "%(name)s" }') handler.setFormatter(logFormatter) @@ -574,7 +616,9 @@ def test_format_with_arguments(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = "name: %s" name_arg = "Daniel" diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 5db098c29a8b..5031748f9e12 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -90,7 +90,15 @@ def test_format_minimal(self): import json handler = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_payload = { "severity": "INFO", @@ -118,7 +126,15 @@ def test_format_with_quotes(self): handler = self._make_one() message = '"test"' 
expected_result = '\\"test\\"' - record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) record.created = None handler.filter(record) result = handler.format(record) @@ -151,7 +167,15 @@ def test_format_with_line_break(self): handler = self._make_one() message = "test\ntest" expected_result = "test\\ntest" - record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) record.created = None handler.filter(record) result = handler.format(record) @@ -169,7 +193,13 @@ def test_format_with_custom_formatter(self): message = "test" expected_result = "logname :: INFO :: test" record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None, + "logname", + logging.INFO, + None, + None, + message, + None, + None, ) record.created = None handler.filter(record) @@ -187,7 +217,13 @@ def test_dict(self): message = {"x": "test"} expected_result = '"x": "test"' record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None, + "logname", + logging.INFO, + None, + None, + message, + None, + None, ) record.created = None handler.filter(record) @@ -206,7 +242,13 @@ def test_encoded_json(self): handler.setFormatter(logFormatter) expected_result = '"name": "logname"' record = logging.LogRecord( - "logname", logging.INFO, None, None, None, None, None, + "logname", + logging.INFO, + None, + None, + None, + None, + None, ) record.created = None handler.filter(record) @@ -225,7 +267,13 @@ def test_format_with_arguments(self): name_arg = "Daniel" expected_result = "name: Daniel" record = logging.LogRecord( - None, logging.INFO, None, None, message, name_arg, None, + None, + logging.INFO, + None, + None, + message, + name_arg, + None, ) record.created = None handler.filter(record) @@ -375,7 +423,13 @@ def test_format_with_json_fields(self): expected_result = "name: Daniel" json_fields = {"hello": "world", "number": 12} record = logging.LogRecord( - None, logging.INFO, None, None, message, name_arg, None, + None, + logging.INFO, + None, + None, + message, + name_arg, + None, ) record.created = None setattr(record, "json_fields", json_fields) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 0c547d736c88..07e1a7e663e6 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -64,7 +64,9 @@ def test_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, message, resource=_GLOBAL_RESOURCE, + record, + message, + resource=_GLOBAL_RESOURCE, ) def test_trace_send(self): @@ -86,7 +88,10 @@ def test_trace_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, trace=trace) transport.worker.enqueue.assert_called_once_with( - record, message, resource=_GLOBAL_RESOURCE, trace=trace, + record, + message, + resource=_GLOBAL_RESOURCE, + trace=trace, ) def test_span_send(self): @@ -108,7 +113,10 @@ def test_span_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, span_id=span_id) transport.worker.enqueue.assert_called_once_with( - record, message, 
resource=_GLOBAL_RESOURCE, span_id=span_id, + record, + message, + resource=_GLOBAL_RESOURCE, + span_id=span_id, ) def test_flush(self): diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index d8c4bf57eb37..127c856b4ad9 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -215,7 +215,9 @@ def test_list_sinks(self): ) as call: call.return_value = logging_v2.types.ListSinksResponse(sinks=[sink_msg]) - result = client.list_sinks(self.PARENT_PATH,) + result = client.list_sinks( + self.PARENT_PATH, + ) sinks = list(result) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 1eae1cda6899..597313824502 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -443,8 +443,8 @@ def test_log_w_dict_resource(self): def test_log_lowercase_severity(self): """ - lower case severity strings should be accepted - """ + lower case severity strings should be accepted + """ from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) @@ -785,7 +785,10 @@ def test_list_entries_explicit(self): }, ) # verify that default filter is 24 hours - LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) + LOG_FILTER = "logName=projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) combined_filter = ( INPUT_FILTER + " AND " @@ -826,7 +829,10 @@ def test_list_entries_explicit_timestamp(self): self.assertEqual(len(entries), 0) # self.assertEqual(client._listed, LISTED) # check call payload - LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) + LOG_FILTER = "logName=projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) combined_filter = INPUT_FILTER + " AND " + LOG_FILTER self.assertEqual( client._connection._called_with, @@ -958,7 +964,9 @@ def test_list_entries_folder(self): returned = {"entries": ENTRIES} client._connection = _Connection(returned) - iterator = client.list_entries(resource_names=[f"folder/{FOLDER_ID}"],) + iterator = client.list_entries( + resource_names=[f"folder/{FOLDER_ID}"], + ) entries = list(iterator) # Check the entries. 
self.assertEqual(len(entries), 1) From 4f8930780460f5d5a575b3f61f78f10b434ef838 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 18:09:28 +0200 Subject: [PATCH 610/855] chore(deps): update dependency google-cloud-bigquery to v2.34.3 (#514) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b234d190bc77..cc4d87623b1f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.34.2 +google-cloud-bigquery==2.34.3 google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From a3698ec80d6ff55c794a97c17df59ec0cb96fe97 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 17:34:20 +0000 Subject: [PATCH 611/855] chore(python): add E231 to .flake8 ignore list (#516) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- packages/google-cloud-logging/.flake8 | 2 +- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 1a7e4989e86a..7f85294c9359 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude environment test code. tests/environment/** diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 87dd00611576..9e0a9356b6eb 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From 529a0c8b422306630c447b4614185bbe0bb2e61d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 00:22:15 +0000 Subject: [PATCH 612/855] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#521) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 9e0a9356b6eb..22cc254afa2c 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 62eb5a77d9a3..46d237160f6d 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From 7b0805466138c3bfec935b7faacf92acd68a8dc0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 02:22:15 +0000 Subject: [PATCH 613/855] chore(python): Enable size-label bot (#523) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.github/auto-label.yaml | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 22cc254afa2c..58a0b153bf0e 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/packages/google-cloud-logging/.github/auto-label.yaml b/packages/google-cloud-logging/.github/auto-label.yaml index 1e4706499b3a..09c8d735b456 100644 --- a/packages/google-cloud-logging/.github/auto-label.yaml +++ b/packages/google-cloud-logging/.github/auto-label.yaml @@ -1,7 +1,2 @@ -product: true requestsize: enabled: true -staleness: - pullrequest: true - old: 30 - extraold: 60 From 85ed1d29175c677ce1537b96c3239a2b58d70250 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:36:11 +0000 Subject: [PATCH 614/855] chore(python): refactor unit / system test dependency install (#525) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-logging/noxfile.py | 127 +++++++++++++----- 2 files changed, 96 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 58a0b153bf0e..fa5762290c5b 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index d2f8f0e56276..49f754c74c64 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -20,16 +20,49 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [ + "flask", + "webob", + "django", +] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ + "google-cloud-bigquery", + "google-cloud-pubsub", + "google-cloud-storage", + "google-cloud-testutils", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -81,26 +114,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def 
install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - session.install("flask", "-c", constraints_path) - session.install("webob", "-c", constraints_path) - session.install("django", "-c", constraints_path) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -124,6 +172,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -146,23 +223,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install( - "mock", - "pytest", - "google-cloud-testutils", - "google-cloud-bigquery", - "google-cloud-pubsub", - "google-cloud-storage", - "google-cloud-testutils", - "-c", - constraints_path, - ) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From 541c91f7af062dd3d8021d23066ddf7df02fa652 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Apr 2022 05:11:23 -0400 Subject: [PATCH 615/855] chore: allow releases on previous majors (#512) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: allow releases on previous majors * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google-cloud-logging/.github/release-please.yml | 12 ++++++++++++ packages/google-cloud-logging/owlbot.py | 2 ++ 2 files changed, 14 insertions(+) diff --git a/packages/google-cloud-logging/.github/release-please.yml b/packages/google-cloud-logging/.github/release-please.yml index 466597e5b196..5161ab347cdf 100644 --- a/packages/google-cloud-logging/.github/release-please.yml +++ b/packages/google-cloud-logging/.github/release-please.yml @@ -1,2 +1,14 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v2 + handleGHRelease: true + releaseType: python +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index b266ed13f8fe..f78036e69827 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -103,5 +103,7 @@ python.py_samples() +python.configure_previous_major_version_branches() + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 2dea24023cb059dbe7e354d0b97d97bc03b05fa6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 07:14:13 -0400 Subject: [PATCH 616/855] chore(python): add license header to auto-label.yaml (#526) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-logging/.github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index fa5762290c5b..bc893c979e20 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/packages/google-cloud-logging/.github/auto-label.yaml b/packages/google-cloud-logging/.github/auto-label.yaml index 09c8d735b456..41bff0b5375a 100644 --- a/packages/google-cloud-logging/.github/auto-label.yaml +++ b/packages/google-cloud-logging/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. requestsize: enabled: true From 53ea57df9371242fd17a4fa2d74c8cecb17f51ae Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Fri, 8 Apr 2022 14:39:35 -0600 Subject: [PATCH 617/855] deps: Pin jinja2 version to fix CI (#522) * deps: Pin jinja2 version to fix CI * deps: Pin werkzeug version in sub-repo Co-authored-by: Anthonios Partheniou --- .../tests/environment/deployable/python/requirements.txt | 2 ++ tests/environment | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index a93899463b1a..c466074b0547 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -4,3 +4,5 @@ click==7.1.2 pytz==2021.1 pandas>=1.1.5 itsdangerous==2.0.1 +jinja2==3.0.3 +werkzeug==2.0.2 \ No newline at end of file diff --git a/tests/environment b/tests/environment index 21f1ea63a567..fd113e1b444b 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 21f1ea63a567dfd1b601f7cb8ee6177c77f82cc5 +Subproject commit fd113e1b444b823a62f0e55eecc14a8dc34f26ee From 87ebff7937eb7d244638f66b82b779622083e2ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Apr 2022 02:02:23 +0200 Subject: [PATCH 618/855] chore(deps): update dependency google-cloud-bigquery to v3 (#520) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index cc4d87623b1f..541b22a365f9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.34.3 +google-cloud-bigquery==3.0.1 google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From a82ed05193399e12afc3e81b331f5516eb236c3a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Apr 2022 07:43:57 -0400 Subject: 
[PATCH 619/855] chore: use gapic-generator-python 0.65.1 (#534) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 20 +- .../services/config_service_v2/client.py | 20 +- .../config_service_v2/transports/base.py | 5 + .../config_service_v2/transports/grpc.py | 4 + .../logging_service_v2/async_client.py | 11 +- .../services/logging_service_v2/client.py | 22 +- .../logging_service_v2/transports/base.py | 5 + .../logging_service_v2/transports/grpc.py | 4 + .../metrics_service_v2/async_client.py | 2 +- .../services/metrics_service_v2/client.py | 2 +- .../metrics_service_v2/transports/base.py | 5 + .../metrics_service_v2/transports/grpc.py | 4 + .../cloud/logging_v2/types/log_entry.py | 2 +- .../google/cloud/logging_v2/types/logging.py | 4 +- .../cloud/logging_v2/types/logging_metrics.py | 2 +- .../snippet_metadata_logging_v2.json | 2956 ++++++++++++++++- .../logging_v2/test_config_service_v2.py | 103 +- .../logging_v2/test_logging_service_v2.py | 95 +- .../logging_v2/test_metrics_service_v2.py | 85 +- 19 files changed, 3075 insertions(+), 276 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 916fbd18bed0..8087660792b3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -421,7 +421,6 @@ async def create_bucket( entries. After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -506,7 +505,6 @@ async def update_bucket( After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -584,7 +582,6 @@ async def delete_bucket( purged and all log entries in the bucket will be permanently deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -647,7 +644,6 @@ async def undelete_bucket( deleted can be undeleted within the grace period of 7 days. - .. code-block:: python from google.cloud import logging_v2 @@ -890,7 +886,6 @@ async def create_view( r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. - .. code-block:: python from google.cloud import logging_v2 @@ -968,7 +963,6 @@ async def update_view( indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes. - .. 
code-block:: python from google.cloud import logging_v2 @@ -1044,7 +1038,6 @@ async def delete_view( can delete the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1359,7 +1352,6 @@ async def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python from google.cloud import logging_v2 @@ -1492,7 +1484,6 @@ async def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python from google.cloud import logging_v2 @@ -1657,7 +1648,6 @@ async def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -1765,7 +1755,6 @@ async def list_exclusions( r"""Lists all the exclusions on the \_Default sink in a parent resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2013,7 +2002,6 @@ async def create_exclusion( parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2143,7 +2131,6 @@ async def update_exclusion( r"""Changes one or more properties of an existing exclusion in the \_Default sink. - .. code-block:: python from google.cloud import logging_v2 @@ -2396,7 +2383,6 @@ async def get_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2497,7 +2483,6 @@ async def update_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2595,7 +2580,6 @@ async def get_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2730,7 +2714,6 @@ async def update_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2849,7 +2832,6 @@ async def copy_log_entries( r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. - .. code-block:: python from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index d14ea70da2f8..35e511abd34a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -720,7 +720,6 @@ def create_bucket( entries. After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -806,7 +805,6 @@ def update_bucket( After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -885,7 +883,6 @@ def delete_bucket( purged and all log entries in the bucket will be permanently deleted. - .. 
code-block:: python from google.cloud import logging_v2 @@ -949,7 +946,6 @@ def undelete_bucket( deleted can be undeleted within the grace period of 7 days. - .. code-block:: python from google.cloud import logging_v2 @@ -1194,7 +1190,6 @@ def create_view( r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. - .. code-block:: python from google.cloud import logging_v2 @@ -1273,7 +1268,6 @@ def update_view( indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1350,7 +1344,6 @@ def delete_view( can delete the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1644,7 +1637,6 @@ def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python from google.cloud import logging_v2 @@ -1777,7 +1769,6 @@ def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python from google.cloud import logging_v2 @@ -1931,7 +1922,6 @@ def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -2028,7 +2018,6 @@ def list_exclusions( r"""Lists all the exclusions on the \_Default sink in a parent resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2254,7 +2243,6 @@ def create_exclusion( parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2384,7 +2372,6 @@ def update_exclusion( r"""Changes one or more properties of an existing exclusion in the \_Default sink. - .. code-block:: python from google.cloud import logging_v2 @@ -2626,7 +2613,6 @@ def get_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2728,7 +2714,6 @@ def update_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2827,7 +2812,6 @@ def get_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2962,7 +2946,6 @@ def update_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -3081,7 +3064,6 @@ def copy_log_entries( r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. - .. code-block:: python from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 95de06d1a972..685f174b4412 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -88,6 +88,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -597,5 +598,9 @@ def copy_log_entries( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("ConfigServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 228f1c9a32cf..25de4885159f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1023,5 +1023,9 @@ def copy_log_entries( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 7973d4395117..ec78309a6f0b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Mapping, Optional, AsyncIterable, Awaitable, @@ -229,7 +230,6 @@ async def delete_log( deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -329,7 +329,7 @@ async def write_log_entries( *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -343,7 +343,6 @@ async def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python from google.cloud import logging_v2 @@ -411,7 +410,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): + labels (:class:`Mapping[str, str]`): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -535,7 +534,6 @@ async def list_log_entries( For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python from google.cloud import logging_v2 @@ -693,7 +691,6 @@ async def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python from google.cloud import logging_v2 @@ -787,7 +784,6 @@ async def list_logs( or billing accounts. Only logs that have entries are listed. - .. code-block:: python from google.cloud import logging_v2 @@ -910,7 +906,6 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. - .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 8638cfb10044..47c5bfe82db0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -438,7 +448,6 @@ def delete_log( deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -527,7 +536,7 @@ def write_log_entries( *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -541,7 +550,6 @@ def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python from google.cloud import logging_v2 @@ -609,7 +617,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -721,7 +729,6 @@ def list_log_entries( For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python from google.cloud import logging_v2 @@ -868,7 +875,6 @@ def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python from google.cloud import logging_v2 @@ -954,7 +960,6 @@ def list_logs( or billing accounts. Only logs that have entries are listed. - .. code-block:: python from google.cloud import logging_v2 @@ -1066,7 +1071,6 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. - .. code-block:: python from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 716a2fbbc313..ceefeda8a50a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -87,6 +87,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -296,5 +297,9 @@ def tail_log_entries( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("LoggingServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 176d4475f20f..22affa06bc37 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -409,5 +409,9 @@ def tail_log_entries( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index af6265e82c26..e9b59bf30135 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index bb2221b857b5..df5d4d2fc917 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index cc483aeff32b..eae5f5da7caf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -87,6 +87,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -255,5 +256,9 @@ def delete_log_metric( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("MetricsServiceV2Transport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 6c1fd9b73082..12d70452f908 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -363,5 +363,9 @@ def delete_log_metric( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 2bdea1b73fb1..cc34694281ad 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -144,7 +144,7 @@ class LogEntry(proto.Message): http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): + labels (Mapping[str, str]): Optional. A map of key, value pairs that provides additional information about the log entry. The labels can be user-defined or system-defined. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 383a4ef772a8..42bb9dbb8846 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -103,7 +103,7 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this @@ -192,7 +192,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. 
Attributes: - log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + log_entry_errors (Mapping[int, google.rpc.status_pb2.Status]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 323599423adf..bcad752b3bf6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -128,7 +128,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): + label_extractors (Mapping[str, str]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. Each label key diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json index b6ad799b168a..657563cd6dec 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -1,16 +1,57 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.logging.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-logging" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.copy_log_entries", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CopyLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_log_entries" }, + "description": "Sample for CopyLogEntries", "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", "segments": [ { @@ -43,18 +84,50 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_copy_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.copy_log_entries", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CopyLogEntries" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_log_entries" }, + "description": "Sample for CopyLogEntries", "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", "segments": [ { @@ -87,19 +160,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { @@ -132,18 +237,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { @@ -176,19 +313,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { @@ -221,18 +398,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { @@ -265,19 +482,59 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { @@ -310,18 +567,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { @@ -354,19 +651,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { @@ -399,18 +728,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { @@ -443,19 +804,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { @@ -486,18 +878,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { @@ -528,19 +951,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": 
{ + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { @@ -571,18 +1029,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { @@ -613,19 +1106,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { @@ -656,18 +1184,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
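Every RPC in this metadata is catalogued twice, once per client surface; the entries marked "async": true target ConfigServiceV2AsyncClient, whose methods are coroutines. A hedged sketch of the async flavor for DeleteSink (resource names are placeholders; the metadata records no resultType because the RPC returns Empty):

import asyncio
from google.cloud import logging_v2

async def main() -> None:
    client = logging_v2.ConfigServiceV2AsyncClient()
    # No return payload, matching the absent resultType above.
    await client.delete_sink(
        sink_name="projects/my-project/sinks/my-sink",
    )

asyncio.run(main())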
"google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { @@ -698,19 +1261,50 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { @@ -741,18 +1335,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { @@ -783,19 +1408,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" }, { + "canonical": true, 
"clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { @@ -828,18 +1485,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { @@ -872,19 +1561,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { @@ -917,18 +1638,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { @@ -961,19 +1714,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { @@ -1006,18 +1795,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { @@ -1050,19 +1875,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, + "description": "Sample for GetSettings", "file": "logging_v2_generated_config_service_v2_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { @@ -1095,18 +1956,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, + "description": "Sample for GetSettings", "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { @@ -1139,19 +2036,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { @@ -1184,18 +2117,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { @@ -1228,19 +2197,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { @@ -1273,18 +2274,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": 
"ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { @@ -1317,19 +2350,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, + "description": "Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { @@ -1362,18 +2431,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, + "description": "Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { @@ -1406,19 +2511,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { @@ -1451,18 +2592,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { @@ -1495,19 +2672,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + 
"name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { @@ -1540,18 +2753,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { @@ -1584,19 +2833,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { @@ -1629,18 +2914,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" 
+ }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { @@ -1673,19 +2994,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { @@ -1716,18 +3068,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { @@ -1758,19 +3141,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
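Each record's segments array (unchanged by this diff) marks line ranges inside the generated sample by phase: REQUEST_INITIALIZATION, RESPONSE_HANDLING, and so on. A hedged sketch of excerpting a sample with those ranges; treating start/end as 1-based inclusive line numbers is an assumption, and extract_segment is a hypothetical helper, not library API:

import json

def extract_segment(path, start, end):
    # Assumption: start/end are 1-based, inclusive line numbers.
    with open(path) as fp:
        return "".join(fp.readlines()[start - 1 : end])

with open("snippet_metadata_logging_v2.json") as fp:
    meta = json.load(fp)

snippet = meta["snippets"][0]
for seg in snippet["segments"]:
    # Void RPCs record only an "end" for RESPONSE_HANDLING, so guard both keys.
    if seg.get("type") == "RESPONSE_HANDLING" and "start" in seg:
        print(extract_segment(snippet["file"], seg["start"], seg["end"]))

The "file" value is relative to samples/generated_samples/, so the sketch assumes it runs from that directory.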
"logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { @@ -1803,18 +3218,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { @@ -1847,19 +3294,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "update_cmek_settings" }, + "description": "Sample for 
UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { @@ -1892,18 +3371,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "update_cmek_settings" }, + "description": "Sample for UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { @@ -1936,19 +3447,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { @@ -1981,18 +3536,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_exclusion", "method": { + "fullName": 
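Beyond segments, each record now carries machine-readable fields: a top-level clientLibrary block, plus canonical, fullName, parameters, resultType, and title per snippet, so documentation tooling no longer has to parse the sample sources. (Note the generator emits the metadata parameter type with a dropped closing bracket, Sequence[Tuple[str, str]; the strings are informational only.) A hedged sketch of a consumer, assuming the JSON file named in the diff is in the working directory:

import json

with open("snippet_metadata_logging_v2.json") as fp:
    meta = json.load(fp)

print(meta["clientLibrary"]["name"])  # -> google-cloud-logging
for snippet in meta["snippets"]:
    method = snippet["clientMethod"]
    print(
        snippet["regionTag"],
        method.get("fullName", "?"),
        "->",
        method.get("resultType", "(no result)"),  # absent for Delete*/Undelete*
    )

The resultType values span plain messages (LogBucket, LogSink), pagers (ListBucketsPager and friends, which fetch further pages during iteration), and long-running operations (google.api_core.operation.Operation, whose result() blocks until the server finishes).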
"google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { @@ -2025,19 +3624,59 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" }, + "description": "Sample for UpdateSettings", "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", "segments": [ { @@ -2070,18 +3709,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + 
{ + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" }, + "description": "Sample for UpdateSettings", "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", "segments": [ { @@ -2114,19 +3793,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { @@ -2159,18 +3882,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { @@ -2203,19 +3970,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": 
true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { @@ -2248,18 +4047,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { @@ -2292,19 +4123,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { @@ 
-2335,18 +4201,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { @@ -2377,19 +4278,63 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { @@ -2422,18 +4367,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": 
"str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { @@ -2466,19 +4455,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { @@ -2511,18 +4536,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { @@ -2555,19 +4616,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { @@ -2600,18 +4693,50 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { @@ -2644,19 +4769,51 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": 
"Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { @@ -2689,18 +4846,50 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { @@ -2733,19 +4922,67 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { @@ -2778,18 +5015,66 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { @@ -2822,19 +5107,59 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.create_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { @@ -2867,18 +5192,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.create_log_metric", "method": { + 
"fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { @@ -2911,19 +5276,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { @@ -2954,18 +5354,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { @@ -2996,19 +5431,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { @@ -3041,18 +5512,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { @@ -3085,19 +5592,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { @@ -3130,18 +5673,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { @@ -3174,19 +5753,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { @@ -3219,18 +5838,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"logging_v2_generated_metrics_service_v2_update_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { @@ -3263,7 +5922,8 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py" } ] } diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 75227b5b35c1..18982084f797 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -94,24 +94,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_config_service_v2_client_from_service_account_info(client_class): +def test_config_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -140,27 +142,33 @@ def test_config_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_config_service_v2_client_from_service_account_file(client_class): +def test_config_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: 
factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_config_service_v2_client_get_transport_class(): @@ -1031,7 +1039,7 @@ async def test_list_buckets_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1077,7 +1085,9 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_buckets(request={})).pages: + async for page_ in ( + await client.list_buckets(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2239,7 +2249,7 @@ async def test_list_views_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2285,7 +2295,9 @@ async def test_list_views_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_views(request={})).pages: + async for page_ in ( + await client.list_views(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3271,7 +3283,7 @@ async def test_list_sinks_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -3317,7 +3329,9 @@ async def test_list_sinks_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_sinks(request={})).pages: + async for page_ in ( + await client.list_sinks(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -4728,7 +4742,7 @@ async def test_list_exclusions_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -4774,7 +4788,9 @@ async def test_list_exclusions_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_exclusions(request={})).pages: + async for page_ in ( + await client.list_exclusions(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6779,6 +6795,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def 
test_transport_kind(transport_name): + transport = ConfigServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ConfigServiceV2Client( @@ -6851,6 +6880,14 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -7016,24 +7053,40 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ ) -def test_config_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_config_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_config_service_v2_grpc_transport_channel(): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 09cff71ee85b..1f74ac5b2ec4 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -96,24 +96,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_logging_service_v2_client_from_service_account_info(client_class): +def test_logging_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -142,27 +144,33 @@ def test_logging_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + 
"client_class,transport_name", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_logging_service_v2_client_from_service_account_file(client_class): +def test_logging_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_logging_service_v2_client_get_transport_class(): @@ -1415,7 +1423,7 @@ async def test_list_log_entries_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1461,7 +1469,9 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_entries(request={})).pages: + async for page_ in ( + await client.list_log_entries(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1703,7 +1713,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1756,7 +1766,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): pages = [] async for page_ in ( await client.list_monitored_resource_descriptors(request={}) - ).pages: + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2132,7 +2142,7 @@ async def test_list_logs_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2178,7 +2188,9 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_logs(request={})).pages: + async for page_ in ( + await client.list_logs(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2347,6 +2359,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = LoggingServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert 
transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = LoggingServiceV2Client( @@ -2394,6 +2419,14 @@ def test_logging_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2563,24 +2596,40 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport ) -def test_logging_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_logging_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_logging_service_v2_grpc_transport_channel(): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 5ce917c6e7e5..53ced9ce47ae 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,24 +94,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_metrics_service_v2_client_from_service_account_info(client_class): +def test_metrics_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -140,27 +142,33 @@ def test_metrics_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), 
], ) -def test_metrics_service_v2_client_from_service_account_file(client_class): +def test_metrics_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_metrics_service_v2_client_get_transport_class(): @@ -1032,7 +1040,7 @@ async def test_list_log_metrics_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1078,7 +1086,9 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_metrics(request={})).pages: + async for page_ in ( + await client.list_log_metrics(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2213,6 +2223,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = MetricsServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
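# The hunks above apply one refactor uniformly across all three gapic test
# modules (config, logging, metrics): single-transport tests are parametrized
# over transport names, and a new `kind` property on transports is asserted.
# A minimal, self-contained sketch of that pattern, restated outside the diff
# noise (the test name and use of AnonymousCredentials here are illustrative
# assumptions; the patch's own tests are the hunks above):
import pytest
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.metrics_service_v2 import (
    MetricsServiceV2Client,
)

@pytest.mark.parametrize("transport_name", ["grpc"])
def test_transport_kind_sketch(transport_name):
    # get_transport_class looks up the transport class registered under the
    # given name; AnonymousCredentials avoids an Application Default
    # Credentials lookup in unit tests.
    transport = MetricsServiceV2Client.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name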
client = MetricsServiceV2Client( @@ -2259,6 +2282,14 @@ def test_metrics_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2428,24 +2459,40 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport ) -def test_metrics_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_metrics_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_metrics_service_v2_grpc_transport_channel(): From 6e07d11134e3a0df7ac33994db918018fe970f02 Mon Sep 17 00:00:00 2001 From: losalex <90795544+losalex@users.noreply.github.com> Date: Sat, 16 Apr 2022 01:00:33 -0500 Subject: [PATCH 620/855] fix: Reenable staleness bot (#535) * fix: Reenable staleness bot Reenable staleness bot on the repo * Update auto-label.yaml --- packages/google-cloud-logging/.github/auto-label.yaml | 4 ++++ packages/google-cloud-logging/owlbot.py | 1 + 2 files changed, 5 insertions(+) diff --git a/packages/google-cloud-logging/.github/auto-label.yaml b/packages/google-cloud-logging/.github/auto-label.yaml index 41bff0b5375a..ccad49b4ebfb 100644 --- a/packages/google-cloud-logging/.github/auto-label.yaml +++ b/packages/google-cloud-logging/.github/auto-label.yaml @@ -13,3 +13,7 @@ # limitations under the License. 
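# Hedged reading of the staleness block this hunk re-enables (field semantics
# assumed from the auto-label bot's conventions, not stated in this patch):
# `pullrequest: true` opts pull requests into staleness labeling, and `old` /
# `extraold` are presumably the day thresholds after which the bot applies its
# stale and extra-stale labels.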
requestsize: enabled: true +staleness: + pullrequest: true + old: 30 + extraold: 60 diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index f78036e69827..cf8252bcc2f5 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -66,6 +66,7 @@ ".coveragerc", "docs/multiprocessing.rst", ".github/workflows", # exclude gh actions as credentials are needed for tests + ".github/auto-label.yaml", ]) # adjust .trampolinerc for environment tests From 0be75e32f22bc0f2c3630269c2f8f0ba5c355417 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Tue, 19 Apr 2022 10:14:16 -0600 Subject: [PATCH 621/855] chore: Update env-tests-logging submodule (#537) --- .../tests/environment/deployable/java/Dockerfile | 4 ++-- .../tests/environment/deployable/java/pom.xml | 2 +- .../tests/environment/deployable/python/requirements.txt | 2 +- .../envctl/env_scripts/python/appengine_standard.sh | 2 +- tests/environment | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile index 86bec8092075..dae2b02f1228 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile @@ -13,14 +13,14 @@ # limitations under the License. # compile local java-logging library -FROM maven:3.8.1 AS lib-env +FROM maven:3.8.4 AS lib-env WORKDIR /app COPY _library ./ RUN mvn verify --fail-never RUN mvn -Dmaven.test.skip=true package # Compile the deployable code. -FROM maven:3.8.1 AS build-env +FROM maven:3.8.4 AS build-env WORKDIR /app COPY pom.xml /app/pom.xml # copy over compiled library diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml b/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml index e4c74e012dd0..0c9e5f4434ea 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml +++ b/packages/google-cloud-logging/tests/environment/deployable/java/pom.xml @@ -105,7 +105,7 @@ io.netty netty-tcnative-boringssl-static - 2.0.34.Final + 2.0.51.Final diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index c466074b0547..b3e97e794731 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -5,4 +5,4 @@ pytz==2021.1 pandas>=1.1.5 itsdangerous==2.0.1 jinja2==3.0.3 -werkzeug==2.0.2 \ No newline at end of file +werkzeug==2.0.2 diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh index 3b92e2b68c11..7ee3fefad1e1 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_standard.sh @@ -57,7 +57,7 @@ deploy() { tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/python-logging # copy test scripts cp $REPO_ROOT/deployable/python/*.py $TMP_DIR - echo "-e ./python-logging" | cat $REPO_ROOT/deployable/python/requirements.txt - > $TMP_DIR/requirements.txt + echo $'\n-e ./python-logging' | cat 
$REPO_ROOT/deployable/python/requirements.txt - > $TMP_DIR/requirements.txt # build app.yaml cat << EOF > $TMP_DIR/app.yaml runtime: python37 diff --git a/tests/environment b/tests/environment index fd113e1b444b..be8b03308472 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit fd113e1b444b823a62f0e55eecc14a8dc34f26ee +Subproject commit be8b033084726bb7a3f62130157e1da3500f6855 From d551b4e1929345be29dfc6e49a63839e7979902a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:34:07 -0400 Subject: [PATCH 622/855] chore(python): add nox session to sort python imports (#538) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-logging/noxfile.py | 27 ++++++++++++++++--- .../samples/snippets/noxfile.py | 21 +++++++++++++++ 3 files changed, 47 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index bc893c979e20..7c454abf76f3 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 49f754c74c64..6cfcca2ee52b 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -25,7 +25,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -92,7 +93,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -103,7 +104,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order.
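+ # (--fss is short for --force-sort-within-sections: plain `import x` and + # `from x import y` lines are sorted together within each section rather + # than grouped by import form. The session is invoked as `nox -s format`, + # running isort first so black can re-wrap the sorted imports.)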
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 949e0fde9ae1..38bb0a572b81 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +169,32 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 08acfe99f828ec9bce15eea00ddca63665e3fdf1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:24:17 +0000 Subject: [PATCH 623/855] chore(python): use ubuntu 22.04 in docs image (#539) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.kokoro/docker/docs/Dockerfile | 20 +++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 7c454abf76f3..64f82d6bf4bc 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index 4e1b1fb8b5a5..238b87b9d1c9 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
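# (Ubuntu 22.04 ships Python 3.10 rather than 3.8, which is presumably why this image now builds Python 3.8.11 from source later in this file.)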
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From 6be33cf7fe0f497bb3b0ba6e38259d11a1a5b414 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:38:29 -0600 Subject: [PATCH 624/855] chore: use gapic-generator-python 0.65.2 (#540) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.2 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 156 ++++++------- .../logging_service_v2/async_client.py | 38 ++-- .../metrics_service_v2/async_client.py | 30 +-- .../logging_v2/test_config_service_v2.py | 208 +++++++++--------- .../logging_v2/test_logging_service_v2.py | 22 +- .../logging_v2/test_metrics_service_v2.py | 42 ++-- 6 files changed, 248 insertions(+), 248 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 8087660792b3..0a325ce86daa 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -235,9 +235,9 @@ async def list_buckets( from google.cloud import logging_v2 - def sample_list_buckets(): + async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListBucketsRequest( @@ -248,7 +248,7 @@ def sample_list_buckets(): page_result = client.list_buckets(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -351,9 +351,9 @@ async def get_bucket( from google.cloud import logging_v2 - def sample_get_bucket(): + async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetBucketRequest( @@ -361,7 +361,7 @@ def sample_get_bucket(): ) # Make the request - response = client.get_bucket(request=request) + response = await client.get_bucket(request=request) # Handle the response print(response) @@ -425,9 +425,9 @@ 
async def create_bucket( from google.cloud import logging_v2 - def sample_create_bucket(): + async def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CreateBucketRequest( @@ -436,7 +436,7 @@ def sample_create_bucket(): ) # Make the request - response = client.create_bucket(request=request) + response = await client.create_bucket(request=request) # Handle the response print(response) @@ -509,9 +509,9 @@ async def update_bucket( from google.cloud import logging_v2 - def sample_update_bucket(): + async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateBucketRequest( @@ -519,7 +519,7 @@ def sample_update_bucket(): ) # Make the request - response = client.update_bucket(request=request) + response = await client.update_bucket(request=request) # Handle the response print(response) @@ -586,9 +586,9 @@ async def delete_bucket( from google.cloud import logging_v2 - def sample_delete_bucket(): + async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteBucketRequest( @@ -596,7 +596,7 @@ def sample_delete_bucket(): ) # Make the request - client.delete_bucket(request=request) + await client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -648,9 +648,9 @@ async def undelete_bucket( from google.cloud import logging_v2 - def sample_undelete_bucket(): + async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UndeleteBucketRequest( @@ -658,7 +658,7 @@ def sample_undelete_bucket(): ) # Make the request - client.undelete_bucket(request=request) + await client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -709,9 +709,9 @@ async def list_views( from google.cloud import logging_v2 - def sample_list_views(): + async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListViewsRequest( @@ -722,7 +722,7 @@ def sample_list_views(): page_result = client.list_views(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -817,9 +817,9 @@ async def get_view( from google.cloud import logging_v2 - def sample_get_view(): + async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetViewRequest( @@ -827,7 +827,7 @@ def sample_get_view(): ) # Make the request - response = client.get_view(request=request) + response = await client.get_view(request=request) # Handle the response print(response) @@ -890,9 +890,9 @@ async def create_view( from google.cloud import logging_v2 - def sample_create_view(): + async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # 
Initialize request argument(s) request = logging_v2.CreateViewRequest( @@ -901,7 +901,7 @@ def sample_create_view(): ) # Make the request - response = client.create_view(request=request) + response = await client.create_view(request=request) # Handle the response print(response) @@ -967,9 +967,9 @@ async def update_view( from google.cloud import logging_v2 - def sample_update_view(): + async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateViewRequest( @@ -977,7 +977,7 @@ def sample_update_view(): ) # Make the request - response = client.update_view(request=request) + response = await client.update_view(request=request) # Handle the response print(response) @@ -1042,9 +1042,9 @@ async def delete_view( from google.cloud import logging_v2 - def sample_delete_view(): + async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteViewRequest( @@ -1052,7 +1052,7 @@ def sample_delete_view(): ) # Make the request - client.delete_view(request=request) + await client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1103,9 +1103,9 @@ async def list_sinks( from google.cloud import logging_v2 - def sample_list_sinks(): + async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListSinksRequest( @@ -1116,7 +1116,7 @@ def sample_list_sinks(): page_result = client.list_sinks(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1227,9 +1227,9 @@ async def get_sink( from google.cloud import logging_v2 - def sample_get_sink(): + async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetSinkRequest( @@ -1237,7 +1237,7 @@ def sample_get_sink(): ) # Make the request - response = client.get_sink(request=request) + response = await client.get_sink(request=request) # Handle the response print(response) @@ -1356,9 +1356,9 @@ async def create_sink( from google.cloud import logging_v2 - def sample_create_sink(): + async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1371,7 +1371,7 @@ def sample_create_sink(): ) # Make the request - response = client.create_sink(request=request) + response = await client.create_sink(request=request) # Handle the response print(response) @@ -1488,9 +1488,9 @@ async def update_sink( from google.cloud import logging_v2 - def sample_update_sink(): + async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1503,7 +1503,7 @@ def sample_update_sink(): ) # Make the request - response = client.update_sink(request=request) + response = await client.update_sink(request=request) # Handle the response print(response) @@ -1652,9 +1652,9 @@ async def delete_sink( from 
google.cloud import logging_v2 - def sample_delete_sink(): + async def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteSinkRequest( @@ -1662,7 +1662,7 @@ def sample_delete_sink(): ) # Make the request - client.delete_sink(request=request) + await client.delete_sink(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): @@ -1759,9 +1759,9 @@ async def list_exclusions( from google.cloud import logging_v2 - def sample_list_exclusions(): + async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListExclusionsRequest( @@ -1772,7 +1772,7 @@ def sample_list_exclusions(): page_result = client.list_exclusions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1883,9 +1883,9 @@ async def get_exclusion( from google.cloud import logging_v2 - def sample_get_exclusion(): + async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetExclusionRequest( @@ -1893,7 +1893,7 @@ def sample_get_exclusion(): ) # Make the request - response = client.get_exclusion(request=request) + response = await client.get_exclusion(request=request) # Handle the response print(response) @@ -2006,9 +2006,9 @@ async def create_exclusion( from google.cloud import logging_v2 - def sample_create_exclusion(): + async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = logging_v2.LogExclusion() @@ -2021,7 +2021,7 @@ def sample_create_exclusion(): ) # Make the request - response = client.create_exclusion(request=request) + response = await client.create_exclusion(request=request) # Handle the response print(response) @@ -2135,9 +2135,9 @@ async def update_exclusion( from google.cloud import logging_v2 - def sample_update_exclusion(): + async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = logging_v2.LogExclusion() @@ -2150,7 +2150,7 @@ def sample_update_exclusion(): ) # Make the request - response = client.update_exclusion(request=request) + response = await client.update_exclusion(request=request) # Handle the response print(response) @@ -2275,9 +2275,9 @@ async def delete_exclusion( from google.cloud import logging_v2 - def sample_delete_exclusion(): + async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteExclusionRequest( @@ -2285,7 +2285,7 @@ def sample_delete_exclusion(): ) # Make the request - client.delete_exclusion(request=request) + await client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -2387,9 +2387,9 @@ async def get_cmek_settings( from google.cloud import logging_v2 - def sample_get_cmek_settings(): + async def sample_get_cmek_settings(): # Create a client - 
client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetCmekSettingsRequest( @@ -2397,7 +2397,7 @@ def sample_get_cmek_settings(): ) # Make the request - response = client.get_cmek_settings(request=request) + response = await client.get_cmek_settings(request=request) # Handle the response print(response) @@ -2487,9 +2487,9 @@ async def update_cmek_settings( from google.cloud import logging_v2 - def sample_update_cmek_settings(): + async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateCmekSettingsRequest( @@ -2497,7 +2497,7 @@ def sample_update_cmek_settings(): ) # Make the request - response = client.update_cmek_settings(request=request) + response = await client.update_cmek_settings(request=request) # Handle the response print(response) @@ -2584,9 +2584,9 @@ async def get_settings( from google.cloud import logging_v2 - def sample_get_settings(): + async def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetSettingsRequest( @@ -2594,7 +2594,7 @@ def sample_get_settings(): ) # Make the request - response = client.get_settings(request=request) + response = await client.get_settings(request=request) # Handle the response print(response) @@ -2718,9 +2718,9 @@ async def update_settings( from google.cloud import logging_v2 - def sample_update_settings(): + async def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateSettingsRequest( @@ -2728,7 +2728,7 @@ def sample_update_settings(): ) # Make the request - response = client.update_settings(request=request) + response = await client.update_settings(request=request) # Handle the response print(response) @@ -2836,9 +2836,9 @@ async def copy_log_entries( from google.cloud import logging_v2 - def sample_copy_log_entries(): + async def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CopyLogEntriesRequest( @@ -2851,7 +2851,7 @@ def sample_copy_log_entries(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ec78309a6f0b..32a1d1808cc6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -234,9 +234,9 @@ async def delete_log( from google.cloud import logging_v2 - def sample_delete_log(): + async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogRequest( @@ -244,7 +244,7 @@ def sample_delete_log(): ) # Make the request - 
client.delete_log(request=request) + await client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -347,9 +347,9 @@ async def write_log_entries( from google.cloud import logging_v2 - def sample_write_log_entries(): + async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) entries = logging_v2.LogEntry() @@ -360,7 +360,7 @@ def sample_write_log_entries(): ) # Make the request - response = client.write_log_entries(request=request) + response = await client.write_log_entries(request=request) # Handle the response print(response) @@ -538,9 +538,9 @@ async def list_log_entries( from google.cloud import logging_v2 - def sample_list_log_entries(): + async def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( @@ -551,7 +551,7 @@ def sample_list_log_entries(): page_result = client.list_log_entries(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -695,9 +695,9 @@ async def list_monitored_resource_descriptors( from google.cloud import logging_v2 - def sample_list_monitored_resource_descriptors(): + async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListMonitoredResourceDescriptorsRequest( @@ -707,7 +707,7 @@ def sample_list_monitored_resource_descriptors(): page_result = client.list_monitored_resource_descriptors(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -788,9 +788,9 @@ async def list_logs( from google.cloud import logging_v2 - def sample_list_logs(): + async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogsRequest( @@ -801,7 +801,7 @@ def sample_list_logs(): page_result = client.list_logs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -910,9 +910,9 @@ def tail_log_entries( from google.cloud import logging_v2 - def sample_tail_log_entries(): + async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( @@ -930,10 +930,10 @@ def request_generator(): yield request # Make the request - stream = client.tail_log_entries(requests=request_generator()) + stream = await client.tail_log_entries(requests=request_generator()) # Handle the response - for response in stream: + async for response in stream: print(response) Args: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index e9b59bf30135..816b70695642 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -220,9 +220,9 @@ async def list_log_metrics( from google.cloud import logging_v2 - def sample_list_log_metrics(): + async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogMetricsRequest( @@ -233,7 +233,7 @@ def sample_list_log_metrics(): page_result = client.list_log_metrics(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -341,9 +341,9 @@ async def get_log_metric( from google.cloud import logging_v2 - def sample_get_log_metric(): + async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetLogMetricRequest( @@ -351,7 +351,7 @@ def sample_get_log_metric(): ) # Make the request - response = client.get_log_metric(request=request) + response = await client.get_log_metric(request=request) # Handle the response print(response) @@ -461,9 +461,9 @@ async def create_log_metric( from google.cloud import logging_v2 - def sample_create_log_metric(): + async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -476,7 +476,7 @@ def sample_create_log_metric(): ) # Make the request - response = client.create_log_metric(request=request) + response = await client.create_log_metric(request=request) # Handle the response print(response) @@ -586,9 +586,9 @@ async def update_log_metric( from google.cloud import logging_v2 - def sample_update_log_metric(): + async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -601,7 +601,7 @@ def sample_update_log_metric(): ) # Make the request - response = client.update_log_metric(request=request) + response = await client.update_log_metric(request=request) # Handle the response print(response) @@ -722,9 +722,9 @@ async def delete_log_metric( from google.cloud import logging_v2 - def sample_delete_log_metric(): + async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogMetricRequest( @@ -732,7 +732,7 @@ def sample_delete_log_metric(): ) # Make the request - client.delete_log_metric(request=request) + await client.delete_log_metric(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 18982084f797..caa7bd6892f1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -771,7 +771,7 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. 
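# (Every *_field_headers test in these generated suites follows the same shape: set a routing field such as request.parent, invoke the RPC against a mocked stub, then assert that the "x-goog-request-params" metadata echoes it, e.g. "parent=parent_value". The change in this commit is purely to the dummy values, "parent/value" becoming "parent_value".)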
request = logging_config.ListBucketsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -787,7 +787,7 @@ def test_list_buckets_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -801,7 +801,7 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -819,7 +819,7 @@ async def test_list_buckets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -950,7 +950,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogBucket) for i in results) @@ -1211,7 +1211,7 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: @@ -1227,7 +1227,7 @@ def test_get_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1241,7 +1241,7 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: @@ -1259,7 +1259,7 @@ async def test_get_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1381,7 +1381,7 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: @@ -1397,7 +1397,7 @@ def test_create_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1411,7 +1411,7 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: @@ -1429,7 +1429,7 @@ async def test_create_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1551,7 +1551,7 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: @@ -1567,7 +1567,7 @@ def test_update_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1581,7 +1581,7 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: @@ -1599,7 +1599,7 @@ async def test_update_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1693,7 +1693,7 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: @@ -1709,7 +1709,7 @@ def test_delete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1723,7 +1723,7 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: @@ -1739,7 +1739,7 @@ async def test_delete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1833,7 +1833,7 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: @@ -1849,7 +1849,7 @@ def test_undelete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1863,7 +1863,7 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: @@ -1879,7 +1879,7 @@ async def test_undelete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1981,7 +1981,7 @@ def test_list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1997,7 +1997,7 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2011,7 +2011,7 @@ async def test_list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -2029,7 +2029,7 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2160,7 +2160,7 @@ def test_list_views_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogView) for i in results) @@ -2409,7 +2409,7 @@ def test_get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: @@ -2425,7 +2425,7 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2439,7 +2439,7 @@ async def test_get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: @@ -2457,7 +2457,7 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2567,7 +2567,7 @@ def test_create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: @@ -2583,7 +2583,7 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2597,7 +2597,7 @@ async def test_create_view_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateViewRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: @@ -2615,7 +2615,7 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2725,7 +2725,7 @@ def test_update_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: @@ -2741,7 +2741,7 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2755,7 +2755,7 @@ async def test_update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: @@ -2773,7 +2773,7 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2867,7 +2867,7 @@ def test_delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: @@ -2883,7 +2883,7 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2897,7 +2897,7 @@ async def test_delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: @@ -2913,7 +2913,7 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -3015,7 +3015,7 @@ def test_list_sinks_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -3031,7 +3031,7 @@ def test_list_sinks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3045,7 +3045,7 @@ async def test_list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.ListSinksRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -3063,7 +3063,7 @@ async def test_list_sinks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3194,7 +3194,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogSink) for i in results) @@ -3466,7 +3466,7 @@ def test_get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3482,7 +3482,7 @@ def test_get_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -3496,7 +3496,7 @@ async def test_get_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3514,7 +3514,7 @@ async def test_get_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -3729,7 +3729,7 @@ def test_create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3745,7 +3745,7 @@ def test_create_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3759,7 +3759,7 @@ async def test_create_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3777,7 +3777,7 @@ async def test_create_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4002,7 +4002,7 @@ def test_update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -4018,7 +4018,7 @@ def test_update_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4032,7 +4032,7 @@ async def test_update_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -4050,7 +4050,7 @@ async def test_update_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4246,7 +4246,7 @@ def test_delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -4262,7 +4262,7 @@ def test_delete_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4276,7 +4276,7 @@ async def test_delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -4292,7 +4292,7 @@ async def test_delete_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4474,7 +4474,7 @@ def test_list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4490,7 +4490,7 @@ def test_list_exclusions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4504,7 +4504,7 @@ async def test_list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4522,7 +4522,7 @@ async def test_list_exclusions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4653,7 +4653,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogExclusion) for i in results) @@ -4906,7 +4906,7 @@ def test_get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4922,7 +4922,7 @@ def test_get_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -4936,7 +4936,7 @@ async def test_get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4954,7 +4954,7 @@ async def test_get_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5150,7 +5150,7 @@ def test_create_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -5166,7 +5166,7 @@ def test_create_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -5180,7 +5180,7 @@ async def test_create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -5198,7 +5198,7 @@ async def test_create_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -5404,7 +5404,7 @@ def test_update_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -5420,7 +5420,7 @@ def test_update_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5434,7 +5434,7 @@ async def test_update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -5452,7 +5452,7 @@ async def test_update_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5648,7 +5648,7 @@ def test_delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5664,7 +5664,7 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5678,7 +5678,7 @@ async def test_delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5694,7 +5694,7 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5890,7 +5890,7 @@ def test_get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5908,7 +5908,7 @@ def test_get_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5922,7 +5922,7 @@ async def test_get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5942,7 +5942,7 @@ async def test_get_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6059,7 +6059,7 @@ def test_update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
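    # The shared shape of every field-header test in this file, as a hedged
    # sketch (client construction elided here, assumed from earlier in the
    # file): patching the bound transport callable intercepts the request
    # before any network I/O, so the test can read back the metadata the
    # client attached.
    #
    #   with mock.patch.object(
    #       type(client.transport.get_cmek_settings), "__call__"
    #   ) as call:
    #       call.return_value = logging_config.CmekSettings()
    #       client.get_cmek_settings(request={"name": "name_value"})
    #       _, _, kw = call.mock_calls[0]  # (name, args, kwargs) triple
    #       assert any(m[0] == "x-goog-request-params" for m in kw["metadata"])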
with mock.patch.object( @@ -6077,7 +6077,7 @@ def test_update_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6091,7 +6091,7 @@ async def test_update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6111,7 +6111,7 @@ async def test_update_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6229,7 +6229,7 @@ def test_get_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -6245,7 +6245,7 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6259,7 +6259,7 @@ async def test_get_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -6277,7 +6277,7 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6477,7 +6477,7 @@ def test_update_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -6493,7 +6493,7 @@ def test_update_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6507,7 +6507,7 @@ async def test_update_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -6525,7 +6525,7 @@ async def test_update_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 1f74ac5b2ec4..e7cbfcd8a1fb 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -766,7 +766,7 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. 
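    # "log_name_value" below is only a non-empty placeholder for the routing
    # check; in real requests the field carries a fully qualified resource
    # name. A hedged sketch with an illustrative project and log id:
    #
    #   request = logging.DeleteLogRequest(
    #       log_name="projects/my-project/logs/my-log"
    #   )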
request = logging.DeleteLogRequest() - request.log_name = "log_name/value" + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -782,7 +782,7 @@ def test_delete_log_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "log_name=log_name/value", + "log_name=log_name_value", ) in kw["metadata"] @@ -796,7 +796,7 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = "log_name/value" + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -812,7 +812,7 @@ async def test_delete_log_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "log_name=log_name/value", + "log_name=log_name_value", ) in kw["metadata"] @@ -1334,7 +1334,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, log_entry.LogEntry) for i in results) @@ -1617,7 +1617,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all( isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) @@ -1874,7 +1874,7 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1890,7 +1890,7 @@ def test_list_logs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1904,7 +1904,7 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1922,7 +1922,7 @@ async def test_list_logs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2053,7 +2053,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, str) for i in results) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 53ced9ce47ae..456d43946760 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -772,7 +772,7 @@ def test_list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. 
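    # Likewise, "parent_value" stands in for a real parent resource. Log
    # metrics are project-scoped, so a hedged sketch of a production request
    # (illustrative project id):
    #
    #   request = logging_metrics.ListLogMetricsRequest(
    #       parent="projects/my-project"
    #   )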
request = logging_metrics.ListLogMetricsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -788,7 +788,7 @@ def test_list_log_metrics_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -802,7 +802,7 @@ async def test_list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -820,7 +820,7 @@ async def test_list_log_metrics_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -951,7 +951,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_metrics.LogMetric) for i in results) @@ -1212,7 +1212,7 @@ def test_get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1228,7 +1228,7 @@ def test_get_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1242,7 +1242,7 @@ async def test_get_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1260,7 +1260,7 @@ async def test_get_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1470,7 +1470,7 @@ def test_create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1488,7 +1488,7 @@ def test_create_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1502,7 +1502,7 @@ async def test_create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
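    # A hedged sketch of the real call the next mock stands in for, using the
    # generated client layout (illustrative project and metric names; assumes
    # ambient Application Default Credentials at runtime):
    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )
    from google.cloud.logging_v2.types import LogMetric

    metrics_client = MetricsServiceV2Client()
    metrics_client.create_log_metric(
        parent="projects/my-project",
        metric=LogMetric(name="error_count", filter="severity>=ERROR"),
    )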
with mock.patch.object( @@ -1522,7 +1522,7 @@ async def test_create_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1746,7 +1746,7 @@ def test_update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1764,7 +1764,7 @@ def test_update_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1778,7 +1778,7 @@ async def test_update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1798,7 +1798,7 @@ async def test_update_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1994,7 +1994,7 @@ def test_delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2012,7 +2012,7 @@ def test_delete_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -2026,7 +2026,7 @@ async def test_delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2044,7 +2044,7 @@ async def test_delete_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] From 02831684d13fb0bce7e00b591b55e706bb58605a Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Thu, 28 Apr 2022 13:55:34 -0600 Subject: [PATCH 625/855] docs: Add link to interactive walkthrough (#541) --- packages/google-cloud-logging/README.rst | 8 ++++ .../docs/_static/guide-me.svg | 45 +++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 packages/google-cloud-logging/docs/_static/guide-me.svg diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index e5017619e38c..93b601ba9ab4 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -34,6 +34,14 @@ In order to use this library, you first need to go through the following steps: .. _Enable the Cloud Logging API.: https://cloud.google.com/logging .. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +For an interactive walkthrough on how to use this library in a python application, click the Guide Me button below: + +.. raw:: html + +
+ [raw HTML for the Guide Me button lost in extraction] Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-logging/docs/_static/guide-me.svg b/packages/google-cloud-logging/docs/_static/guide-me.svg new file mode 100644 index 000000000000..c0196b96b31c --- /dev/null +++ b/packages/google-cloud-logging/docs/_static/guide-me.svg @@ -0,0 +1,45 @@ + [45 lines of SVG markup for the "GUIDE ME" button; XML tags lost in extraction] From e53b5deb064f694f9b298bd30e61d21b62102032 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:12:18 -0400 Subject: [PATCH 626/855] chore: [autoapprove] update readme_gen.py to include autoescape True (#546) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-logging/scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 64f82d6bf4bc..b631901e99f4 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py index d309d6e97518..91b59676bfc7 100644 --- a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From 5dfc396cced795beefa3465ba3081af7e67652a0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 May 2022 15:00:42 -0700 Subject: [PATCH 627/855] chore(python): auto approve template changes (#549) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-logging/.github/auto-approve.yml diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index b631901e99f4..757c9dca75ad 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License.
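# A hedged aside on the readme_gen.py change above: with autoescape=True,
# Jinja2 HTML-escapes substituted values, for example:
#
#   import jinja2
#   env = jinja2.Environment(autoescape=True)
#   print(env.from_string("{{ s }}").render(s="<b>hi</b>"))
#   # -> &lt;b&gt;hi&lt;/b&gt;
#
# For these reStructuredText templates the escaping is mostly defensive,
# but it silences linters that flag unescaped Jinja environments.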
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/packages/google-cloud-logging/.github/auto-approve.yml b/packages/google-cloud-logging/.github/auto-approve.yml new file mode 100644 index 000000000000..311ebbb853a9 --- /dev/null +++ b/packages/google-cloud-logging/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From 4a3621ac02933c86c3f7976d6f067ac82a2133ab Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 8 May 2022 14:30:50 +0200 Subject: [PATCH 628/855] chore(deps): update all dependencies (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Drew Brown Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 541b22a365f9..acdfd427675c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==3.0.1 -google-cloud-storage==2.2.1 -google-cloud-pubsub==2.11.0 +google-cloud-storage==2.3.0 +google-cloud-pubsub==2.12.0 From 680e93125d8418e1f3859ab60845616d926ee859 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 May 2022 12:14:17 -0400 Subject: [PATCH 629/855] chore(main): release 3.1.0 (#479) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 28 ++++++++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 9bfce6bf1010..f859d72363f9 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,34 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.1.0](https://github.com/googleapis/python-logging/compare/v3.0.0...v3.1.0) (2022-05-08) + + +### Features + +* KMS configuration in settings ([#489](https://github.com/googleapis/python-logging/issues/489)) ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) +* Update Logging API with latest changes ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#494](https://github.com/googleapis/python-logging/issues/494)) ([ab14563](https://github.com/googleapis/python-logging/commit/ab145630ffbb25a88cc058569b9e425e62b32ced)) +* fix system test for mtls ([#485](https://github.com/googleapis/python-logging/issues/485)) 
([96bb6f7](https://github.com/googleapis/python-logging/commit/96bb6f786c91656b52624fbbf52e036b1a908d53)) +* Reenable staleness bot ([#535](https://github.com/googleapis/python-logging/issues/535)) ([1595e42](https://github.com/googleapis/python-logging/commit/1595e4203faeb3d46b28a7d98f68761998e3aa26)) +* remove unnecessary detect_resource calls from CloudLoggingHandler ([#484](https://github.com/googleapis/python-logging/issues/484)) ([def7440](https://github.com/googleapis/python-logging/commit/def7440ac6964451f3202b5117e3060ec62045b0)) +* resolve DuplicateCredentialArgs error when using credentials_file ([265061e](https://github.com/googleapis/python-logging/commit/265061eae8396caaef3fdfeae80e0a120f9a5cda)) + + +### Dependencies + +* Pin jinja2 version to fix CI ([#522](https://github.com/googleapis/python-logging/issues/522)) ([383f2f0](https://github.com/googleapis/python-logging/commit/383f2f0062d3703dfc7e2c331562fb88327cdf38)) + + +### Documentation + +* add generated snippets ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) +* Add link to interactive walkthrough ([#541](https://github.com/googleapis/python-logging/issues/541)) ([422a77d](https://github.com/googleapis/python-logging/commit/422a77d93655fba3406ecf397cf417ad37dd1ce1)) + ## [3.0.0](https://github.com/googleapis/python-logging/compare/v2.7.0...v3.0.0) (2022-01-27) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 2b39d7bbd804..dc26595c0dc4 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.0.0" +version = "3.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 61ee1b8963f822937abaf99ad82058f939847dae Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Sun, 22 May 2022 12:26:03 -0600 Subject: [PATCH 630/855] docs: Change button in README to .png file (#554) * fix: Change button to .png file * Change the "Guide Me" button to a .png file to work with RST `image` * Avoids `raw` directive disabled by PyPi * Fixes #553 * Fix unexpected unindent in docs Add copy of png file to render correctly in GitHub readme and Sphinx generated docs. --- packages/google-cloud-logging/README.rst | 9 ++++----- .../google-cloud-logging/_static/guide-me.png | Bin 0 -> 5600 bytes .../docs/_static/guide-me.png | Bin 0 -> 5600 bytes 3 files changed, 4 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-logging/_static/guide-me.png create mode 100644 packages/google-cloud-logging/docs/_static/guide-me.png diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 93b601ba9ab4..240faa3c2397 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -36,11 +36,10 @@ In order to use this library, you first need to go through the following steps: For an interactive walkthrough on how to use this library in a python application, click the Guide Me button below: -.. raw:: html - - - - +|guide| + +.. 
|guide| image:: _static/guide-me.png :target: https://console.cloud.google.com/?walkthrough_id=logging__logging-python Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-logging/_static/guide-me.png b/packages/google-cloud-logging/_static/guide-me.png new file mode 100644 index 0000000000000000000000000000000000000000..0ce1a8cc6bb079e11b97aaa9199b98236c87549d GIT binary patch literal 5600 [base85-encoded PNG data omitted: two identical 5600-byte guide-me.png literals, one for _static/ and one for docs/_static/; the tail of the second literal continues below]
zO6kYfaLU|CV_0RFr|~{^>{z4ewTHOw0BtR=oo8ECH0H5xFDJ0kQcG8o%*#;Bm~VO&7gVHjp)}3d4lek#RaSO|A6rV%DRr8!0{QIV@^Q^*gD8K!edMB{l39n3+qLj|4QkY5-{jD?QF}Ksam!C|bbW`e@t@XCg z^rFb(VVXf1Pg+`E+QrN(qNWLz?QRoeWsAwpCLRRZi;3OK_g?EObw9%~xrT>*0KzAZ z+{6@CL9(BoV(P3hbYiu3YAMX%W!2CT2)!}SlYIaZaYIu-O=TFGxnU?>VhLhY)eP*Z z^0AXQ>2{zJ7Q<5GGtr9twN*HqLqFht3~a}x&oR^ykBmiV8oIHEo45*eO-VXBU(F1# zIJ2tan}(kV*V@p8O1;`S{G4%f8MMp(<;%SDwg<6_QNLU|lxr5y+QAdPWBuT8MWx~9 zDBdm4RoWHU7bF`%Lx`!4qx>k)LBQ|QPabcdE+^n-ga4GN30PrYk21Mo$|&h!oEwuovTJjVgC9G%FEea>B*3S= zs=dz|6Z2&^VcL&T5)nYtqYD3-#{4XRRFg?_db4|Unvdq$s+yV&MQ;zhIy+$t z>`+LbgIqN>9>3YfV+)G>HgPg#wpJml;63F1hsL}iKx1YIGcJcJhxE(Hvd$|^!s1io zVUDn|5lyI_yJUsIqc}>QdFlBM{@|4*PnGrd(`&Om0?w3?UZLh4JG0*L`EIk;CG zp_?a<>9)4Y)yv~>VQk71@J(SD3(&4imWPkKRpEUBu>JCl!)e^<+il41kP9k>lZHGa zpI1-sbK|!t7n=Trk{%d1V1)idmUYiL#!=y$i=aI@rvSw zA7g=07RSAJzA9xl!q2}pi!-A2=^-hGmNjm{u&>>IMU0~<#52{MW!)e0bWG$x`}kQE zMn(D%CgZ(4@X(hN5vrQ1xPm2P;{!oHZj{_B$7n!1b?ztQr3ya21bX10P(#3X;71ZB z`fN<<$e_+Z*nDqZ?EV~!ADu(1Bq*Bzb)LgHRz=E$9XB*^B Date: Mon, 23 May 2022 12:58:35 -0600 Subject: [PATCH 631/855] chore: update environment tests submodule (#558) --- .../environment/deployable/python/snippets.py | 18 ++++++++++-- .../tests/environment/noxfile.py | 8 ++++-- .../tests/environment/tests/common/common.py | 14 +++++++--- .../tests/environment/tests/common/python.py | 28 ++++++++++++------- .../tests/java/test_appengine_flex.py | 1 + .../tests/java/test_appengine_standard.py | 1 + .../environment/tests/java/test_cloudrun.py | 2 +- .../environment/tests/java/test_compute.py | 1 + .../environment/tests/java/test_kubernetes.py | 8 +++++- .../tests/nodejs/test_appengine_standard.py | 2 -- .../environment/tests/nodejs/test_cloudrun.py | 2 +- .../tests/nodejs/test_functions.py | 1 - .../tests/nodejs/test_kubernetes.py | 2 +- tests/environment | 2 +- 14 files changed, 63 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index a2fd2c82e206..e7db34651a6f 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -160,26 +160,38 @@ def pylogging_flask( app = flask.Flask(__name__) with app.test_request_context( - path, base_url, headers={"User-Agent": agent, "X_CLOUD_TRACE_CONTEXT": trace, "TRACEPARENT":traceparent} + path, + base_url, + headers={ + "User-Agent": agent, + "X_CLOUD_TRACE_CONTEXT": trace, + "TRACEPARENT": traceparent, + }, ): logging.info(log_text) + def pylogging_pandas(log_text="pylogging_pandas", **kwargs): """ Ensure pandas dataframes are handled properly https://github.com/googleapis/python-logging/issues/409 """ import pandas as pd - df = pd.DataFrame(columns=['log_text']) + + df = pd.DataFrame(columns=["log_text"]) df = df.append({"log_text": log_text}, ignore_index=True) logging.error(df) -def pylogging_exception(log_text="pylogging_exception", exception_text="Test", **kwargs): + +def pylogging_exception( + log_text="pylogging_exception", exception_text="Test", **kwargs +): try: raise Exception(exception_text) except Exception: logging.exception(log_text) + def print_handlers(**kwargs): root_logger = logging.getLogger() handlers_str = ", ".join([type(h).__name__ for h in root_logger.handlers]) diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 666a9946bba2..5e9dc99e377f 100644 --- 
a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -88,10 +88,14 @@ def lint(session): f"{target}/{path}" for path in os.listdir(target) if path.endswith(".py") ] session.run( - "black", "--check", *black_files, + "black", + "--check", + *black_files, ) session.run( - "flake8", "--exclude=deployable/python/python-logging", *BLACK_PATHS, + "flake8", + "--exclude=deployable/python/python-logging", + *BLACK_PATHS, ) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 7bdeecfcdb2b..529004bc248e 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -95,7 +95,13 @@ def _trigger(self, snippet, **kwargs): @RetryErrors(exception=(LogsNotFound, RpcError), delay=2, max_tries=2) def trigger_and_retrieve( - self, log_text, snippet, append_uuid=True, ignore_protos=True, max_tries=3, **kwargs + self, + log_text, + snippet, + append_uuid=True, + ignore_protos=True, + max_tries=3, + **kwargs, ): """ Trigger a snippet deployed in the cloud by envctl, and return resulting @@ -133,7 +139,7 @@ def trigger_and_retrieve( print(f"RPC error: {e}") # most RpcErrors come from exceeding the reads per minute quota # wait at least 60 seconds - # use a randomized backoff so parallel runs don't start up at + # use a randomized backoff so parallel runs don't start up at # the same time again sleep(random.randint(60, 300)) tries += 1 @@ -141,7 +147,7 @@ def trigger_and_retrieve( print("logs not found...") # logs may not have been fully ingested into Cloud Logging # Wait before trying again - sleep(10 * (tries+1)) + sleep(10 * (tries + 1)) tries += 1 # log not found raise LogsNotFound @@ -228,7 +234,7 @@ def test_json_log(self): self.assertEqual(found_log.payload, expected_dict) def test_monitored_resource(self): - if self.language == 'java': + if self.language == "java": # TODO: implement in java return True diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 3a613e487924..d5f160db59fc 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -317,13 +317,16 @@ def test_pylogging_extras_sparse(self): def test_pylogging_exception(self): log_text = f"{inspect.currentframe().f_code.co_name}" exception_text = "test_exception" - log_list = self.trigger_and_retrieve(log_text, "pylogging_exception", - exception_text=exception_text) + log_list = self.trigger_and_retrieve( + log_text, "pylogging_exception", exception_text=exception_text + ) found_log = log_list[-1] - message = (found_log.payload.get("message", None) - if isinstance(found_log.payload, dict) - else str(found_log.payload)) + message = ( + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) self.assertIn(log_text, message) self.assertIn(f"Exception: {exception_text}", message) @@ -335,16 +338,21 @@ def test_pylogging_pandas(self): https://github.com/googleapis/python-logging/issues/409 """ import pandas as pd + log_text = f"{inspect.currentframe().f_code.co_name} {str(uuid.uuid1())[-10:]}" - log_list = self.trigger_and_retrieve(log_text, "pylogging_pandas", append_uuid=False) + log_list = 
self.trigger_and_retrieve( + log_text, "pylogging_pandas", append_uuid=False + ) found_log = log_list[-1] - message = (found_log.payload.get("message", None) - if isinstance(found_log.payload, dict) - else str(found_log.payload)) + message = ( + found_log.payload.get("message", None) + if isinstance(found_log.payload, dict) + else str(found_log.payload) + ) - df = pd.DataFrame(columns=['log_text']) + df = pd.DataFrame(columns=["log_text"]) df = df.append({"log_text": log_text}, ignore_index=True) self.assertEqual(str(df), message) diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py index cfc262a4cf89..fc392860734b 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_flex.py @@ -17,6 +17,7 @@ from ..common.common import Common + class TestAppEngineFlex(Common, unittest.TestCase): environment = "appengine_flex" diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py index ab7d4b9e761d..589073ae6bb0 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_appengine_standard.py @@ -17,6 +17,7 @@ from ..common.common import Common + class TestAppEngineStandard(Common, unittest.TestCase): environment = "appengine_standard" diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py index b63caebff5af..079b1d236edd 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_cloudrun.py @@ -19,6 +19,7 @@ from ..common.common import Common + class TestCloudRun(Common, unittest.TestCase): environment = "cloudrun" @@ -32,4 +33,3 @@ class TestCloudRun(Common, unittest.TestCase): "location", "configuration_name", ] - diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py index 4ee3319faab3..94101f90412d 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_compute.py @@ -18,6 +18,7 @@ from ..common.common import Common + class TestComputeEngine(Common, unittest.TestCase): environment = "compute" diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py index 324ff7943b0c..0a91d7ed7e62 100644 --- a/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_kubernetes.py @@ -26,4 +26,10 @@ class TestKubernetesEngine(Common, unittest.TestCase): language = "java" monitored_resource_name = "k8s_container" - monitored_resource_labels = ["project_id", "location", "cluster_name", "pod_name", "namespace_name"] + monitored_resource_labels = [ + "project_id", + "location", + "cluster_name", + "pod_name", + "namespace_name", + ] diff --git 
a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py index 4d4151dd4d3a..8e24fe96a57d 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_appengine_standard.py @@ -27,5 +27,3 @@ class TestAppEngineStandard(Common, unittest.TestCase): monitored_resource_name = "gae_app" monitored_resource_labels = ["project_id", "module_id", "version_id", "zone"] - - diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py index e2d000689d77..98f2ce816820 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_cloudrun.py @@ -22,6 +22,7 @@ from ..common.common import Common + class TestCloudRun(Common, unittest.TestCase): environment = "cloudrun" @@ -36,4 +37,3 @@ class TestCloudRun(Common, unittest.TestCase): "location", "configuration_name", ] - diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py index b3258dc1cf9c..46f7382e585f 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions.py @@ -32,4 +32,3 @@ class TestCloudFunctions(Common, unittest.TestCase): "function_name", "project_id", ] - diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py index caad651a0bc4..9df54df9518d 100644 --- a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_kubernetes.py @@ -20,6 +20,7 @@ from ..common.common import Common + class TestKubernetesEngine(Common, unittest.TestCase): environment = "kubernetes" @@ -33,4 +34,3 @@ class TestKubernetesEngine(Common, unittest.TestCase): "pod_name", "namespace_name", ] - diff --git a/tests/environment b/tests/environment index be8b03308472..4c7ed3d1ca34 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit be8b033084726bb7a3f62130157e1da3500f6855 +Subproject commit 4c7ed3d1ca34b1eb2188124f7aaa2508bb6b1a5c From 499933def97478ac7d3bbe9b88957b5431742171 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 May 2022 15:01:50 -0600 Subject: [PATCH 632/855] chore(main): release 3.1.1 (#557) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index f859d72363f9..cca40b3d4fdb 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [3.1.1](https://github.com/googleapis/python-logging/compare/v3.1.0...v3.1.1) (2022-05-23) + + +### Documentation + +* Change button in 
README to .png file ([#554](https://github.com/googleapis/python-logging/issues/554)) ([e297747](https://github.com/googleapis/python-logging/commit/e2977477f97469671b8cbfc920f39743cf2f7c80)) + ## [3.1.0](https://github.com/googleapis/python-logging/compare/v3.0.0...v3.1.0) (2022-05-08) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index dc26595c0dc4..2350653bc8c3 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.1.0" +version = "3.1.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 5d5875de7ae3f5234264ad9f27d5316c2e50f131 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 May 2022 17:42:32 +0000 Subject: [PATCH 633/855] chore: use gapic-generator-python 1.0.0 (#560) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 451250442 Source-Link: https://github.com/googleapis/googleapis/commit/cca5e8181f6442b134e8d4d206fbe9e0e74684ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b219da161a8bdcc3c6f7b2efcd82105182a30ca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIyMTlkYTE2MWE4YmRjYzNjNmY3YjJlZmNkODIxMDUxODJhMzBjYSJ9 --- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 8 +++++++- .../unit/gapic/logging_v2/test_logging_service_v2.py | 8 +++++++- .../unit/gapic/logging_v2/test_metrics_service_v2.py | 8 +++++++- 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index caa7bd6892f1..883b5162894a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e7cbfcd8a1fb..fe1508f73148 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 456d43946760..41ba6dc1f653 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -14,7 +14,13 @@ # limitations under the License. 
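# A short, hedged illustration of why the import shim below exists:
# unittest.mock.AsyncMock ships with Python 3.8+, and it is what lets the
# async client tests await a patched stub exactly like a real gRPC call.
import asyncio
from unittest.mock import AsyncMock

stub = AsyncMock(return_value=42)
assert asyncio.run(stub()) == 42  # awaiting the mock records the await
stub.assert_awaited_once()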
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio From 9e0ed77afafd58cf58da22670dbcea71db1678ad Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 1 Jun 2022 11:42:17 -0400 Subject: [PATCH 634/855] docs: fix changelog header to consistent size (#562) --- packages/google-cloud-logging/CHANGELOG.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index cca40b3d4fdb..f77001b3dbb9 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,7 +4,7 @@ [1]: https://pypi.org/project/google-cloud-logging/#history -### [3.1.1](https://github.com/googleapis/python-logging/compare/v3.1.0...v3.1.1) (2022-05-23) +## [3.1.1](https://github.com/googleapis/python-logging/compare/v3.1.0...v3.1.1) (2022-05-23) ### Documentation @@ -159,7 +159,7 @@ * improve API compatibility for next release ([#292](https://www.github.com/googleapis/python-logging/issues/292)) ([1f9517d](https://www.github.com/googleapis/python-logging/commit/1f9517da7302e19198e598d452df58238d4e6306)) * remove noisy logs ([#290](https://www.github.com/googleapis/python-logging/issues/290)) ([bdf8273](https://www.github.com/googleapis/python-logging/commit/bdf827358de5935f736ecd73ab10b2d861daf690)) -### [2.3.1](https://www.github.com/googleapis/python-logging/compare/v2.3.0...v2.3.1) (2021-03-24) +## [2.3.1](https://www.github.com/googleapis/python-logging/compare/v2.3.0...v2.3.1) (2021-03-24) ### Bug Fixes @@ -199,7 +199,7 @@ * django content length extraction bug ([#160](https://www.github.com/googleapis/python-logging/issues/160)) ([93eeaef](https://www.github.com/googleapis/python-logging/commit/93eeaef1cce286aa8aa830d2369212b912d184b6)) * fix sphinx identifiers ([a9ff2b7](https://www.github.com/googleapis/python-logging/commit/a9ff2b7984a54542963fc8d52864365ef1562f57)) -### [2.1.1](https://www.github.com/googleapis/python-logging/compare/v2.1.0...v2.1.1) (2021-01-14) +## [2.1.1](https://www.github.com/googleapis/python-logging/compare/v2.1.0...v2.1.1) (2021-01-14) ### Bug Fixes @@ -224,7 +224,7 @@ * fix usage guide ([#140](https://www.github.com/googleapis/python-logging/issues/140)) ([1ca3981](https://www.github.com/googleapis/python-logging/commit/1ca398103fdfefb5576d6ef2ba20cfa4bd4ab252)) -### [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) +## [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) ### Bug Fixes @@ -233,7 +233,7 @@ * remove client recv msg limit fix: add enums to `types/__init__.py` ([#131](https://www.github.com/googleapis/python-logging/issues/131)) ([6349b89](https://www.github.com/googleapis/python-logging/commit/6349b899811cbb16f5548df0b77564b46666c4e7)) * Remove keyword only argument for RequestsMiddleware ([#113](https://www.github.com/googleapis/python-logging/issues/113)) ([e704f28](https://www.github.com/googleapis/python-logging/commit/e704f287a40db38d0da42fa5e21e7a9ef73922ec)) -### [2.0.1](https://www.github.com/googleapis/python-logging/compare/v2.0.0...v2.0.1) (2020-12-02) +## [2.0.1](https://www.github.com/googleapis/python-logging/compare/v2.0.0...v2.0.1) (2020-12-02) ### Bug Fixes @@ -274,7 +274,7 @@ * update docs 
([#77](https://www.github.com/googleapis/python-logging/issues/77)) ([bdd9c44](https://www.github.com/googleapis/python-logging/commit/bdd9c440f29d1fcd6fb9545d8465c63efa6c0cea)) -### [1.15.1](https://www.github.com/googleapis/python-logging/compare/v1.15.0...v1.15.1) (2020-07-01) +## [1.15.1](https://www.github.com/googleapis/python-logging/compare/v1.15.0...v1.15.1) (2020-07-01) ### Documentation From 62b20135c03db7f42d3353cd28db20e4d6a420fc Mon Sep 17 00:00:00 2001 From: Katie McLaughlin Date: Thu, 2 Jun 2022 03:28:56 +1000 Subject: [PATCH 635/855] docs: Update README image to absolute URL, fix PyPI rendering (#561) Co-authored-by: Drew Brown --- packages/google-cloud-logging/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 240faa3c2397..cdeb4bb1cf90 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -38,7 +38,7 @@ For an interactive walkthrough on how to use this library in a python applicatio |guide| -.. |guide| image:: _static/guide-me.png +.. |guide| image:: https://raw.githubusercontent.com/googleapis/python-logging/main/_static/guide-me.png :target: https://console.cloud.google.com/?walkthrough_id=logging__logging-python Installation From 056643933003c4db72e426a267c05fbe5ad80242 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 1 Jun 2022 22:38:59 -0400 Subject: [PATCH 636/855] fix(deps): require protobuf <4.0.0dev (#564) Co-authored-by: Drew Brown --- packages/google-cloud-logging/setup.py | 5 +++-- packages/google-cloud-logging/testing/constraints-3.6.txt | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 2350653bc8c3..0b6fc74f6bae 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -39,8 +39,9 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.1, <3.0.0dev", - "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - "proto-plus >= 1.15.0", + "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", + "proto-plus >= 1.15.0, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt index 0aa016644404..415307c52542 100644 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ b/packages/google-cloud-logging/testing/constraints-3.6.txt @@ -8,3 +8,4 @@ google-api-core==1.31.5 google-cloud-core==1.4.1 proto-plus==1.15.0 +protobuf==3.19.0 From 8fe324472df9cc2ccecd176f684a49032059223b Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Fri, 3 Jun 2022 14:42:10 -0600 Subject: [PATCH 637/855] chore: Update environment tests (#566) --- .../environment/envctl/env_scripts/python/functions.sh | 3 ++- .../tests/environment/tests/common/common.py | 6 +++--- tests/environment | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh index 9e902a2bbf7b..b4b58e39f5f7 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -52,7 +52,8 @@ deploy() { # set up 
deployment directory # copy over local copy of library pushd $SUPERREPO_ROOT - tar -cvf $TMP_DIR/lib.tar --exclude tests --exclude .nox --exclude samples --exclude docs --exclude __pycache__ . + tar -cvf $TMP_DIR/lib.tar --exclude tests --exclude .nox --exclude samples \ + --exclude docs --exclude __pycache__ --exclude .git --exclude .github --exclude dist . popd mkdir $TMP_DIR/python-logging tar -xvf $TMP_DIR/lib.tar --directory $TMP_DIR/python-logging diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 529004bc248e..9e2d19c83fa6 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -100,7 +100,7 @@ def trigger_and_retrieve( snippet, append_uuid=True, ignore_protos=True, - max_tries=3, + max_tries=5, **kwargs, ): """ @@ -138,10 +138,10 @@ def trigger_and_retrieve( except RpcError as e: print(f"RPC error: {e}") # most RpcErrors come from exceeding the reads per minute quota - # wait at least 60 seconds + # wait between 5-15 minutes # use a randomized backoff so parallel runs don't start up at # the same time again - sleep(random.randint(60, 300)) + sleep(random.randint(300, 900)) tries += 1 except LogsNotFound as e: print("logs not found...") diff --git a/tests/environment b/tests/environment index 4c7ed3d1ca34..b44573e60cb3 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 4c7ed3d1ca34b1eb2188124f7aaa2508bb6b1a5c +Subproject commit b44573e60cb3129c8776e7418bbe92f8e5046ea4 From 15b04e118edc7ac773f0037582038a647799b612 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 17:40:22 -0400 Subject: [PATCH 638/855] chore(main): release 3.1.2 (#563) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index f77001b3dbb9..578008affebf 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.1.2](https://github.com/googleapis/python-logging/compare/v3.1.1...v3.1.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#564](https://github.com/googleapis/python-logging/issues/564)) ([cb0720c](https://github.com/googleapis/python-logging/commit/cb0720ceac84f4cebe3d79e4a01eea6728da8f05)) + + +### Documentation + +* fix changelog header to consistent size ([#562](https://github.com/googleapis/python-logging/issues/562)) ([3f16107](https://github.com/googleapis/python-logging/commit/3f16107ef02f85a405d2608862ffc0a73cdc8e48)) +* Update README image to absolute URL, fix PyPI rendering ([#561](https://github.com/googleapis/python-logging/issues/561)) ([76413b1](https://github.com/googleapis/python-logging/commit/76413b1c405019d8ed6044f0213f7983c38673cb)) + ## [3.1.1](https://github.com/googleapis/python-logging/compare/v3.1.0...v3.1.1) (2022-05-23) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 0b6fc74f6bae..7ad796f08c63 100644 --- a/packages/google-cloud-logging/setup.py +++ 
b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.1.1" +version = "3.1.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 869df7c5eed5023ebb8478b21f58afa33359efcf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 12 Jun 2022 11:03:46 -0400 Subject: [PATCH 639/855] chore: add prerelease nox session (#571) Source-Link: https://github.com/googleapis/synthtool/commit/050953d60f71b4ed4be563e032f03c192c50332f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/continuous/prerelease-deps.cfg | 7 ++ .../.kokoro/presubmit/prerelease-deps.cfg | 7 ++ packages/google-cloud-logging/noxfile.py | 64 +++++++++++++++++++ 4 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg create mode 100644 packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 757c9dca75ad..2185b591844c 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 +# created: 2022-06-12T13:11:45.905884945Z diff --git a/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 6cfcca2ee52b..e86d5f377fcb 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -333,3 +333,67 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + prerel_deps = [ + "protobuf", + "googleapis-common-protos", + "google-auth", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + # dependencies of google-auth + "cryptography", + "pyasn1", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = ["requests"] + session.install(*other_deps) + + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Don't overwrite prerelease packages. + deps = [dep for dep in deps if dep not in prerel_deps] + # We use --no-deps to ensure that pre-release versions aren't overwritten + # by the version ranges in setup.py. + session.install(*deps) + session.install("--no-deps", "-e", ".[all]") + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + session.run("py.test", "tests/system") + session.run("py.test", "samples/snippets") From 9d7ba1eab6d03df4248fb6473e699bf58ffd0634 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 05:49:24 -0400 Subject: [PATCH 640/855] chore(python): add missing import for prerelease testing (#572) Source-Link: https://github.com/googleapis/synthtool/commit/d2871d98e1e767d4ad49a557ff979236d64361a1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/noxfile.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 2185b591844c..50b29ffd2050 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
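A note on the `prerelease_deps` session added above: it harvests package pins from the constraints file with a lookahead regex (and the very next patch, 640, supplies the missing `re` import the session relies on). Below is a minimal sketch of that parsing step, using made-up constraints text rather than the repository's real file:

import re

# Illustrative constraints text only -- not the repository's actual file.
constraints_text = """\
# comment lines and blank lines yield no match
google-api-core==2.8.0
  proto-plus==1.15.0
protobuf==3.19.0
"""

# ^\s*(\S+)(?===\S+) captures the package name before the first "==",
# tolerating leading whitespace; lines without a ==<version> pin are skipped.
deps = [
    match.group(1)
    for match in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
assert deps == ["google-api-core", "proto-plus", "protobuf"]

Installing these pinned packages first, then upgrading only the chosen prerelease packages with `--pre --no-deps`, keeps the version ranges in setup.py from overwriting the prerelease versions.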
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 -# created: 2022-06-12T13:11:45.905884945Z + digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 +# created: 2022-06-12T16:09:31.61859086Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index e86d5f377fcb..b48b74c7a79a 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -19,6 +19,7 @@ from __future__ import absolute_import import os import pathlib +import re import shutil import warnings From af31b361d8a115430e870cfcf1b2103b16a0a83d Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Mon, 27 Jun 2022 11:14:35 -0700 Subject: [PATCH 641/855] chore: Update blunderbuss config (#576) --- packages/google-cloud-logging/.github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index 28438484fcbf..148ebf4e81cb 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - arbrown + - Daniel-Sanche assign_prs: - - arbrown + - Daniel-Sanche From b7e5f690531945c657956f36826a40a6c79a229b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Jul 2022 14:38:19 -0400 Subject: [PATCH 642/855] fix: require python 3.7+ (#580) * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * add api_description to .repo-metadata.json * require python 3.7+ in setup.py * revert templated README * remove python 3.6 sample configs * exclude templated README Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/samples/python3.6/common.cfg | 40 --------- .../.kokoro/samples/python3.6/continuous.cfg | 7 -- .../samples/python3.6/periodic-head.cfg | 11 --- .../.kokoro/samples/python3.6/periodic.cfg | 6 -- .../.kokoro/samples/python3.6/presubmit.cfg | 6 -- .../.kokoro/test-samples-impl.sh | 4 +- .../google-cloud-logging/.repo-metadata.json | 3 +- .../google-cloud-logging/CONTRIBUTING.rst | 6 +- packages/google-cloud-logging/noxfile.py | 85 ++++++++++++------- packages/google-cloud-logging/owlbot.py | 1 + .../samples/snippets/noxfile.py | 2 +- .../templates/install_deps.tmpl.rst | 2 +- packages/google-cloud-logging/setup.py | 5 +- 14 files changed, 68 insertions(+), 114 deletions(-) delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 50b29ffd2050..1ce608523524 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 -# created: 2022-06-12T16:09:31.61859086Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 792a4d14c2eb..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af1499e5..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# 
Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index 8a324c9c7bc6..2c6500cae0b9 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets accessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 9dac57e33f15..0b6c0d8ca0f9 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -12,5 +12,6 @@ "api_id": "logging.googleapis.com", "codeowner_team": "@googleapis/api-logging", "default_version": "v2", - "api_shortname": "logging" + "api_shortname": "logging", + "api_description": "allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud and Amazon Web Services. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premises systems, and hybrid cloud systems. BindPlane is included with your Google Cloud project at no additional cost." } diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 1839c49a7c50..b68256eb04f5 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-logging/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7.
Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index b48b74c7a79a..eaecc6c34f5b 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -340,28 +340,15 @@ def docfx(session): def prerelease_deps(session): """Run all tests with prerelease versions of dependencies installed.""" - prerel_deps = [ - "protobuf", - "googleapis-common-protos", - "google-auth", - "grpcio", - "grpcio-status", - "google-api-core", - "proto-plus", - # dependencies of google-auth - "cryptography", - "pyasn1", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = ["requests"] - session.install(*other_deps) - + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python # version, the first version we test with in the unit tests sessions has a @@ -375,19 +362,44 @@ def prerelease_deps(session): constraints_text = constraints_file.read() # Ignore leading whitespace and comment lines. - deps = [ + constraints_deps = [ match.group(1) for match in re.finditer( r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE ) ] - # Don't overwrite prerelease packages. - deps = [dep for dep in deps if dep not in prerel_deps] - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".[all]") + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) # Print out prerelease package versions session.run( @@ -396,5 +408,16 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/snippets") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found.
+ if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index cf8252bcc2f5..ba8a593e94e4 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -67,6 +67,7 @@ "docs/multiprocessing.rst", ".github/workflows", # exclude gh actions as credentials are needed for tests ".github/auto-label.yaml", + "README.rst", # This repo has a customized README ]) # adjust .trampolinerc for environment tests diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 38bb0a572b81..5fcb9d7461f2 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d649890d7..6f069c6c87a5 100644 --- a/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-logging/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 7ad796f08c63..acd5eeed2be6 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -81,9 +81,10 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -92,7 +93,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6", + python_requires=">=3.7", include_package_data=True, zip_safe=False, ) From 7e974ba1fa218fdb7e4193e0c8d7fa32a2bafefc Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 7 Jul 2022 10:02:19 -0400 Subject: [PATCH 643/855] chore: test minimum dependencies in python 3.7 (#565) --- .../google-cloud-logging/testing/constraints-3.7.txt | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index e69de29bb2d1..415307c52542 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.31.5 +google-cloud-core==1.4.1 +proto-plus==1.15.0 +protobuf==3.19.0 From 5a84dffcc05a0706bd51ef448da76c565616d193 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 9 Jul 2022 10:32:39 +0000 Subject: [PATCH 644/855] fix(deps): require google-api-core >= 2.8.0 (#575) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 feat: add audience parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 fix(deps): require google-cloud-core >= 2.0.0 --- .../services/config_service_v2/client.py | 1 + .../config_service_v2/transports/base.py | 16 ++++-- .../config_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/logging_service_v2/client.py | 1 + .../logging_service_v2/transports/base.py | 16 ++++-- .../logging_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/metrics_service_v2/client.py | 1 + .../metrics_service_v2/transports/base.py | 16 ++++-- .../metrics_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + packages/google-cloud-logging/setup.py | 10 +--- .../testing/constraints-3.6.txt | 11 ---- .../testing/constraints-3.7.txt | 4 +- .../logging_v2/test_config_service_v2.py | 52 +++++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 52 +++++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 52 +++++++++++++++++++ 18 files changed, 208 insertions(+), 36 deletions(-) delete mode 100644 packages/google-cloud-logging/testing/constraints-3.6.txt diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 35e511abd34a..edc5d53579be 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -516,6 +516,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_buckets( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 685f174b4412..b65a2117b2dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: 
Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -89,11 +90,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -114,6 +110,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -126,6 +127,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 25de4885159f..d8aca6273a99 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 86e67253cae0..2a36a4955001 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -106,6 +106,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 47c5bfe82db0..a2d00ad87974 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -431,6 +431,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def delete_log( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index ceefeda8a50a..d308f7d0125b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -88,11 +89,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -113,6 +109,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -125,6 +126,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 22affa06bc37..3c9c7743e008 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -154,6 +155,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 1ef7198fdc7b..a7fdf5ffc2ff 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -199,6 +200,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index df5d4d2fc917..fd63ddfb0cd5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -422,6 +422,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_log_metrics( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index eae5f5da7caf..f7e9c5edc02a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -88,11 +89,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -113,6 +109,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -125,6 +126,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 12d70452f908..13cc653f77f1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -154,6 +155,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 28ff48f5cddb..93ca38ba5a1e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
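The credential-audience hunk repeated across the transport base classes above reduces to a small, self-contained behaviour. A minimal sketch, assuming a credentials object that exposes with_gdch_audience() the way GDCH credentials in google-auth do (the helper name here is invented for illustration):

def _apply_gdch_audience(credentials, host, api_audience=None):
    # Mirrors the generated transports: prefer an explicit api_audience,
    # fall back to the host, and leave credentials untouched when they
    # do not support audiences (e.g. plain service account credentials).
    if hasattr(credentials, "with_gdch_audience"):
        credentials = credentials.with_gdch_audience(
            api_audience if api_audience else host
        )
    return credentials

Callers reach this through client options, for example client_options.ClientOptions(api_audience="https://logging.googleapis.com"), which each GAPIC client forwards to its transport (see the client.py hunks above); the new test_*_transport_auth_gdch_credentials tests below assert exactly this host-fallback behaviour.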
@@ -199,6 +200,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index acd5eeed2be6..5613519c5ab2 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,16 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 2.8.0, <3.0.0dev", "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-cloud-core >= 1.4.1, <3.0.0dev", + "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", diff --git a/packages/google-cloud-logging/testing/constraints-3.6.txt b/packages/google-cloud-logging/testing/constraints-3.6.txt deleted file mode 100644 index 415307c52542..000000000000 --- a/packages/google-cloud-logging/testing/constraints-3.6.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.31.5 -google-cloud-core==1.4.1 -proto-plus==1.15.0 -protobuf==3.19.0 diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 415307c52542..2cd939a883ac 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.31.5 -google-cloud-core==1.4.1 +google-api-core==2.8.0 +google-cloud-core==2.0.0 proto-plus==1.15.0 protobuf==3.19.0 diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 883b5162894a..8b14284347da 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -237,6 +237,7 @@ def test_config_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -254,6 +255,7 @@ def test_config_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -271,6 +273,7 @@ def test_config_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -300,6 +303,25 @@ def test_config_service_v2_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -377,6 +399,7 @@ def test_config_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -411,6 +434,7 @@ def test_config_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -433,6 +457,7 @@ def test_config_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -547,6 +572,7 @@ def test_config_service_v2_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -585,6 +611,7 @@ def test_config_service_v2_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -605,6 +632,7 @@ def test_config_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -643,6 +671,7 @@ def test_config_service_v2_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -6974,6 +7003,28 @@ def test_config_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -7598,4 +7649,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index fe1508f73148..6eeac2574103 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -239,6 +239,7 @@ def test_logging_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -256,6 +257,7 @@ def test_logging_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -273,6 +275,7 @@ def test_logging_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -302,6 +305,25 @@ def test_logging_service_v2_client_client_options( quota_project_id="octopus", 
client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -379,6 +401,7 @@ def test_logging_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -413,6 +436,7 @@ def test_logging_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -435,6 +459,7 @@ def test_logging_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -549,6 +574,7 @@ def test_logging_service_v2_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -587,6 +613,7 @@ def test_logging_service_v2_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -607,6 +634,7 @@ def test_logging_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -645,6 +673,7 @@ def test_logging_service_v2_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2516,6 +2545,28 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2987,4 +3038,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 41ba6dc1f653..6a4041a1df52 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -237,6 +237,7 @@ def test_metrics_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -254,6 +255,7 @@ def test_metrics_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -271,6 +273,7 @@ def test_metrics_service_v2_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -300,6 +303,25 @@ def test_metrics_service_v2_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -377,6 +399,7 @@ def test_metrics_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -411,6 +434,7 @@ def test_metrics_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -433,6 +457,7 @@ def test_metrics_service_v2_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -547,6 +572,7 @@ def test_metrics_service_v2_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -585,6 +611,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -605,6 +632,7 @@ def test_metrics_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -643,6 +671,7 @@ def test_metrics_service_v2_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -2379,6 +2408,28 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2850,4 +2901,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 92cd501bf03a598f464ed1d238cec8b0addce3e6 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Mon, 11 Jul 2022 12:52:33 -0700 Subject: [PATCH 645/855] feat: Add support for library instrumentation (#551) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add .python-version to .gitignore * Add initial class/test for instrumentation_source * Add version and truncate logic * Add instrumentation tests and severity info * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add method to update and validate existing info * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add .python-version to gitignore * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Implement hook to add instrumentation for logger * 🦉 Updates from OwlBot 
post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add tests for logger instrumentation logic * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update structured log handler to emit info * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Refactor structured log and add unit test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add side effect to unit test * Ensure that instrumentation info is only called once * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update to environment submodule * Fix linter errors * chore(main): release 3.1.0 (#479) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> * docs: Change button in README to .png file (#554) * fix: Change button to .png file * Change the "Guide Me" button to a .png file to work with RST `image` * Avoids `raw` directive disabled by PyPi * Fixes #553 * Fix unexpected unindent in docs Add copy of png file to render correctly in GitHub readme and Sphinx generated docs. * chore(main): release 3.1.1 (#557) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> * Update env-tests submodule * Minor format update * Fix system test to skip diagnostic log entry * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update truncation logic based on feedback * Update environment tests * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Fix broken unit test * Fix broken unit test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Change default name/version * Refactor add_instrumentation Return a new list instead of a mutated original Do not return after first log without info * Add more documentation to validation methods * Refactor add_instrumentation to be more pythonic * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update environemnt tests * Refactor _is_valid and add test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add more detail to method documentation * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Move methods to private * Change instumentation_added to private * Fix some issues with validation method Add a test * Fix bug in _add_instrumentation * 🦉 Updates from OwlBot 
post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Simplify string truncation * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove unused import to fix lint * Remove validate_and_update_instrumentation Simplify code by adding a single instrumentation entry instead of validating potential existing entries * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove _is_valid code (no longer checked) * Run nox blacken * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove extraneous unit test Co-authored-by: Owl Bot Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Daniel Sanche --- .../google/cloud/logging_v2/__init__.py | 2 + .../cloud/logging_v2/_instrumentation.py | 90 +++++++++++++++++++ .../logging_v2/handlers/structured_log.py | 13 +++ .../google/cloud/logging_v2/logger.py | 10 ++- .../tests/system/test_system.py | 5 ++ .../unit/handlers/test_structured_log.py | 24 +++++ .../tests/unit/test__instrumentation.py | 65 ++++++++++++++ .../tests/unit/test_logger.py | 53 ++++++++++- 8 files changed, 260 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py create mode 100644 packages/google-cloud-logging/tests/unit/test__instrumentation.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py index 98954d550294..d55e474d9da2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -41,6 +41,8 @@ """Query string to order by ascending timestamps.""" DESCENDING = "timestamp desc" """Query string to order by decending timestamps.""" +_instrumentation_emitted = False +"""Flag for whether instrumentation info has been emitted""" __all__ = ( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py b/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py new file mode 100644 index 000000000000..0d9de76d3930 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py @@ -0,0 +1,90 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
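The `_instrumentation` module added below prepends a one-off "diagnostic" entry recording which client library produced the logs. A rough standalone sketch of the payload shape and truncation rule this patch implements; the helper names (`diagnostic_payload`, `_truncate`) are illustrative, and the real module wraps the payload in a `StructEntry` rather than returning a bare dict:

import json

_DIAGNOSTIC_INFO_KEY = "logging.googleapis.com/diagnostic"
_INSTRUMENTATION_SOURCE_KEY = "instrumentation_source"
_MAX_LENGTH = 14  # this patch uses 14 for both name and version

def _truncate(value, limit=_MAX_LENGTH):
    # Over-long values keep their first `limit` characters plus a '*' marker.
    return value if len(value) <= limit else value[:limit] + "*"

def diagnostic_payload(name="python", version="3.2.0"):
    # Shape of the structured payload attached to the diagnostic entry.
    return {
        _DIAGNOSTIC_INFO_KEY: {
            _INSTRUMENTATION_SOURCE_KEY: [
                {"name": _truncate(name), "version": _truncate(version)}
            ]
        }
    }

print(json.dumps(diagnostic_payload(name="a-very-long-library-name"), indent=2))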
+ +"""Add diagnostic instrumentation source information to logs""" +from google.cloud.logging_v2.entries import StructEntry +from google.cloud.logging_v2 import __version__ + +_DIAGNOSTIC_INFO_KEY = "logging.googleapis.com/diagnostic" +_INSTRUMENTATION_SOURCE_KEY = "instrumentation_source" +_PYTHON_LIBRARY_NAME = "python" + +_LIBRARY_VERSION = __version__ + +_MAX_NAME_LENGTH = 14 +_MAX_VERSION_LENGTH = 14 +_MAX_INSTRUMENTATION_ENTRIES = 3 + + +def _add_instrumentation(entries, **kw): + """Add instrumentation information to a list of entries + + A new diagnostic entry is prepended to the list of + entries. + + Args: + entries (Sequence[Mapping[str, ...]]): sequence of mappings representing + the log entry resources to log. + + Returns: + Sequence[Mapping[str, ...]]: entries with instrumentation info added to + the beginning of list. + """ + + diagnostic_entry = _create_diagnostic_entry(**kw) + entries.insert(0, diagnostic_entry.to_api_repr()) + return entries + + +def _create_diagnostic_entry(name=_PYTHON_LIBRARY_NAME, version=_LIBRARY_VERSION, **kw): + """Create a diagnostic log entry describing this library + + The diagnostic log consists of a list of library name and version objects + that have handled a given log entry. If this library is the originator + of the log entry, it will look like: + {logging.googleapis.com/diagnostic: {instrumentation_source: [{name: "python", version: "3.0.0"}]}} + + Args: + name(str): The name of this library (e.g. 'python') + version(str) The version of this library (e.g. '3.0.0') + + Returns: + google.cloud.logging_v2.LogEntry: Log entry with library information + """ + payload = { + _DIAGNOSTIC_INFO_KEY: { + _INSTRUMENTATION_SOURCE_KEY: [_get_instrumentation_source(name, version)] + } + } + kw["severity"] = "INFO" + entry = StructEntry(payload=payload, **kw) + return entry + + +def _get_instrumentation_source(name=_PYTHON_LIBRARY_NAME, version=_LIBRARY_VERSION): + """Gets a JSON representation of the instrumentation_source + + Args: + name(str): The name of this library (e.g. 'python') + version(str) The version of this library (e.g. 
'3.0.0') + Returns: + obj: JSON object with library information + """ + source = {"name": name, "version": version} + # truncate strings to no more than _MAX_NAME_LENGTH characters + for key, val in source.items(): + source[key] = ( + val if len(val) <= _MAX_NAME_LENGTH else f"{val[:_MAX_NAME_LENGTH]}*" + ) + return source diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 2d7c5e0786bf..4a9a139e5e3e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -16,10 +16,13 @@ """ import collections import json +import logging import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message +import google.cloud.logging_v2 +from google.cloud.logging_v2._instrumentation import _create_diagnostic_entry GCP_FORMAT = ( "{%(_payload_str)s" @@ -84,3 +87,13 @@ def format(self, record): # convert to GCP structred logging format gcp_payload = self._gcp_formatter.format(record) return gcp_payload + + def emit(self, record): + if google.cloud.logging_v2._instrumentation_emitted is False: + self.emit_instrumentation_info() + super().emit(record) + + def emit_instrumentation_info(self): + google.cloud.logging_v2._instrumentation_emitted = True + diagnostic_object = _create_diagnostic_entry().to_api_repr() + logging.info(diagnostic_object) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 02ecb6905a5a..fa0af170cb27 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -23,6 +23,7 @@ from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +from google.cloud.logging_v2._instrumentation import _add_instrumentation import google.protobuf.message @@ -134,6 +135,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["log_name"] = kw.pop("log_name", self.full_name) kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", self.default_resource) + partial_success = False severity = kw.get("severity", None) if isinstance(severity, str) and not severity.isupper(): @@ -155,7 +157,13 @@ def _do_log(self, client, _entry_class, payload=None, **kw): entry = _entry_class(**kw) api_repr = entry.to_api_repr() - client.logging_api.write_entries([api_repr]) + entries = [api_repr] + if google.cloud.logging_v2._instrumentation_emitted is False: + partial_success = True + entries = _add_instrumentation(entries, **kw) + google.cloud.logging_v2._instrumentation_emitted = True + + client.logging_api.write_entries(entries, partial_success=partial_success) def log_empty(self, *, client=None, **kw): """Log an empty message diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 84d0c9552270..8d39408d3895 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -888,6 +888,11 @@ def test_update_sink(self): @skip_for_mtls def 
test_api_equality_list_logs(self): + import google.cloud.logging_v2 + + # Skip diagnostic log for this system test + google.cloud.logging_v2._instrumentation_emitted = True + unique_id = uuid.uuid1() gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") http_logger = Config.HTTP_CLIENT.logger(f"api-list-{unique_id}") diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 5031748f9e12..d2d570e2162d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -438,3 +438,27 @@ def test_format_with_json_fields(self): self.assertEqual(result["message"], expected_result) self.assertEqual(result["hello"], "world") self.assertEqual(result["number"], 12) + + def test_emits_instrumentation_info(self): + import logging + import mock + import google.cloud.logging_v2 + + handler = self._make_one() + logname = "loggername" + message = "Hello world!" + + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + + with mock.patch.object(handler, "emit_instrumentation_info") as emit_info: + + def side_effect(): + google.cloud.logging_v2._instrumentation_emitted = True + + emit_info.side_effect = side_effect + google.cloud.logging_v2._instrumentation_emitted = False + handler.emit(record) + handler.emit(record) + + # emit_instrumentation_info should be called once + emit_info.assert_called_once() diff --git a/packages/google-cloud-logging/tests/unit/test__instrumentation.py b/packages/google-cloud-logging/tests/unit/test__instrumentation.py new file mode 100644 index 000000000000..501301c34dc3 --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/test__instrumentation.py @@ -0,0 +1,65 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
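The unit tests below exercise the module above. The companion `Logger._do_log` change (in the logger.py hunk earlier in this patch) sends the diagnostic entry only once per process, and with `partial_success=True` so a rejected diagnostic entry cannot block the caller's own entry. A condensed, illustrative sketch of that first-write control flow, with `write_entries` standing in for `client.logging_api.write_entries` and `write_with_instrumentation` an invented name:

_instrumentation_emitted = False  # module-level flag, as in logging_v2/__init__.py

def write_with_instrumentation(write_entries, api_repr, diagnostic_repr):
    global _instrumentation_emitted
    entries = [api_repr]
    partial_success = False
    if not _instrumentation_emitted:
        # First write in this process: prepend the diagnostic entry and let
        # the rest of the batch succeed even if that entry is rejected.
        entries.insert(0, diagnostic_repr)
        partial_success = True
        _instrumentation_emitted = True
    write_entries(entries, partial_success=partial_success)

calls = []
fake_api = lambda entries, **kw: calls.append(entries)
write_with_instrumentation(fake_api, {"msg": "first"}, {"diag": True})
write_with_instrumentation(fake_api, {"msg": "second"}, {"diag": True})
print(len(calls[0]), len(calls[1]))  # 2 then 1: the diagnostic entry is sent once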
+ +import unittest +import google.cloud.logging_v2._instrumentation as i + + +class TestInstrumentation(unittest.TestCase): + + TEST_NAME = "python" + # LONG_NAME > 14 characters + LONG_NAME = TEST_NAME + "789ABCDEF" + + TEST_VERSION = "1.0.0" + # LONG_VERSION > 14 characters + LONG_VERSION = TEST_VERSION + "6789ABCDEF12" + + def _get_diagnostic_value(self, entry, key): + return entry.payload[i._DIAGNOSTIC_INFO_KEY][i._INSTRUMENTATION_SOURCE_KEY][-1][ + key + ] + + def test_default_diagnostic_info(self): + entry = i._create_diagnostic_entry() + self.assertEqual( + i._PYTHON_LIBRARY_NAME, + self._get_diagnostic_value(entry, "name"), + ) + self.assertEqual( + i._LIBRARY_VERSION, self._get_diagnostic_value(entry, "version") + ) + + def test_custom_diagnostic_info(self): + entry = i._create_diagnostic_entry( + name=self.TEST_NAME, version=self.TEST_VERSION + ) + self.assertEqual( + self.TEST_NAME, + self._get_diagnostic_value(entry, "name"), + ) + self.assertEqual( + self.TEST_VERSION, self._get_diagnostic_value(entry, "version") + ) + + def test_truncate_long_values(self): + entry = i._create_diagnostic_entry( + name=self.LONG_NAME, version=self.LONG_VERSION + ) + + expected_name = self.LONG_NAME[: i._MAX_NAME_LENGTH] + "*" + expected_version = self.LONG_VERSION[: i._MAX_VERSION_LENGTH] + "*" + + self.assertEqual(expected_name, self._get_diagnostic_value(entry, "name")) + self.assertEqual(expected_version, self._get_diagnostic_value(entry, "version")) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 597313824502..a5d01898b71e 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -34,6 +34,12 @@ class TestLogger(unittest.TestCase): LOGGER_NAME = "logger-name" TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' + def setUp(self): + import google.cloud.logging_v2 + + # Test instrumentation behavior in only one test + google.cloud.logging_v2._instrumentation_emitted = True + @staticmethod def _get_target_class(): from google.cloud.logging import Logger @@ -975,6 +981,43 @@ def test_list_entries_folder(self): self.assertIsNone(entry.logger) self.assertEqual(entry.log_name, LOG_NAME) + def test_first_log_emits_instrumentation(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + from google.cloud.logging_v2._instrumentation import _create_diagnostic_entry + import google.cloud.logging_v2 + + google.cloud.logging_v2._instrumentation_emitted = False + DEFAULT_LABELS = {"foo": "spam"} + resource = detect_resource(self.PROJECT) + instrumentation_entry = _create_diagnostic_entry( + resource=resource, + labels=DEFAULT_LABELS, + ).to_api_repr() + instrumentation_entry["logName"] = "projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) + ENTRIES = [ + instrumentation_entry, + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "resource": resource._to_dict(), + "labels": DEFAULT_LABELS, + }, + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) + logger.log_empty() + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + ENTRIES = ENTRIES[-1:] + api = client.logging_api = _DummyLoggingAPI() + logger.log_empty() + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + class TestBatch(unittest.TestCase): @@ -1645,7
+1688,15 @@ class _DummyLoggingAPI(object): _write_entries_called_with = None - def write_entries(self, entries, *, logger_name=None, resource=None, labels=None): + def write_entries( + self, + entries, + *, + logger_name=None, + resource=None, + labels=None, + partial_success=False, + ): self._write_entries_called_with = (entries, logger_name, resource, labels) def logger_delete(self, logger_name): From 510e11f1b51fb231a83f50709cfed8255440f724 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:43:40 -0400 Subject: [PATCH 646/855] chore(main): release 3.2.0 (#581) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 578008affebf..f7a63dd91fdf 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.0](https://github.com/googleapis/python-logging/compare/v3.1.2...v3.2.0) (2022-07-11) + + +### Features + +* add audience parameter ([a2eed8c](https://github.com/googleapis/python-logging/commit/a2eed8c5de4f5ee995b6b7392c0e94dc72e56415)) +* Add support for library instrumentation ([#551](https://github.com/googleapis/python-logging/issues/551)) ([8ba0023](https://github.com/googleapis/python-logging/commit/8ba002386560f2b94756c8cd0e32a8b61cdeb78b)) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#575](https://github.com/googleapis/python-logging/issues/575)) ([a2eed8c](https://github.com/googleapis/python-logging/commit/a2eed8c5de4f5ee995b6b7392c0e94dc72e56415)) +* **deps:** require google-cloud-core >= 2.0.0 ([a2eed8c](https://github.com/googleapis/python-logging/commit/a2eed8c5de4f5ee995b6b7392c0e94dc72e56415)) +* require python 3.7+ ([#580](https://github.com/googleapis/python-logging/issues/580)) ([0b3eb52](https://github.com/googleapis/python-logging/commit/0b3eb5255f3294f9631deec3425a89cb06dc2c14)) + ## [3.1.2](https://github.com/googleapis/python-logging/compare/v3.1.1...v3.1.2) (2022-06-03) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5613519c5ab2..5baaeef3b5e8 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.1.2" +version = "3.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 7d82bc18b9e1a456706b3f9e988b98e80e6a42be Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Jul 2022 13:35:23 -0400 Subject: [PATCH 647/855] fix(deps): require google-api-core>=1.32.0,>=2.8.0 (#585) * fix(deps): require google-api-core>=1.32.0,>=2.8.0 * chore: update constraints --- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5baaeef3b5e8..cf4189b9736b 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -29,7 +29,7 @@ # 'Development Status 
:: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 2.8.0, <3.0.0dev", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 2.0.0, <3.0.0dev", diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 2cd939a883ac..07d97d4a84a3 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==2.8.0 +google-api-core==1.32.0 google-cloud-core==2.0.0 proto-plus==1.15.0 protobuf==3.19.0 From c1b12abfac25a881ad091d8a47423d17f818dc55 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 14 Jul 2022 09:31:58 -0400 Subject: [PATCH 648/855] chore(main): release 3.2.1 (#586) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index f7a63dd91fdf..9faead1ed850 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.1](https://github.com/googleapis/python-logging/compare/v3.2.0...v3.2.1) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#585](https://github.com/googleapis/python-logging/issues/585)) ([de35eab](https://github.com/googleapis/python-logging/commit/de35eabaa346b4ab8b8436841e4d82f83e10299a)) + ## [3.2.0](https://github.com/googleapis/python-logging/compare/v3.1.2...v3.2.0) (2022-07-11) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index cf4189b9736b..f879347f88ec 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.0" +version = "3.2.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 39a40c8d8eb2339c85c9417241bbc410b824e341 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 19 Jul 2022 15:57:46 -0700 Subject: [PATCH 649/855] chore: update submodule package versions (#588) --- .../tests/environment/deployable/python/requirements.txt | 8 ++++---- .../envctl/env_scripts/python/appengine_flex_python.sh | 2 +- tests/environment | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt index b3e97e794731..161d979864a2 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt +++ b/packages/google-cloud-logging/tests/environment/deployable/python/requirements.txt @@ -1,8 +1,8 @@ -flask==1.1.2 +Flask==2.1.3 google-cloud-pubsub>=2.8.0 -click==7.1.2 +click==8.1.3 pytz==2021.1 
pandas>=1.1.5 -itsdangerous==2.0.1 +itsdangerous==2.1.2 jinja2==3.0.3 -werkzeug==2.0.2 +werkzeug==2.0.2 \ No newline at end of file diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh index 87fba162af44..8f69bd3aabf3 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/appengine_flex_python.sh @@ -63,7 +63,7 @@ deploy() { env: flex entrypoint: python router.py runtime_config: - python_version: 3 + python_version: 3.7 manual_scaling: instances: 1 env_variables: diff --git a/tests/environment b/tests/environment index b44573e60cb3..44d9b96f596d 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit b44573e60cb3129c8776e7418bbe92f8e5046ea4 +Subproject commit 44d9b96f596da8c6ced67c4edf58dfedc0d5e5c8 From 7b5437dcfe846b4377b93b28e87a787ec1da32f9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Jul 2022 10:47:41 -0700 Subject: [PATCH 650/855] chore(python): fix prerelease session [autoapprove] (#589) Source-Link: https://github.com/googleapis/synthtool/commit/1b9ad7694e44ddb4d9844df55ff7af77b51a4435 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-logging/noxfile.py | 33 ++++++++++--------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 1ce608523524..0eb02fda4c09 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 +# created: 2022-07-25T16:02:49.174178716Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index eaecc6c34f5b..8d1fa373bac7 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -342,7 +342,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -371,12 +372,6 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc @@ -413,11 +408,19 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. 
- if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. - if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) From b323053975326c772503b4e134200ad3b0874ab5 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 25 Jul 2022 14:38:21 -0700 Subject: [PATCH 651/855] chore: update submodule (#590) --- .../tests/environment/envctl/env_scripts/python/functions.sh | 3 ++- packages/google-cloud-logging/tests/environment/noxfile.py | 5 +++-- tests/environment | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh index b4b58e39f5f7..4322f15ab450 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -60,7 +60,8 @@ deploy() { # copy test scripts cp $REPO_ROOT/deployable/python/router.py $TMP_DIR/main.py cp $REPO_ROOT/deployable/python/*.py $TMP_DIR/ - echo "-e ./python-logging" | cat $REPO_ROOT/deployable/python/requirements.txt - > $TMP_DIR/requirements.txt + cat $REPO_ROOT/deployable/python/requirements.txt > $TMP_DIR/requirements.txt + echo -e '\n-e ./python-logging' >> $TMP_DIR/requirements.txt # deploy function pushd $TMP_DIR gcloud functions deploy $SERVICE_NAME \ diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 5e9dc99e377f..98518507c5ff 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -22,6 +22,7 @@ import nox +nox.options.error_on_missing_interpreters = True TEST_CONFIG = { # You can opt out from the test for specific Python versions. @@ -69,7 +70,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: # We also need to specify the rules which are ignored by default: # ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = os.getenv("ENV_TEST_PY_VERSION","3.9") BLACK_PATHS = ["./deployable/python"] BLACK_VERSION = "black==19.10b0" @@ -141,7 +142,7 @@ def tests(session, language, platform): session.skip("RUN_ENV_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") + session.error("Credentials must be set via environment variable") # Use pre-release gRPC for system tests. 
session.install("--pre", "grpcio") diff --git a/tests/environment b/tests/environment index 44d9b96f596d..3845d3c489f4 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 44d9b96f596da8c6ced67c4edf58dfedc0d5e5c8 +Subproject commit 3845d3c489f45e9366b5fb121846acc7893060b1 From 7bd2170da2b09a802dc13c5c8045d84d4c8dc321 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 15:56:55 +0200 Subject: [PATCH 652/855] chore(deps): update all dependencies (#591) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 4 ++-- .../samples/snippets/requirements.txt | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 678dbc8efd1b..ce161d15f1a4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -backoff==1.11.1 -pytest==7.1.1 +backoff==2.1.2 +pytest==7.1.2 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index acdfd427675c..04668b8d64e3 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.0.0 -google-cloud-bigquery==3.0.1 -google-cloud-storage==2.3.0 -google-cloud-pubsub==2.12.0 +google-cloud-logging==3.2.1 +google-cloud-bigquery==3.3.0 +google-cloud-storage==2.4.0 +google-cloud-pubsub==2.13.4 From a1f78a6305a696fed2bfaa415c136771959252e5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Aug 2022 22:02:20 +0200 Subject: [PATCH 653/855] chore(deps): update all dependencies (#593) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 04668b8d64e3..4a2f286a6e4f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.2.1 google-cloud-bigquery==3.3.0 -google-cloud-storage==2.4.0 +google-cloud-storage==2.5.0 google-cloud-pubsub==2.13.4 From 12d5a502a669f7bc4c7ed5aac2c7d0430153931a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 8 Aug 2022 16:27:44 -0400 Subject: [PATCH 654/855] chore: update supported python versions in README (#584) * chore: update supported python versions in README * typo Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/README.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index cdeb4bb1cf90..2618dc37a47d 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -57,11 +57,12 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6 +Python >= 3.7 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Python == 2.7. 
The last version of the library compatible with Python 2.7 is `google-cloud-logging==1.15.1`. +Python == 3.6. The last version of the library compatible with Python 3.6 is `google-cloud-logging==3.1.2`. Mac/Linux From fa7f16b012143dfcf6e63c5d9dece3f49d5bd8a5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Aug 2022 17:38:56 +0200 Subject: [PATCH 655/855] chore(deps): update all dependencies (#596) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4a2f286a6e4f..08c363c0c3fa 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.2.1 -google-cloud-bigquery==3.3.0 +google-cloud-bigquery==3.3.1 google-cloud-storage==2.5.0 google-cloud-pubsub==2.13.4 From b05128115dfa25f83af3b34533ce4828b61a50e0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 10 Aug 2022 11:13:14 -0700 Subject: [PATCH 656/855] test: add performance tests (#595) --- .../.kokoro/performance/common.cfg | 27 ++ .../.kokoro/performance/presubmit.cfg | 1 + .../.kokoro/test-performance.sh | 44 +++ .../tests/performance/noxfile.py | 175 +++++++++ .../tests/performance/test_performance.py | 348 ++++++++++++++++++ 5 files changed, 595 insertions(+) create mode 100644 packages/google-cloud-logging/.kokoro/performance/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/performance/presubmit.cfg create mode 100755 packages/google-cloud-logging/.kokoro/test-performance.sh create mode 100644 packages/google-cloud-logging/tests/performance/noxfile.py create mode 100644 packages/google-cloud-logging/tests/performance/test_performance.py diff --git a/packages/google-cloud-logging/.kokoro/performance/common.cfg b/packages/google-cloud-logging/.kokoro/performance/common.cfg new file mode 100644 index 000000000000..cd19b2c2328d --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/performance/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-performance.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/performance/presubmit.cfg b/packages/google-cloud-logging/.kokoro/performance/presubmit.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/performance/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/test-performance.sh b/packages/google-cloud-logging/.kokoro/test-performance.sh new file mode 100755 index 000000000000..a9a44c3cdebf --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/test-performance.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +set -eox pipefail + +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-logging" +fi + +cd "${PROJECT_ROOT}/tests/performance" + + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + + +# Install nox +python3 -m pip install --upgrade --quiet nox + +# run performance tests +set +e +python3 -m nox +TEST_STATUS_CODE=$? + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$TEST_STATUS_CODE" diff --git a/packages/google-cloud-logging/tests/performance/noxfile.py b/packages/google-cloud-logging/tests/performance/noxfile.py new file mode 100644 index 000000000000..ae045c0c6583 --- /dev/null +++ b/packages/google-cloud-logging/tests/performance/noxfile.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
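The performance noxfile below runs the suite twice, once against the library at `main` and once against `HEAD`, then fails the session when any test slows down past a percentage threshold. A minimal sketch of that comparison step, assuming `main_results` and `head_results` are the test-name-to-seconds dicts parsed from the junitxml files (`regression_failures` is an illustrative name):

def regression_failures(main_results, head_results, percent_threshold=10):
    # Collect tests whose HEAD time exceeds the main-branch time by more
    # than the allowed percentage; brand-new tests are skipped.
    failures = []
    for test, head_time in head_results.items():
        prev = main_results.get(test)
        if prev is None:
            continue
        percent_diff = (head_time - prev) / prev * 100
        if percent_diff >= percent_threshold:
            failures.append((test, percent_diff))
    return failures

print(regression_failures({"test_log": 1.0}, {"test_log": 1.25}))
# [('test_log', 25.0)] -> a 25% slowdown trips the default 10% threshold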
+ +from __future__ import absolute_import +import os +import pathlib +import re +from colorlog.escape_codes import parse_colors + +import nox + + +DEFAULT_PYTHON_VERSION = "3.8" + +PERFORMANCE_TEST_PYTHON_VERSIONS = ["3.8"] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() +REPO_ROOT_DIRECTORY = CURRENT_DIRECTORY.parent.parent + +REPO_URL = "https://github.com/googleapis/python-logging.git" +CLONE_REPO_DIR = "python-logging-main" + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = ["performance", "performance_regression"] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=PERFORMANCE_TEST_PYTHON_VERSIONS) +def performance(session): + """Run the performance test suite.""" + # Use pre-release gRPC for performance tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install( + "mock", + "pandas", + "rich", + "pytest", + "google-cloud-testutils", + ) + session.install("-e", str(REPO_ROOT_DIRECTORY)) + + file_path = f"perf_{session.python}_sponge_log.xml" + session.run( + "py.test", + f"--ignore={CLONE_REPO_DIR}", + "-s", + f"--junitxml={file_path}", + str(CURRENT_DIRECTORY), + *session.posargs, + ) + get_junitxml_results(file_path) + + +@nox.session(python=PERFORMANCE_TEST_PYTHON_VERSIONS) +def print_last_results(session): + """Print results from last performance test session.""" + file_path = f"perf_{session.python}_sponge_log.xml" + get_junitxml_results(file_path) + + +def get_junitxml_results(file_path, print_results=True): + """Print results from specified results file.""" + results = None + if os.path.exists(file_path): + if print_results: + print(f"{file_path} results:") + with open(file_path, "r") as file: + data = file.read().replace("\n", "") + total = 0 + results = {} + for entry in data.split("testcase classname")[1:]: + name = re.search(r'name="+(\w+)', entry)[1] + time = re.search(r'time="+([0-9\.]+)', entry)[1] + total += float(time) + if print_results: + print(f"\t{name}: {time}s") + results[name] = float(time) + if print_results: + print(f"\tTotal: {total:.3f}s") + else: + print(f"error: {file_path} not found") + return results + + +@nox.session(python=PERFORMANCE_TEST_PYTHON_VERSIONS) +def performance_regression(session, percent_threshold=10): + """Check performance against repo main.""" + + clone_dir = os.path.join(CURRENT_DIRECTORY, CLONE_REPO_DIR) + + if not os.path.exists(clone_dir): + print("downloading copy of repo at `main`") + session.run("git", "clone", REPO_URL, CLONE_REPO_DIR) + + # Use pre-release gRPC for performance tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. 
+ session.install( + "mock", + "pandas", + "rich", + "pytest", + "google-cloud-testutils", + ) + + main_file_name = f"main_perf_{session.python}_sponge_log.xml" + head_file_name = f"head_perf_{session.python}_sponge_log.xml" + # test against main + print("testing against library at `main`...") + session.install("-e", str(clone_dir)) + session.run( + "py.test", + f"--ignore={CLONE_REPO_DIR}", + "-s", + f"--junitxml={main_file_name}", + str(CURRENT_DIRECTORY), + *session.posargs, + success_codes=[1, 0], # don't report failures at this step + ) + # test head + print("testing against library at `HEAD`...") + session.install("-e", str(REPO_ROOT_DIRECTORY)) + session.run( + "py.test", + f"--ignore={CLONE_REPO_DIR}", + "-s", + f"--junitxml={head_file_name}", + str(CURRENT_DIRECTORY), + *session.posargs, + success_codes=[1, 0], # don't report failures at this step + ) + # print results + main_results = get_junitxml_results(main_file_name, print_results=False) + head_results = get_junitxml_results(head_file_name, print_results=False) + all_pass = True + for test, time in head_results.items(): + if test in main_results: + prev_time = main_results[test] + diff = time - prev_time + percent_diff = diff / prev_time + test_passes = percent_diff * 100 < percent_threshold + all_pass = all_pass and test_passes + if not test_passes: + color = parse_colors("red") + elif diff > 0: + color = parse_colors("yellow") + else: + color = parse_colors("green") + print( + f"{test}: {color} {diff:+.3f}s ({percent_diff:+.1%}){parse_colors('reset')}" + ) + else: + print(f"{test}: ???") + if not all_pass: + session.error(f"performance degraded >{percent_threshold}%") diff --git a/packages/google-cloud-logging/tests/performance/test_performance.py b/packages/google-cloud-logging/tests/performance/test_performance.py new file mode 100644 index 000000000000..0f9888f7a794 --- /dev/null +++ b/packages/google-cloud-logging/tests/performance/test_performance.py @@ -0,0 +1,348 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
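The test module below times each scenario through a small harness: `instrument_function(*args, profiler=pr)(func)` runs `func(*args)` under a shared `cProfile` profiler and returns the wall-clock time along with the function's result. A self-contained sketch of the same pattern, with `instrument` as an illustrative name:

import cProfile
import time

def instrument(*args, profiler=None, **kwargs):
    def runner(func):
        # Profile and wall-clock a single call, returning (seconds, result).
        if profiler is not None:
            profiler.enable()
        start = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed = time.perf_counter() - start
        if profiler is not None:
            profiler.disable()
        return elapsed, result
    return runner

pr = cProfile.Profile()
elapsed, total = instrument(range(100_000), profiler=pr)(sum)
print(f"sum ran in {elapsed:.6f}s -> {total}")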
+ +import logging +import unittest +import mock +import time +import io + +import pandas as pd +import cProfile +import pstats +from rich.panel import Panel +import rich + +import google.cloud.logging +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2.transports import ( + LoggingServiceV2Transport, +) +from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport +from google.cloud.logging_v2.handlers.transports import SyncTransport +from google.cloud.logging.handlers import CloudLoggingHandler +from google.cloud.logging.handlers import StructuredLogHandler +from google.cloud.logging_v2._http import _LoggingAPI +import google.auth.credentials +from google.cloud.logging_v2 import _gapic + +_small_text_payload = "hello world" +_large_text_payload = "abcfefghi " * 100000 +_small_json_payload = {"json_key": "hello world"} +_large_json_payload = { + f"key_{str(key)}": val + for key, val in zip(range(100), ["abcdefghij" * 10000 for i in range(100)]) +} +_payloads = [ + ("small", "text", _small_text_payload), + ("large", "text", _large_text_payload), + ("small", "json", _small_json_payload), + ("large", "json", _large_json_payload), +] + + +class MockGRPCTransport(LoggingServiceV2Transport): + """ + Mock for grpc transport. + Instead of sending logs to server, introduce artificial delay + """ + + def __init__(self, latency=0.1, **kwargs): + self.latency = latency + self._wrapped_methods = {self.write_log_entries: self.write_log_entries} + + def write_log_entries(self, *args, **kwargs): + time.sleep(self.latency) + + +class MockHttpAPI(_LoggingAPI): + """ + Mock for http API implementation. + Instead of sending logs to server, introduce artificial delay + """ + + def __init__(self, client, latency=0.1): + self._client = client + self.api_request = lambda **kwargs: time.sleep(latency) + + +def instrument_function(*args, **kwargs): + """ + Decorator that takes in a function and returns timing data, + along with the function's output + """ + + def inner(func): + profiler = kwargs.pop("profiler") + profiler.enable() + start = time.perf_counter() + func_output = func(*args, **kwargs) + end = time.perf_counter() + profiler.disable() + exec_time = end - start + return exec_time, func_output + + return inner + + +def _make_client(mock_network=True, use_grpc=True, mock_latency=0.01): + """ + Create and return a new test client to manage writing logs + Can optionally create a real GCP client, or a mock client with artificial network calls + Can choose between grpc and http client implementations + """ + if not mock_network: + # use a real client + client = google.cloud.logging.Client(_use_grpc=use_grpc) + elif use_grpc: + # create a mock grpc client + mock_transport = MockGRPCTransport(latency=mock_latency) + gapic_client = LoggingServiceV2Client(transport=mock_transport) + handwritten_client = mock.Mock() + api = _gapic._LoggingAPI(gapic_client, handwritten_client) + creds = mock.Mock(spec=google.auth.credentials.Credentials) + client = google.cloud.logging.Client(project="my-project", credentials=creds) + client._logging_api = api + else: + # create a mock http client + creds = mock.Mock(spec=google.auth.credentials.Credentials) + client = google.cloud.logging.Client(project="my-project", credentials=creds) + mock_http = MockHttpAPI(client, latency=mock_latency) + client._logging_api = mock_http + logger = client.logger(name="test_logger") + return client, logger + + +class
TestPerformance(unittest.TestCase): + def setUp(self): + # show entire table when printing pandas dataframes + pd.set_option("display.max_colwidth", None) + + def _print_results(self, profile, results, time_limit, title, profile_rows=25): + """ + Print profile and benchmark results after completing performance tests + Returns the combined time for all tests + """ + # print header + print() + rich.print(Panel(f"[blue]{title} Performance Tests")) + # print benchmark results + rich.print("[cyan]Benchmark") + benchmark_df = pd.DataFrame(results).sort_values( + by="exec_time", ascending=False + ) + print(benchmark_df) + total_time = benchmark_df["exec_time"].sum() + if total_time <= time_limit: + rich.print( + f"Total Benchmark Time:[green] {total_time:.2f}s (limit: {time_limit:.1f}s) \u2705" + ) + else: + rich.print( + f"Total Benchmark Time:[red] {total_time:.2f}s (limit: {time_limit:.1f}s) \u274c" + ) + # print profile information + print() + rich.print("[cyan]Breakdown by Function") + result = io.StringIO() + pstats.Stats(profile, stream=result).sort_stats("cumtime").print_stats() + result = result.getvalue() + result = "ncalls" + result.split("ncalls")[-1] + df = pd.DataFrame([x.split(maxsplit=5) for x in result.split("\n")]) + df = df.drop(columns=[1, 2]) + df = df.rename(columns=df.iloc[0]).drop(df.index[0]) + profile_df = df[:profile_rows] + print(profile_df) + return total_time + + def _get_logger(self, name, handler): + """ + Create a fresh logger class with a specified handler + """ + logger = logging.getLogger(name) + logger.handlers.clear() + logger.addHandler(handler) + logger.propagate = False + return logger + + def test_client_init_performance(self, time_limit=0.25): + """ + Test the performance of initializing a new client + + tested variations: + - grpc vs http network protocols + """ + results = [] + pr = cProfile.Profile() + for use_grpc, network_str in [(True, "grpc"), (False, "http")]: + # create clients + exec_time, (client, logger) = instrument_function( + mock_network=True, use_grpc=use_grpc, profiler=pr + )(_make_client) + result_dict = {"protocol": network_str, "exec_time": exec_time} + results.append(result_dict) + # print results dataframe + total_time = self._print_results(pr, results, time_limit, "Client Init") + self.assertLessEqual(total_time, time_limit) + + def test_structured_logging_performance(self, time_limit=10): + """ + Test the performance of StructuredLogHandler + + tested variations: + - text vs json payloads + - small vs large payloads + """ + results = [] + pr = cProfile.Profile() + + def profiled_code(logger, payload, num_logs=100): + for i in range(num_logs): + logger.error(payload) + + stream = io.StringIO() + handler = StructuredLogHandler(stream=stream) + logger = self._get_logger("struct", handler) + for payload_size, payload_type, payload in _payloads: + exec_time, _ = instrument_function(logger, payload, profiler=pr)( + profiled_code + ) + result_dict = { + "payload_type": payload_type, + "payload_size": payload_size, + "exec_time": exec_time, + } + results.append(result_dict) + # print results dataframe + total_time = self._print_results( + pr, results, time_limit, "StructuredLogHandler" + ) + self.assertLessEqual(total_time, time_limit) + + def test_cloud_logging_handler_performance(self, time_limit=30): + """ + Test the performance of CloudLoggingHandler + + tested variations: + - grpc vs http network protocols + - background vs synchronous transport + - text vs json payloads + - small vs large payloads + """ + results = [] + pr =
cProfile.Profile() + + def profiled_code(logger, payload, num_logs=100, flush=False): + for i in range(num_logs): + logger.error(payload) + if flush: + logger.handlers[0].transport.worker.stop() + + for use_grpc, network_str in [(True, "grpc"), (False, "http")]: + # create clients + client, logger = _make_client(mock_network=True, use_grpc=use_grpc) + for payload_size, payload_type, payload in _payloads: + # test cloud logging handler + for transport_str, transport in [ + ("background", BackgroundThreadTransport), + ("sync", SyncTransport), + ]: + handler = CloudLoggingHandler(client, transport=transport) + logger = self._get_logger("cloud", handler) + exec_time, _ = instrument_function( + logger, + payload, + flush=(transport_str == "background"), + profiler=pr, + )(profiled_code) + result_dict = { + "payload_type": payload_type, + "payload_size": payload_size, + "transport_type": transport_str, + "protocol": network_str, + "exec_time": exec_time, + } + results.append(result_dict) + # print results dataframe + total_time = self._print_results(pr, results, time_limit, "CloudLoggingHandler") + self.assertLessEqual(total_time, time_limit) + + def test_logging_performance(self, time_limit=15): + """ + Test the performance of logger + + tested variations: + - grpc vs http network protocols + - text vs json payloads + - small vs large payloads + """ + results = [] + pr = cProfile.Profile() + + def profiled_code(logger, payload, num_logs=100): + for i in range(num_logs): + logger.log(payload) + + for use_grpc, network_str in [(True, "grpc"), (False, "http")]: + # create clients + client, logger = _make_client(mock_network=True, use_grpc=use_grpc) + for payload_size, payload_type, payload in _payloads: + exec_time, _ = instrument_function(logger, payload, profiler=pr)( + profiled_code + ) + result_dict = { + "payload_type": payload_type, + "payload_size": payload_size, + "protocol": network_str, + "exec_time": exec_time, + } + results.append(result_dict) + # print results dataframe + total_time = self._print_results(pr, results, time_limit, "Logger.Log") + self.assertLessEqual(total_time, time_limit) + + def test_batch_performance(self, time_limit=10): + """ + Test the performance of logger + + tested variations: + - grpc vs http network protocols + - text vs json payloads + - small vs large payloads + """ + results = [] + pr = cProfile.Profile() + + def profiled_code(logger, payload, num_logs=100): + with logger.batch() as batch: + for i in range(num_logs): + batch.log(payload) + + for use_grpc, network_str in [(True, "grpc"), (False, "http")]: + # create clients + client, logger = _make_client(mock_network=True, use_grpc=use_grpc) + for payload_size, payload_type, payload in _payloads: + exec_time, _ = instrument_function(logger, payload, profiler=pr)( + profiled_code + ) + result_dict = { + "payload_type": payload_type, + "payload_size": payload_size, + "protocol": network_str, + "exec_time": exec_time, + } + results.append(result_dict) + # print results dataframe + total_time = self._print_results(pr, results, time_limit, "Batch.Log") + self.assertLessEqual(total_time, time_limit) From f81a988f4507b8ca2420188c5d7c5ee39d81ddc3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Aug 2022 10:56:16 -0700 Subject: [PATCH 657/855] chore: added extra variables to each kokoro config (#598) --- .../.kokoro/performance/common.cfg | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/google-cloud-logging/.kokoro/performance/common.cfg 
b/packages/google-cloud-logging/.kokoro/performance/common.cfg index cd19b2c2328d..b3dd88793c9b 100644 --- a/packages/google-cloud-logging/.kokoro/performance/common.cfg +++ b/packages/google-cloud-logging/.kokoro/performance/common.cfg @@ -25,3 +25,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/test-performance.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + From dea3b8e3d052ae99192810f5093f63e58b017284 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Aug 2022 15:16:33 -0400 Subject: [PATCH 658/855] fix(deps): allow protobuf < 5.0.0 (#600) fix(deps): require proto-plus >= 1.22.0 --- packages/google-cloud-logging/setup.py | 4 ++-- packages/google-cloud-logging/testing/constraints-3.7.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index f879347f88ec..46a59dc82f3c 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -34,8 +34,8 @@ "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf >= 3.19.0, <5.0.0dev", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 07d97d4a84a3..2b42f18bc474 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -7,5 +7,5 @@ # Then this file should have foo==1.14.0 google-api-core==1.32.0 google-cloud-core==2.0.0 -proto-plus==1.15.0 +proto-plus==1.22.0 protobuf==3.19.0 From 47b641c071ea6124a18c27c18689add47cd537f5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Aug 2022 13:14:34 +0200 Subject: [PATCH 659/855] chore(deps): update all dependencies (#599) Co-authored-by: Daniel Sanche Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 08c363c0c3fa..84d8fe1d2025 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.2.1 google-cloud-bigquery==3.3.1 google-cloud-storage==2.5.0 -google-cloud-pubsub==2.13.4 +google-cloud-pubsub==2.13.5 From d37de11279f5d36587d90e53a72622e8626748eb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Aug 2022 13:57:20 +0200 Subject: [PATCH 660/855] chore(deps): update dependency google-cloud-pubsub to v2.13.6 (#604) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 84d8fe1d2025..8c149df07d85 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 
@@ google-cloud-logging==3.2.1 google-cloud-bigquery==3.3.1 google-cloud-storage==2.5.0 -google-cloud-pubsub==2.13.5 +google-cloud-pubsub==2.13.6
From a5ae28a17166ddfde3f8a40390fd52fe10ea219d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 12 Aug 2022 10:24:23 -0700 Subject: [PATCH 661/855] chore: added extra variables to owlbot kokoro configs (#603) --- .../.kokoro/common_env_vars.cfg | 19 +++++++++++++++++ .../.kokoro/continuous/common.cfg | 20 ++++++++++++++++++ .../.kokoro/docs/common.cfg | 21 ++++++++++++++++++- .../appengine_flex_container/common.cfg | 15 +++++++++++++ .../appengine_flex_python/common.cfg | 15 +++++++++++++ .../environment/appengine_standard/common.cfg | 15 +++++++++++++ .../.kokoro/environment/cloudrun/common.cfg | 15 +++++++++++++ .../.kokoro/environment/compute/common.cfg | 15 +++++++++++++ .../.kokoro/environment/functions/common.cfg | 15 +++++++++++++ .../environment/functions_37/common.cfg | 15 +++++++++++++ .../.kokoro/environment/kubernetes/common.cfg | 15 +++++++++++++ .../.kokoro/presubmit/common.cfg | 20 ++++++++++++++++++ .../.kokoro/release/common.cfg | 20 ++++++++++++++++++ .../.kokoro/samples/lint/common.cfg | 21 ++++++++++++++++++- .../.kokoro/samples/python3.10/common.cfg | 21 ++++++++++++++++++- .../.kokoro/samples/python3.7/common.cfg | 21 ++++++++++++++++++- .../.kokoro/samples/python3.8/common.cfg | 21 ++++++++++++++++++- .../.kokoro/samples/python3.9/common.cfg | 21 ++++++++++++++++++- packages/google-cloud-logging/owlbot.py | 17 +++++++++++++++ 19 files changed, 336 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/common_env_vars.cfg
diff --git a/packages/google-cloud-logging/.kokoro/common_env_vars.cfg b/packages/google-cloud-logging/.kokoro/common_env_vars.cfg new file mode 100644 index 000000000000..69ba31edf6d0 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/common_env_vars.cfg @@ -0,0 +1,19 @@ + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### +
diff --git a/packages/google-cloud-logging/.kokoro/continuous/common.cfg b/packages/google-cloud-logging/.kokoro/continuous/common.cfg index 2d5b6bac27ca..6745b353755d 100644 --- a/packages/google-cloud-logging/.kokoro/continuous/common.cfg +++ b/packages/google-cloud-logging/.kokoro/continuous/common.cfg @@ -25,3 +25,23 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/build.sh" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### +
diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg index 4e013a7f7404..36e4a6540088 100644 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -63,4 +63,23 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} + +############################################# +# this section merged from
.kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg index 1555bf28f46c..c53ed690f7c7 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg index 9d3506cb97cf..d5ea9288b615 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg index 07242418acce..d31bde925264 100644 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg index a9a26f468471..855a6f6e10c7 100644 --- a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg index ac601ef09d22..519d791cc03a 100644 --- a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg +++ 
b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg index 96b0940fe842..667a285b89c6 100644 --- a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg index 4daa52db767b..2ee8d6fc545b 100644 --- a/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg @@ -37,3 +37,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg index a9fcc33e349f..b778627f488e 100644 --- a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg +++ b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg @@ -32,3 +32,18 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/environment_tests.sh" } + +# add labels to help with testgrid filtering +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + diff --git a/packages/google-cloud-logging/.kokoro/presubmit/common.cfg b/packages/google-cloud-logging/.kokoro/presubmit/common.cfg index 2d5b6bac27ca..6745b353755d 100644 --- a/packages/google-cloud-logging/.kokoro/presubmit/common.cfg +++ b/packages/google-cloud-logging/.kokoro/presubmit/common.cfg @@ -25,3 +25,23 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-logging/.kokoro/build.sh" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 637885e8504f..53583ee71289 100644 --- 
a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -38,3 +38,23 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg index 9123caa63185..feb119185a02 100644 --- a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg @@ -31,4 +31,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg index 61142a001697..733aed7c4478 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg index 3ba076feaa21..5501afd73e93 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg index 144751654145..f3c555136e2f 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg index f6f6943a51fc..fe06e7578e54 100644 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg +++ b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-logging/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index ba8a593e94e4..0ef7dcaa1ebe 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -16,6 +16,7 @@ import synthtool as s from synthtool import gcp from synthtool.languages import python +import os common = gcp.CommonTemplates() @@ -109,3 +110,19 @@ s.shell.run(["nox", "-s", "blacken"], hide_output=False) +# -------------------------------------------------------------------------- +# Modify test configs +# -------------------------------------------------------------------------- + +# add shared environment variables to test configs +tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"] +for subdir in tracked_subdirs: + for path, subdirs, files in os.walk(f".kokoro/{subdir}"): + for name in files: + if name == "common.cfg": + file_path = os.path.join(path, name) + s.move( + ".kokoro/common_env_vars.cfg", + file_path, + merge=lambda src, dst, _, : f"{dst}\n{src}", + ) From 566919921ae25e4c62ed1efd4a56f294b61dc5d1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Aug 2022 14:04:09 -0400 Subject: [PATCH 662/855] chore(main): release 3.2.2 (#601) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/CHANGELOG.md | 8 ++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 9faead1ed850..210951029384 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.2](https://github.com/googleapis/python-logging/compare/v3.2.1...v3.2.2) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#600](https://github.com/googleapis/python-logging/issues/600)) ([8495dac](https://github.com/googleapis/python-logging/commit/8495dac2f303dae74b0143d7c95189d0f2d180b8)) +* **deps:** require proto-plus >= 1.22.0 ([8495dac](https://github.com/googleapis/python-logging/commit/8495dac2f303dae74b0143d7c95189d0f2d180b8)) + ## [3.2.1](https://github.com/googleapis/python-logging/compare/v3.2.0...v3.2.1) (2022-07-13) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 46a59dc82f3c..060a8a823597 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.1" +version = "3.2.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 4e59092232b0b23ca9af4b7e44110b5ec121bc2a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Aug 2022 17:15:01 
From 566919921ae25e4c62ed1efd4a56f294b61dc5d1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Aug 2022 14:04:09 -0400 Subject: [PATCH 662/855] chore(main): release 3.2.2 (#601) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/CHANGELOG.md | 8 ++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 9faead1ed850..210951029384 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.2](https://github.com/googleapis/python-logging/compare/v3.2.1...v3.2.2) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#600](https://github.com/googleapis/python-logging/issues/600)) ([8495dac](https://github.com/googleapis/python-logging/commit/8495dac2f303dae74b0143d7c95189d0f2d180b8)) +* **deps:** require proto-plus >= 1.22.0 ([8495dac](https://github.com/googleapis/python-logging/commit/8495dac2f303dae74b0143d7c95189d0f2d180b8)) + ## [3.2.1](https://github.com/googleapis/python-logging/compare/v3.2.0...v3.2.1) (2022-07-13)
diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 46a59dc82f3c..060a8a823597 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.1" +version = "3.2.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
From 4e59092232b0b23ca9af4b7e44110b5ec121bc2a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Aug 2022 17:15:01 +0200 Subject: [PATCH 663/855] chore(deps): update dependency google-cloud-logging to v3.2.2 (#608) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8c149df07d85..0052e92af309 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.2.1 +google-cloud-logging==3.2.2 google-cloud-bigquery==3.3.1 google-cloud-storage==2.5.0 google-cloud-pubsub==2.13.6
From afc3fc06ccf2970b3c71882850f80e5ebd6bb8c4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Aug 2022 18:31:48 +0200 Subject: [PATCH 664/855] chore(deps): update dependency google-cloud-bigquery to v3.3.2 (#609) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0052e92af309..e8d879e300db 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.2.2 -google-cloud-bigquery==3.3.1 +google-cloud-bigquery==3.3.2 google-cloud-storage==2.5.0 google-cloud-pubsub==2.13.6
From c6b9398ee4a87c93b366da20d81321aabce89c48 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 15:07:39 -0400 Subject: [PATCH 665/855] chore(python): exclude path in renovate.json [autoapprove] (#611) Source-Link: https://github.com/googleapis/synthtool/commit/69fabaee9eca28af7ecaa02c86895e606fbbebd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/publish-docs.sh | 4 +- .../google-cloud-logging/.kokoro/release.sh | 5 +- .../.kokoro/requirements.in | 8 + .../.kokoro/requirements.txt | 464 ++++++++++++++++++ packages/google-cloud-logging/renovate.json | 2 +- 6 files changed, 477 insertions(+), 10 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/requirements.in create mode 100644 packages/google-cloud-logging/.kokoro/requirements.txt
diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 0eb02fda4c09..c6acdf3f90c4 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License.
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 -# created: 2022-07-25T16:02:49.174178716Z + digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 +# created: 2022-08-24T17:07:22.006876712Z diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index 8acb14e802b0..1c4d62370042 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 8a4d1f432338..59f8a40b9eee 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in new file mode 100644 index 000000000000..7718391a34d7 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt new file mode 100644 index 000000000000..c4b824f247e3 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -0,0 +1,464 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + 
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + 
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.6.0 \ + --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ + --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.5 \ + --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ + --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv 
+gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.6 \ + --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ + --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + 
--hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.8.2 \ + --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ + --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + 
--hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine 
+platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + 
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.3 \ + --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ + --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + 
--hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index bcd0e005a2c3..bb005193f6ed 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] }, From a7370e1ac45fd58084488af345a5811f67e932fb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 15:39:34 -0400 Subject: [PATCH 666/855] chore(python): exclude `grpcio==1.49.0rc1` in tests (#612) Source-Link: https://github.com/googleapis/synthtool/commit/c4dd5953003d13b239f872d329c3146586bb417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-logging/noxfile.py | 7 +++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index c6acdf3f90c4..23e106b65770 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 -# created: 2022-08-24T17:07:22.006876712Z + digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 +# created: 2022-08-29T17:28:30.441852797Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index c4b824f247e3..4b29ef247bed 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -136,9 +136,9 @@ cryptography==37.0.4 \ # via # gcp-releasetool # secretstorage -distlib==0.3.5 \ - --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ - --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv docutils==0.19 \ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 8d1fa373bac7..e2bd78bd5d37 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -197,7 +197,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -377,7 +379,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 3bfc4927d4f72f1bdc7ab5918015550f27b7e1ae Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 19:12:23 +0000 Subject: [PATCH 667/855] ci(python): fix path to requirements.txt in release script (#614) Source-Link: https://github.com/googleapis/synthtool/commit/fdba3ed145bdb2f4f3eff434d4284b1d03b80d34 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 --- .../.github/.OwlBot.lock.yaml | 3 +-- .../google-cloud-logging/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 24 +++++++++---------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 23e106b65770..0d9eb2af9352 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
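The `grpcio!=1.49.0rc1` pin introduced above combines pip's `--pre` flag (opting in to pre-releases) with a PEP 440 exclusion specifier, so the system tests keep tracking gRPC release candidates while skipping the one known-bad build. A self-contained illustration of how that specifier behaves, using the `packaging` library (the extra versions checked are examples only):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet("!=1.49.0rc1")

    # Pre-releases only match when explicitly allowed, which is what
    # `pip install --pre` opts into; the specifier then filters out
    # exactly the excluded release candidate.
    print(spec.contains(Version("1.49.0rc1"), prereleases=True))  # False
    print(spec.contains(Version("1.49.0rc2"), prereleases=True))  # True
    print(spec.contains(Version("1.48.0"), prereleases=True))     # True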
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 -# created: 2022-08-29T17:28:30.441852797Z + digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 59f8a40b9eee..37524859b662 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r .kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-logging/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 4b29ef247bed..92b2f727e777 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -100,9 +100,9 @@ click==8.0.4 \ # via # gcp-docuploader # gcp-releasetool -colorlog==6.6.0 \ - --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ - --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 # via # gcp-docuploader # nox @@ -152,9 +152,9 @@ gcp-docuploader==0.6.3 \ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b # via -r requirements.in -gcp-releasetool==1.8.6 \ - --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ - --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d # via -r requirements.in google-api-core==2.8.2 \ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ @@ -251,9 +251,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.8.2 \ - --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ - --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db # via # gcp-releasetool # twine @@ -440,9 +440,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.3 \ - --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ - --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 # via nox webencodings==0.5.1 \ 
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ From f34ff47d37956f9ebb7d766b55a5367098b37710 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 00:50:29 +0000 Subject: [PATCH 668/855] chore(python): update .kokoro/requirements.txt (#615) Source-Link: https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/.kokoro/requirements.txt | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 0d9eb2af9352..2fa0f7c4fe15 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 92b2f727e777..385f2d4d6106 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -241,6 +241,10 @@ importlib-metadata==4.12.0 \ # via # -r requirements.in # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -299,6 +303,10 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c From 36f27755e62639910a5082a476c02730d9f7c984 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 20:06:40 +0000 Subject: [PATCH 669/855] chore(python): exclude setup.py in renovate config (#618) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 2fa0f7c4fe15..b8dcb4a4af99 
100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index bb005193f6ed..dde963098619 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] }, From 9e6441835734b046d7be089bebbf629539d4ba60 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 2 Sep 2022 17:07:22 -0700 Subject: [PATCH 670/855] chore(tests): update python version used for environment tests (#620) --- .../google-cloud-logging/.kokoro/environment_tests.sh | 11 +++++------ .../performance/{presubmit.cfg => performance.cfg} | 0 .../tests/performance/test_performance.py | 6 +++--- 3 files changed, 8 insertions(+), 9 deletions(-) rename packages/google-cloud-logging/.kokoro/performance/{presubmit.cfg => performance.cfg} (100%) diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index 2b6fa5177e69..c80c51d65c56 100755 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -49,12 +49,11 @@ gcloud config set compute/zone us-central1-b # authenticate docker gcloud auth configure-docker -q -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation - # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +virtualenv .venv +source .venv/bin/activate +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # Install kubectl if [[ "${ENVIRONMENT}" == "kubernetes" ]]; then @@ -73,7 +72,7 @@ echo $ENVCTL_ID # Run the specified environment test set +e -python3.6 -m nox --session "tests(language='python', platform='$ENVIRONMENT')" +python3 -m nox --session "tests(language='python', platform='$ENVIRONMENT')" TEST_STATUS_CODE=$? 
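The `set +e` ... `TEST_STATUS_CODE=$?` sequence above exists so a failing test run cannot abort the script before the environment is torn down; the captured status is propagated once cleanup finishes. A rough Python analogue of the same pattern (the teardown helper is a placeholder, not part of this patch):

    import subprocess
    import sys

    def destroy_resources():
        # Placeholder for the script's resource teardown step.
        pass

    # check=False is the analogue of `set +e`: a non-zero exit status is
    # recorded instead of raising immediately.
    result = subprocess.run(["python3", "-m", "nox", "--session", "tests"], check=False)
    test_status = result.returncode

    destroy_resources()
    sys.exit(test_status)  # propagate the captured status after cleanup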
# destroy resources diff --git a/packages/google-cloud-logging/.kokoro/performance/presubmit.cfg b/packages/google-cloud-logging/.kokoro/performance/performance.cfg similarity index 100% rename from packages/google-cloud-logging/.kokoro/performance/presubmit.cfg rename to packages/google-cloud-logging/.kokoro/performance/performance.cfg diff --git a/packages/google-cloud-logging/tests/performance/test_performance.py b/packages/google-cloud-logging/tests/performance/test_performance.py index 0f9888f7a794..d028a49e86f5 100644 --- a/packages/google-cloud-logging/tests/performance/test_performance.py +++ b/packages/google-cloud-logging/tests/performance/test_performance.py @@ -196,7 +196,7 @@ def test_client_init_performance(self, time_limit=0.25): total_time = self._print_results(pr, results, time_limit, "Client Init") self.assertLessEqual(total_time, time_limit) - def test_structured_logging_performance(self, time_limit=10): + def test_structured_logging_performance(self, time_limit=12): """ Test the performance of StructuredLoggingHandler @@ -278,7 +278,7 @@ def profiled_code(logger, payload, num_logs=100, flush=False): total_time = self._print_results(pr, results, time_limit, "CloudLoggingHandler") self.assertLessEqual(total_time, time_limit) - def test_logging_performance(self, time_limit=15): + def test_logging_performance(self, time_limit=20): """ Test the performance of logger @@ -312,7 +312,7 @@ def profiled_code(logger, payload, num_logs=100): total_time = self._print_results(pr, results, time_limit, "Logger.Log") self.assertLessEqual(total_time, time_limit) - def test_batch_performance(self, time_limit=10): + def test_batch_performance(self, time_limit=12): """ Test the performance of logger From e2efc478814789fef1467ce9d2fb295f66ea506e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:48:09 +0200 Subject: [PATCH 671/855] chore(deps): update dependency pytest to v7.1.3 (#619) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index ce161d15f1a4..6759e75e03a0 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.1.2 -pytest==7.1.2 +pytest==7.1.3 From fe17559209fb06fa13a295c53da41b616345a42b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 14:18:23 +0000 Subject: [PATCH 672/855] chore: Bump gapic-generator-python version to 1.3.0 (#621) - [ ] Regenerate this pull request now. 
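Stepping back to the performance-test changes above: each of those tests profiles the code under test with cProfile and asserts that the total runtime stays under the (now loosened) `time_limit` budget. A condensed sketch of that pattern follows; the profiled body is a stand-in, not the library's actual workload:

    import cProfile
    import pstats
    import unittest

    class PerformanceTest(unittest.TestCase):
        def test_logging_performance(self, time_limit=20):
            pr = cProfile.Profile()
            pr.enable()
            sum(i * i for i in range(100_000))  # stand-in workload
            pr.disable()
            # total_tt is the profiler's total measured runtime in seconds.
            total_time = pstats.Stats(pr).total_tt
            self.assertLessEqual(total_time, time_limit)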
PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 --- .../config_service_v2/async_client.py | 182 +++ .../services/config_service_v2/client.py | 182 +++ .../logging_service_v2/async_client.py | 46 +- .../services/logging_service_v2/client.py | 46 +- .../metrics_service_v2/async_client.py | 35 + .../services/metrics_service_v2/client.py | 35 + packages/google-cloud-logging/mypy.ini | 2 +- ...onfig_service_v2_copy_log_entries_async.py | 7 + ...config_service_v2_copy_log_entries_sync.py | 7 + ...d_config_service_v2_create_bucket_async.py | 7 + ...ed_config_service_v2_create_bucket_sync.py | 7 + ...onfig_service_v2_create_exclusion_async.py | 7 + ...config_service_v2_create_exclusion_sync.py | 7 + ...ted_config_service_v2_create_sink_async.py | 7 + ...ated_config_service_v2_create_sink_sync.py | 7 + ...ted_config_service_v2_create_view_async.py | 7 + ...ated_config_service_v2_create_view_sync.py | 7 + ...d_config_service_v2_delete_bucket_async.py | 7 + ...ed_config_service_v2_delete_bucket_sync.py | 7 + ...onfig_service_v2_delete_exclusion_async.py | 7 + ...config_service_v2_delete_exclusion_sync.py | 7 + ...ted_config_service_v2_delete_sink_async.py | 7 + ...ated_config_service_v2_delete_sink_sync.py | 7 + ...ted_config_service_v2_delete_view_async.py | 7 + ...ated_config_service_v2_delete_view_sync.py | 7 + ...ated_config_service_v2_get_bucket_async.py | 7 + ...rated_config_service_v2_get_bucket_sync.py | 7 + ...nfig_service_v2_get_cmek_settings_async.py | 7 + ...onfig_service_v2_get_cmek_settings_sync.py | 7 + ...d_config_service_v2_get_exclusion_async.py | 7 + ...ed_config_service_v2_get_exclusion_sync.py | 7 + ...ed_config_service_v2_get_settings_async.py | 7 + ...ted_config_service_v2_get_settings_sync.py | 7 + ...erated_config_service_v2_get_sink_async.py | 7 + ...nerated_config_service_v2_get_sink_sync.py | 7 + ...erated_config_service_v2_get_view_async.py | 7 + ...nerated_config_service_v2_get_view_sync.py | 7 + ...ed_config_service_v2_list_buckets_async.py | 7 + ...ted_config_service_v2_list_buckets_sync.py | 7 + ...config_service_v2_list_exclusions_async.py | 7 + ..._config_service_v2_list_exclusions_sync.py | 7 + ...ated_config_service_v2_list_sinks_async.py | 7 + ...rated_config_service_v2_list_sinks_sync.py | 7 + ...ated_config_service_v2_list_views_async.py | 7 + ...rated_config_service_v2_list_views_sync.py | 7 + ...config_service_v2_undelete_bucket_async.py | 7 + ..._config_service_v2_undelete_bucket_sync.py | 7 + ...d_config_service_v2_update_bucket_async.py | 7 + ...ed_config_service_v2_update_bucket_sync.py | 7 + ...g_service_v2_update_cmek_settings_async.py | 7 + ...ig_service_v2_update_cmek_settings_sync.py | 7 + ...onfig_service_v2_update_exclusion_async.py | 7 + ...config_service_v2_update_exclusion_sync.py | 7 + ...config_service_v2_update_settings_async.py | 7 + ..._config_service_v2_update_settings_sync.py | 7 + ...ted_config_service_v2_update_sink_async.py | 7 + ...ated_config_service_v2_update_sink_sync.py | 7 + ...ted_config_service_v2_update_view_async.py | 7 + ...ated_config_service_v2_update_view_sync.py | 7 + ...ted_logging_service_v2_delete_log_async.py | 7 + ...ated_logging_service_v2_delete_log_sync.py | 7 + ...gging_service_v2_list_log_entries_async.py | 9 +- 
...ogging_service_v2_list_log_entries_sync.py | 9 +- ...ated_logging_service_v2_list_logs_async.py | 7 + ...rated_logging_service_v2_list_logs_sync.py | 7 + ...st_monitored_resource_descriptors_async.py | 7 + ...ist_monitored_resource_descriptors_sync.py | 7 + ...gging_service_v2_tail_log_entries_async.py | 9 +- ...ogging_service_v2_tail_log_entries_sync.py | 9 +- ...ging_service_v2_write_log_entries_async.py | 7 + ...gging_service_v2_write_log_entries_sync.py | 7 + ...rics_service_v2_create_log_metric_async.py | 7 + ...trics_service_v2_create_log_metric_sync.py | 7 + ...rics_service_v2_delete_log_metric_async.py | 7 + ...trics_service_v2_delete_log_metric_sync.py | 7 + ...metrics_service_v2_get_log_metric_async.py | 7 + ..._metrics_service_v2_get_log_metric_sync.py | 7 + ...trics_service_v2_list_log_metrics_async.py | 7 + ...etrics_service_v2_list_log_metrics_sync.py | 7 + ...rics_service_v2_update_log_metric_async.py | 7 + ...trics_service_v2_update_log_metric_sync.py | 7 + .../snippet_metadata_logging_v2.json | 1424 ++++++++--------- 82 files changed, 1757 insertions(+), 721 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 0a325ce86daa..80f740284e3c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -233,6 +233,13 @@ async def list_buckets( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_buckets(): @@ -349,6 +356,13 @@ async def get_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_bucket(): @@ -423,6 +437,13 @@ async def create_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_bucket(): @@ -507,6 +528,13 @@ async def update_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_bucket(): @@ -584,6 +612,13 @@ async def delete_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_bucket(): @@ -646,6 +681,13 @@ async def undelete_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_undelete_bucket(): @@ -707,6 +749,13 @@ async def list_views( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_views(): @@ -815,6 +864,13 @@ async def get_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_view(): @@ -888,6 +944,13 @@ async def create_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_view(): @@ -965,6 +1028,13 @@ async def update_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_view(): @@ -1040,6 +1110,13 @@ async def delete_view( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_view(): @@ -1101,6 +1178,13 @@ async def list_sinks( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_sinks(): @@ -1225,6 +1309,13 @@ async def get_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_sink(): @@ -1354,6 +1445,13 @@ async def create_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_sink(): @@ -1486,6 +1584,13 @@ async def update_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_sink(): @@ -1650,6 +1755,13 @@ async def delete_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_sink(): @@ -1757,6 +1869,13 @@ async def list_exclusions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_exclusions(): @@ -1881,6 +2000,13 @@ async def get_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_exclusion(): @@ -2004,6 +2130,13 @@ async def create_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_exclusion(): @@ -2133,6 +2266,13 @@ async def update_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_exclusion(): @@ -2273,6 +2413,13 @@ async def delete_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_exclusion(): @@ -2385,6 +2532,13 @@ async def get_cmek_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_cmek_settings(): @@ -2485,6 +2639,13 @@ async def update_cmek_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_cmek_settings(): @@ -2582,6 +2743,13 @@ async def get_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_settings(): @@ -2716,6 +2884,13 @@ async def update_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_settings(): @@ -2834,6 +3009,13 @@ async def copy_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_copy_log_entries(): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index edc5d53579be..b905d174112e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -532,6 +532,13 @@ def list_buckets( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_buckets(): @@ -648,6 +655,13 @@ def get_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_bucket(): @@ -723,6 +737,13 @@ def create_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_bucket(): @@ -808,6 +829,13 @@ def update_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_bucket(): @@ -886,6 +914,13 @@ def delete_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_bucket(): @@ -949,6 +984,13 @@ def undelete_bucket( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_undelete_bucket(): @@ -1011,6 +1053,13 @@ def list_views( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_views(): @@ -1119,6 +1168,13 @@ def get_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_view(): @@ -1193,6 +1249,13 @@ def create_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_view(): @@ -1271,6 +1334,13 @@ def update_view( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_view(): @@ -1347,6 +1417,13 @@ def delete_view( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_view(): @@ -1409,6 +1486,13 @@ def list_sinks( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_sinks(): @@ -1522,6 +1606,13 @@ def get_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_sink(): @@ -1640,6 +1731,13 @@ def create_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_sink(): @@ -1772,6 +1870,13 @@ def update_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_sink(): @@ -1925,6 +2030,13 @@ def delete_sink( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_sink(): @@ -2021,6 +2133,13 @@ def list_exclusions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_exclusions(): @@ -2134,6 +2253,13 @@ def get_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_exclusion(): @@ -2246,6 +2372,13 @@ def create_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_exclusion(): @@ -2375,6 +2508,13 @@ def update_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_exclusion(): @@ -2515,6 +2655,13 @@ def delete_exclusion( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_exclusion(): @@ -2616,6 +2763,13 @@ def get_cmek_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_cmek_settings(): @@ -2717,6 +2871,13 @@ def update_cmek_settings( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_cmek_settings(): @@ -2815,6 +2976,13 @@ def get_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_settings(): @@ -2949,6 +3117,13 @@ def update_settings( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_settings(): @@ -3067,6 +3242,13 @@ def copy_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_copy_log_entries(): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 32a1d1808cc6..d0c9cc06bea5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -232,6 +232,13 @@ async def delete_log( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_log(): @@ -345,6 +352,13 @@ async def write_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_write_log_entries(): @@ -536,6 +550,13 @@ async def list_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_log_entries(): @@ -544,7 +565,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request @@ -693,6 +714,13 @@ async def list_monitored_resource_descriptors( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_monitored_resource_descriptors(): @@ -786,6 +814,13 @@ async def list_logs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_logs(): @@ -908,6 +943,13 @@ def tail_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_tail_log_entries(): @@ -916,7 +958,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a2d00ad87974..930f94c619e5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -451,6 +451,13 @@ def delete_log( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_log(): @@ -553,6 +560,13 @@ def write_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_write_log_entries(): @@ -732,6 +746,13 @@ def list_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_log_entries(): @@ -740,7 +761,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request @@ -878,6 +899,13 @@ def list_monitored_resource_descriptors( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_monitored_resource_descriptors(): @@ -963,6 +991,13 @@ def list_logs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_logs(): @@ -1074,6 +1109,13 @@ def tail_log_entries( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_tail_log_entries(): @@ -1082,7 +1124,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 816b70695642..2e8996404ab7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -218,6 +218,13 @@ async def list_log_metrics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_log_metrics(): @@ -339,6 +346,13 @@ async def get_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_log_metric(): @@ -459,6 +473,13 @@ async def create_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_log_metric(): @@ -584,6 +605,13 @@ async def update_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_log_metric(): @@ -720,6 +748,13 @@ async def delete_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_log_metric(): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index fd63ddfb0cd5..9a707b2ac3dd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -438,6 +438,13 @@ def list_log_metrics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_log_metrics(): @@ -548,6 +555,13 @@ def get_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_log_metric(): @@ -657,6 +671,13 @@ def create_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_log_metric(): @@ -782,6 +803,13 @@ def update_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_log_metric(): @@ -907,6 +935,13 @@ def delete_log_metric( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
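
The metric samples these headers precede all follow the same create/get/update/delete pattern. Filled in with concrete placeholder values, creating a logs-based metric looks something like:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.MetricsServiceV2Client()
    metric = logging_v2.LogMetric(
        name="error_count",        # placeholder metric ID
        filter="severity>=ERROR",  # counts matching log entries
    )
    created = client.create_log_metric(
        parent="projects/my-project",  # placeholder project
        metric=metric,
    )
    print(created.name)
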
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_log_metric(): diff --git a/packages/google-cloud-logging/mypy.ini b/packages/google-cloud-logging/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/google-cloud-logging/mypy.ini +++ b/packages/google-cloud-logging/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index abe149bd66c2..9d97df8749d6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index 90eb5354e226..949dde286ac7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 1c5c329c802c..f399b226f588 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
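
Buried between the sample headers, the ``mypy.ini`` hunk raises ``python_version`` from 3.6 to 3.7. To reproduce the check locally, mypy can also be driven from Python through its public ``api`` module; a sketch, assuming mypy is installed and with an illustrative package path:

.. code-block:: python

    from mypy import api

    # Equivalent to running `mypy google/cloud/logging_v2` from the
    # package root, with python_version = 3.7 picked up from mypy.ini.
    stdout, stderr, exit_status = api.run(["google/cloud/logging_v2"])
    print(stdout)
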
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 9b309322914f..374173f52b55 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 5be1a9ad3da5..8f14d777f8cb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 3b57560f3cec..11bd92ad58fa 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
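
``CopyLogEntries``, whose samples open this run of files, is a long-running operation. A hedged sketch of a complete call; the source bucket name and the Cloud Storage destination format below are placeholders based on the public API docs, not values taken from this patch:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    request = logging_v2.CopyLogEntriesRequest(
        # Placeholder source log bucket and Cloud Storage destination.
        name="projects/my-project/locations/global/buckets/my-bucket",
        destination="storage.googleapis.com/my-archive-bucket",
    )
    operation = client.copy_log_entries(request=request)
    # Block until the long-running operation completes.
    response = operation.result()
    print(response)
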
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 789598d4c7f2..0fc007986813 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index e22bc60555c5..d6d2a0d7db7a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 499d4eeba920..016f3e70b081 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
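
For the ``CreateSink`` samples just above, here is a version with the placeholders filled in; the project, sink name, and destination are all illustrative:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    sink = logging_v2.LogSink(
        name="my-sink",                                  # placeholder
        destination="storage.googleapis.com/my-bucket",  # placeholder
        filter="severity>=WARNING",
    )
    created = client.create_sink(parent="projects/my-project", sink=sink)
    # The service account that must be granted access to the destination.
    print(created.writer_identity)
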
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 8e6425d712b1..3ee9567f1a8c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index def3e5abcb4a..6633aa3c4344 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index 64c95c992455..ec39ca1a09b7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 4c042c3bec23..9d6146c57c8b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index dc313658435a..bc051e4e025e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index fe5acb523f2d..62a27ea1d1a8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index d9ddc66a02a4..fa7d6f6e7b91 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index fd1eee969886..2fed68bbcbe9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 1169b400046e..53bda04c937e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 4b964aa7435d..1a91ecdd63d9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 1b299dd569ac..337050c45a7e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 356f0db9fbf1..6998c4b4af97 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
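
``GetBucket`` takes only a request object, with no flattened fields. Since every project has the built-in ``_Default`` and ``_Required`` buckets, a runnable sketch with a placeholder project is:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    bucket = client.get_bucket(
        request=logging_v2.GetBucketRequest(
            name="projects/my-project/locations/global/buckets/_Default",
        )
    )
    print(bucket.retention_days)
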
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 83dfc8d2c997..a91ec6042d7a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 27a7644458dc..83a5bcda4016 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index 980914dac1cb..913ec9968d8c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
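
The CMEK samples likewise take a bare request. Per the public docs, CMEK configuration applies at the organization level, so the resource name below uses a placeholder organization ID; treat the format as an assumption to verify against the API reference:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    settings = client.get_cmek_settings(
        request=logging_v2.GetCmekSettingsRequest(
            name="organizations/123456789/cmekSettings",  # placeholder org
        )
    )
    print(settings.kms_key_name)
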
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index 0da6e2a7ec06..c095649bc27a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index ccbc05d502b3..2b5350a5a5c7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index fa3d7cf7f5b6..0739e175be61 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 48581e4706bc..ff34156f3826 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 9f26a54178fa..5de975ecc9cd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index f88c15d2e30f..a9818b572059 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 4e3bfea5582b..614e9ec66b6c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListBuckets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 3522c4c8979d..0c7912f7f038 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 788436d6ad15..a1aa5ed6f4dc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListExclusions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 8ea9407a1066..8cdac9d12438 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index b43b5682a28e..ea81f5c8b29c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListSinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 235395e6d593..05a35323d6bb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListSinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 27910c9f7cfa..c39fb2d9e0a1 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListViews_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 2e5b6e53b3d1..270e5c14578f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListViews_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index 020866b755b9..eeca015d1b41 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
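
``ListViews`` pages through the views of a single log bucket. With placeholder names, iterating the returned pager looks like:

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    request = logging_v2.ListViewsRequest(
        # Placeholder bucket; _Default exists in every project.
        parent="projects/my-project/locations/global/buckets/_Default",
    )
    for view in client.list_views(request=request):
        print(view.name)
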
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 0dfb39a11a07..6355e2a62226 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 78245abfc6f0..450f5662a3fc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index c285fd542862..81aa0fb920f4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 8d49b85e7b8c..e1a2421fec56 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 7b04208d4c58..1828ce84c3a7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index d06cf80d4f63..873fecb737b1 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index c0dba34ccb9e..958572b9301a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index dba1d4e8eb90..531b431f1b52 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index f70f520361f1..3381a4aadc71 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index c46b9ab427d2..400d57897c58 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index 9639ece285e4..cc3a1be435c6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 250d3f9dc881..8ccc9f3c3319 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 1397848800fe..33014bf23658 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 6338b9abcbd0..209dd510d93c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 36280057bfe7..86f6c9ccb3aa 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 4a8692b04e47..d968835f75a2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 @@ -33,7 +40,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 062075af9091..d200793fa90c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
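+#   For example, an in-range value for this sample's resource_names field is
+#   a real project resource ("my-project" is a placeholder project ID):
+#       request = logging_v2.ListLogEntriesRequest(
+#           resource_names=['projects/my-project'],
+#       )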
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 @@ -33,7 +40,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index fb0106199bd0..eebad0bfd01e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 0f775572f57b..8d132377e740 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index b8f3397012de..4b99bc6f038d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index 736d64d614d1..c54a2201fc14 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 3e77920f81e1..f1afa6fae199 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 @@ -33,7 +40,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index ee1108b33f02..29461c6f8613 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 @@ -33,7 +40,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 28025d777037..bd7954828811 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 31569811c655..d28fbe7a037c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 96690c2c36b4..d351980e9679 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index 051694d31470..bb9a56579e6f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index bf2ee5e4abc2..54a73b14122e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index eae109200224..73276ef182fb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index cea94a356e29..d6ef03c486b6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index eea36222a80b..6ab2bb57fd83 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 9dac7793736b..92c92395a253 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
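+#   For example (a sketch of the next point; the endpoint shown is a
+#   placeholder, not a confirmed regional hostname), an endpoint override can
+#   be supplied through client_options when constructing the client:
+#       from google.api_core.client_options import ClientOptions
+#       client = logging_v2.MetricsServiceV2AsyncClient(
+#           client_options=ClientOptions(
+#               api_endpoint="us-central1-logging.googleapis.com",
+#           ),
+#       )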
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 97b3c2f1364b..5a3e646926f7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index c94c70e76264..9a794a4ccb6d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index bcdff32693bc..39a6e72e3072 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json index 657563cd6dec..3c6bc46bf854 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -55,33 +55,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -131,33 +131,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -208,33 +208,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -284,33 +284,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -369,33 +369,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 
57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -453,33 +453,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -538,33 +538,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -622,33 +622,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -699,33 +699,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -775,33 +775,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -851,31 +851,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ 
-924,31 +924,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1002,31 +1002,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1079,31 +1079,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1157,31 +1157,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1234,31 +1234,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1308,31 +1308,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1381,31 +1381,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - 
"end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1456,33 +1456,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1532,33 +1532,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1609,33 +1609,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1685,33 +1685,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1766,33 +1766,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1846,33 +1846,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 
45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1927,33 +1927,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2007,33 +2007,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2088,33 +2088,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2168,33 +2168,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2245,33 +2245,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2321,33 +2321,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, 
+ "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2402,33 +2402,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2482,33 +2482,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2563,33 +2563,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2643,33 +2643,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2724,33 +2724,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2804,33 +2804,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 
34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2885,33 +2885,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2965,33 +2965,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3041,31 +3041,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3114,31 +3114,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3189,33 +3189,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3265,33 +3265,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 
39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3342,33 +3342,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3418,33 +3418,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3507,33 +3507,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3595,33 +3595,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3680,33 +3680,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3764,33 +3764,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3853,33 +3853,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3941,33 +3941,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -4018,33 +4018,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4094,33 +4094,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4174,31 +4174,31 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -4251,31 +4251,31 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + 
"start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -4338,33 +4338,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4426,33 +4426,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4507,33 +4507,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4587,33 +4587,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4664,33 +4664,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4740,33 +4740,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, 
- "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4817,33 +4817,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -4893,33 +4893,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -4986,33 +4986,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -5078,33 +5078,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -5163,33 +5163,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -5247,33 +5247,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -5327,31 +5327,31 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -5404,31 +5404,31 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -5483,33 +5483,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5563,33 +5563,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5644,33 +5644,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5724,33 +5724,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + 
"start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5809,33 +5809,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -5893,33 +5893,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], From e18501c6b738e5a8833febd5223dee67962532ec Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Sep 2022 13:48:17 +0000 Subject: [PATCH 673/855] chore: use gapic-generator-python 1.3.1 (#622) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 472772457 Source-Link: https://github.com/googleapis/googleapis/commit/855b74d203deeb0f7a0215f9454cdde62a1f9b86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b64b1e7da3e138f15ca361552ef0545e54891b4f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjY0YjFlN2RhM2UxMzhmMTVjYTM2MTU1MmVmMDU0NWU1NDg5MWI0ZiJ9 --- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 4 ++-- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 4 ++-- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 8b14284347da..64c02c806c62 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 6eeac2574103..3460907cb32d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER 
+except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 6a4041a1df52..a1cc43167c46 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc From 77decde27fca6b0ce68b0ef6ce17a677bae19c41 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 10:39:48 -0400 Subject: [PATCH 674/855] chore: use gapic generator python 1.4.1 (#627) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: integrate gapic-generator-python-1.4.1 and enable more py_test targets PiperOrigin-RevId: 473833416 Source-Link: https://github.com/googleapis/googleapis/commit/565a5508869557a3228b871101e4e4ebd8f93d11 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ee1a06c6de3ca8b843572c1fde0548f84236989 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWVlMWEwNmM2ZGUzY2E4Yjg0MzU3MmMxZmRlMDU0OGY4NDIzNjk4OSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 64c02c806c62..be77714c0196 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3460907cb32d..3a169cc9da10 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a1cc43167c46..37726ba52fd4 100644 --- 
a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore From 107931fcd94af58b01da346c376b56073996e7e0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 16:24:24 +0000 Subject: [PATCH 675/855] chore: detect samples tests in nested directories (#628) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/samples/snippets/noxfile.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index b8dcb4a4af99..aa547962eb0a 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 5fcb9d7461f2..0398d72ff690 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From d647bee59ae526dbf45b6557cabbf4326e78369d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 29 Sep 2022 15:31:50 -0700 Subject: [PATCH 676/855] fix: structured log handler drops reserved fields in json_fields (#634) --- .../logging_v2/handlers/structured_log.py | 24 ++ .../tests/environment/deployable/go/go.mod | 5 +- .../tests/environment/deployable/go/go.sum | 246 ++++++++++++++++-- .../deployable/java/Dockerfile.cloudfunctions | 55 ++++ .../deployable/java/functions/pom.xml | 103 ++++++++ .../deployable/CloudFunctionTrigger.java | 62 +++++ .../environment/deployable/python/snippets.py | 22 +- .../envctl/env_scripts/go/functions.sh | 25 +- .../envctl/env_scripts/go/functions_v2.sh | 22 ++ .../envctl/env_scripts/java/functions.sh | 84 ++++++ .../envctl/env_scripts/java/functions_v2.sh | 22 ++ .../envctl/env_scripts/nodejs/functions.sh | 7 +- .../envctl/env_scripts/nodejs/functions_v2.sh | 22 ++ .../envctl/env_scripts/python/compute.sh | 5 +- .../envctl/env_scripts/python/functions.sh | 7 +- .../envctl/env_scripts/python/functions_v2.sh | 22 ++ 
.../tests/environment/envctl/envctl | 5 +- .../tests/environment/noxfile.py | 1 + .../tests/environment/tests/common/python.py | 19 ++ .../environment/tests/go/test_functions_v2.py | 33 +++ .../environment/tests/java/test_functions.py | 30 +++ .../tests/java/test_functions_v2.py | 30 +++ .../tests/nodejs/test_functions_v2.py | 34 +++ .../tests/python/test_functions_v2.py | 52 ++++ .../unit/handlers/test_structured_log.py | 48 ++++ tests/environment | 2 +- 26 files changed, 939 insertions(+), 48 deletions(-) create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile.cloudfunctions create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/functions/pom.xml create mode 100644 packages/google-cloud-logging/tests/environment/deployable/java/functions/src/main/java/envtest/deployable/CloudFunctionTrigger.java create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions_v2.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions.sh create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions_v2.sh create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions_v2.sh create mode 100644 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions_v2.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/test_functions_v2.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_functions.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/java/test_functions_v2.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions_v2.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_functions_v2.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 4a9a139e5e3e..bfaebdab5253 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -36,6 +36,26 @@ "}" ) +# reserved fields taken from Structured Logging documentation: +# https://cloud.google.com/logging/docs/structured-logging +GCP_STRUCTURED_LOGGING_FIELDS = frozenset( + { + "severity", + "httpRequest", + "time", + "timestamp", + "timestampSeconds", + "timestampNanos", + "logging.googleapis.com/insertId", + "logging.googleapis.com/labels", + "logging.googleapis.com/operation", + "logging.googleapis.com/sourceLocation", + "logging.googleapis.com/spanId", + "logging.googleapis.com/trace", + "logging.googleapis.com/trace_sampled", + } +) + class StructuredLogHandler(logging.StreamHandler): """Handler to format logs into the Cloud Logging structured log format, @@ -70,6 +90,10 @@ def format(self, record): message = _format_and_parse_message(record, super(StructuredLogHandler, self)) if isinstance(message, collections.abc.Mapping): + # remove any special fields + for key in list(message.keys()): + if key in GCP_STRUCTURED_LOGGING_FIELDS: + del message[key] # if input is a dictionary, encode it as a json string encoded_msg = json.dumps(message, ensure_ascii=False) # strip out open and close parentheses diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod 
b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod index 716a271e893c..ebd37f95d89f 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod @@ -3,10 +3,11 @@ module github.com/googleapis/env-tests-logging/deployable/go/main go 1.15 require ( - cloud.google.com/go v0.81.0 + cloud.google.com/go/compute v1.7.0 + cloud.google.com/go/kms v1.4.0 // indirect cloud.google.com/go/logging v1.4.0 cloud.google.com/go/pubsub v1.3.1 - google.golang.org/grpc v1.37.0 + google.golang.org/grpc v1.48.0 ) replace cloud.google.com/go/logging => ./logging diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum index d1437bdb5a8d..26c99a533a4c 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.sum +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.sum @@ -17,16 +17,40 @@ cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKP cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0 h1:at8Tk2zUz63cLPR0JPWm5vp77pEZmzxEQBEfRKn1VV8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= +cloud.google.com/go v0.102.1 h1:vpK6iQWv/2uUeFJth4/cBHsQAGjn1iIE6AAlxipRaA0= +cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute 
v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= +cloud.google.com/go/compute v1.7.0 h1:v/k9Eueb8aAJ0vZuxKMrgm6kPhCLZU9HxFU+AFDs9Uk= +cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/kms v1.4.0 h1:iElbfoE61VeLhnZcGOltqL8HIly8Nhbe5t6JlH9GXjo= +cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -36,12 +60,18 @@ cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiy cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09bA= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/storage v1.23.0 h1:wWRIaDURQA8xxHguFCshYepGlrWIrbBnAmc7wfg07qY= +cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -49,6 +79,12 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod 
h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -56,7 +92,11 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -73,6 +113,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -91,6 +132,8 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= +github.com/golang/snappy v0.0.3/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -103,13 +146,17 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -121,17 +168,32 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.1.0 
h1:zO8WHNx/MYiAKJ3d5spxZXZE6KHmIQGQcAzwUzV7qQw= +github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= +github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -139,15 +201,19 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -156,6 +222,7 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -183,8 +250,8 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -194,8 +261,8 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -228,8 +295,18 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod 
h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e h1:TsQ7F31D3bUCLeqPT0u+yjp1guoArKaNKmCr22PYgTQ= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -241,8 +318,18 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78 h1:rPRtHfUb0UKZeZ6GH4K4Nt4YRbE9V1u+QZX5upZXqJQ= -golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 
v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= +golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1 h1:lxqLZaMad/dJHMFZH0NiNpiEZI/nhgWhe4wgzpE+MuA= +golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -253,8 +340,9 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -290,17 +378,44 @@ golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210412220455-f1c623a9e750 h1:ZBu6861dZq7xBnG1bn5SRU0vA8nx42at4+kP07FMTog= -golang.org/x/sys v0.0.0-20210412220455-f1c623a9e750/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220624220833-87e55d714810 h1:rHZQSjJdAI4Xf5Qzeh2bBc5YJIkPFVM6oDtMFYmgws0= +golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -349,13 +464,20 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -377,8 +499,27 @@ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34q google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.45.0 h1:pqMffJFLBVUDIoYsHcqtxgQVTsmxMDpYLOc5MT4Jrww= -google.golang.org/api v0.45.0/go.mod h1:ISLIJCedJolbZvDfAk+Ctuq5hf+aJ33WgtUsfyFoLXA= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod 
h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= +google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= +google.golang.org/api v0.95.0 h1:d1c24AAS01DYqXreBeuVV7ewY/U8Mnhh47pwtsgVtYg= +google.golang.org/api v0.95.0/go.mod h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaETEI= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -410,6 +551,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= @@ -425,10 +567,48 @@ google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210413151531-c14fb6ef47c3/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210420162539-3c870d7478d2 h1:g2sJMUGCpeHZqTx8p3wsAWRS64nFq20i4dvJWcKGqvY= -google.golang.org/genproto v0.0.0-20210420162539-3c870d7478d2/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod 
h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod 
h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f h1:hJ/Y5SqPXbarffmAsApliUlcvMU+wScNGfyop4bZm8o= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -442,13 +622,27 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0 h1:uSZWeQJX5j11bIQ4AJoj+McDBo29cY1MCoC1wO3ts+c= google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -460,12 +654,16 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod 
h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile.cloudfunctions b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile.cloudfunctions new file mode 100644 index 000000000000..6146aa4a0f1b --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/deployable/java/Dockerfile.cloudfunctions @@ -0,0 +1,55 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# compile local java-logging library +FROM maven:3.8.4 AS lib-env +WORKDIR /app +COPY _library ./ +RUN mvn verify +RUN mvn -Dmaven.test.skip=true package +# copy the latest jar file to java-logging.jar +RUN cp `ls google-cloud-logging/target/*.jar | grep -v "test" | sort -V | tail -n1` /app/java-logging.jar +RUN cp `ls proto-google-cloud-logging-v2/target/*.jar | grep -v "test" | sort -V | tail -n1` /app/proto-java-logging.jar + +# Compile the deployable code. 
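The lib-env stage above builds the local java-logging library and keeps only the newest non-test jars via `ls | grep -v "test" | sort -V | tail -n1`. As a rough Python rendering of that version-sorted selection (file and function names here are illustrative, not part of the build):

```python
# Sketch: choose the newest non-test jar from a Maven target/ directory,
# approximating the shell pipeline's `sort -V` natural version ordering.
import re
from pathlib import Path

def newest_jar(target_dir):
    jars = [p for p in Path(target_dir).glob("*.jar") if "test" not in p.name]
    # sort on the numeric components of the file name, e.g. 3.2.3 -> [3, 2, 3]
    return max(jars, key=lambda p: [int(n) for n in re.findall(r"\d+", p.name)])
```

The build-env stage that follows consumes those jars by installing them into the container's local Maven repository under placeholder coordinates.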
+FROM maven:3.8.4 AS build-env
+WORKDIR /app
+COPY functions/pom.xml /app/pom.xml
+# copy over compiled library
+COPY --from=lib-env /app/java-logging.jar /app/java-logging.jar
+COPY --from=lib-env /app/proto-java-logging.jar /app/proto-java-logging.jar
+# install java-logging
+RUN mvn install:install-file \
+    -Dfile=/app/java-logging.jar \
+    -DgroupId=com.google.cloud.local \
+    -DartifactId=google-cloud-logging \
+    -Dversion=0.0.1 \
+    -Dpackaging=jar \
+    -DgeneratePom=true
+RUN mvn install:install-file \
+    -Dfile=/app/proto-java-logging.jar \
+    -DgroupId=com.google.api.grpc.local \
+    -DartifactId=proto-google-cloud-logging-v2 \
+    -Dversion=0.0.1 \
+    -Dpackaging=jar \
+    -DgeneratePom=true
+
+# download dependencies as specified in pom.xml
+RUN mvn verify
+# copy over cloud functions code
+COPY functions/src /app/src
+# copy over shared Snippets.java file
+COPY src/main/java/envtest/deployable/Snippets.java /app/src/main/java/envtest/deployable
+# build uberjar with dependencies included
+RUN mvn -Dmaven.test.skip=true package
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/functions/pom.xml b/packages/google-cloud-logging/tests/environment/deployable/java/functions/pom.xml
new file mode 100644
index 000000000000..ab86d9697e16
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/functions/pom.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>com.google.cloud.tests</groupId>
+  <artifactId>deployable</artifactId>
+  <version>1.0.0</version>
+  <description>Java deployable for environment tests</description>
+
+  <properties>
+    <maven.compiler.source>11</maven.compiler.source>
+    <maven.compiler.target>11</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.google.cloud.functions</groupId>
+      <artifactId>functions-framework-api</artifactId>
+      <version>1.0.4</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.cloud.local</groupId>
+      <artifactId>google-cloud-logging</artifactId>
+      <version>0.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.api.grpc.local</groupId>
+      <artifactId>proto-google-cloud-logging-v2</artifactId>
+      <version>0.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>google-cloud-core</artifactId>
+      <version>2.8.3</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>google-cloud-core-grpc</artifactId>
+      <version>2.8.3</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                  <mainClass>envtest.deployable.CloudFunctionTrigger</mainClass>
+                </transformer>
+              </transformers>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/packages/google-cloud-logging/tests/environment/deployable/java/functions/src/main/java/envtest/deployable/CloudFunctionTrigger.java b/packages/google-cloud-logging/tests/environment/deployable/java/functions/src/main/java/envtest/deployable/CloudFunctionTrigger.java
new file mode 100644
index 000000000000..5a485df49fb2
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/deployable/java/functions/src/main/java/envtest/deployable/CloudFunctionTrigger.java
@@ -0,0 +1,62 @@
+/*
+/* Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package envtest.deployable;
+
+import com.google.cloud.functions.BackgroundFunction;
+import com.google.cloud.functions.Context;
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.nio.charset.StandardCharsets;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+public class CloudFunctionTrigger implements BackgroundFunction<PubSubMessage> {
+
+  @Override
+  public void accept(PubSubMessage message, Context context) {
+    String fnName = new String(Base64.getDecoder().decode(message.data.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
+    Map<String, String> args = message.attributes;
+    if (args == null){
+      args = new HashMap<String, String>();
+    }
+    triggerSnippet(fnName, args);
+    return;
+  }
+
+  public static void triggerSnippet(String fnName, Map<String, String> args) {
+    try {
+      Snippets obj = new Snippets();
+      Class c = obj.getClass();
+      Method found = c.getDeclaredMethod(fnName, new Class[] {Map.class});
+      found.invoke(obj, args);
+    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
+      e.printStackTrace();
+    }
+  }
+}
+
+class PubSubMessage {
+  public String data;
+  public Map<String, String> attributes;
+  public String messageId;
+  public String publishTime;
+}
diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
index e7db34651a6f..6668cde8315b 100644
--- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
+++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py
@@ -122,6 +122,27 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs):
     else:
         logging.critical(log_text, extra=kwargs)
 
+def pylogging_json_extras(log_text="pylogging with json extras", severity="WARNING", **kwargs):
+    # allowed severity: debug, info, warning, error, critical
+
+    # build json message
+    metadata = {}
+    prefix = "jsonfield_"
+    for k, v in kwargs.items():
+        if k.startswith(prefix):
+            metadata[k.replace(prefix, "")] = int(v) if v.isnumeric() else v
+
+    severity = severity.upper()
+    if severity == "DEBUG":
+        logging.debug(log_text, extra={"json_fields": metadata})
+    elif severity == "INFO":
+        logging.info(log_text, extra={"json_fields": metadata})
+    elif severity == "WARNING":
+        logging.warning(log_text, extra={"json_fields": metadata})
+    elif severity == "ERROR":
+        logging.error(log_text, extra={"json_fields": metadata})
+    else:
+        logging.critical(log_text, extra={"json_fields": metadata})
 
 def pylogging_multiline(log_text="pylogging", second_line="line 2", **kwargs):
     logging.error(f"{log_text}\n{second_line}")
@@ -146,7 +167,6 @@ def pylogging_with_formatter(
 def pylogging_with_arg(log_text="my_arg", **kwargs):
     logging.error("Arg: %s", log_text)
 
-
 def pylogging_flask(
     log_text="pylogging_flask",
     path="/",
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
index d9d8bd4b69aa..d2d712549ac0 100644
--- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh
@@ -17,7 +17,7 @@ set -e # exit on any failure
 set -o pipefail # any step in pipe caused failure
 set -u # undefined variables cause exit
 
-SERVICE_NAME="log-go-func-$(echo
$ENVCTL_ID | head -c 8)x" +SERVICE_NAME="log-go-func-$(echo $ENVCTL_ID | head -c 8)" destroy() { set +e @@ -25,13 +25,13 @@ destroy() { gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null # delete service - gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + gcloud functions delete $SERVICE_NAME --region us-west2 -q ${EXTRA_FUNCTIONS_FLAGS-} 2> /dev/null set -e } verify() { set +e - gcloud functions describe $SERVICE_NAME --region us-west2 + gcloud functions describe $SERVICE_NAME --region us-west2 ${EXTRA_FUNCTIONS_FLAGS-} &> /dev/null if [[ $? == 0 ]]; then echo "TRUE" exit 0 @@ -47,25 +47,29 @@ deploy() { set +e gcloud pubsub topics create $SERVICE_NAME 2>/dev/null set -e - # Note: functions only supports go111 and go113 at the moment - local RUNTIME="go113" - + # Note: functions only supports go111, go113 and go116 at the moment + local RUNTIME="go116" + + # Copy over local copy of library to use as dependency _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE pushd $SUPERREPO_ROOT/logging - tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ . + tar -cvf $_deployable_dir/lib.tar \ + --exclude logging --exclude */env-tests-logging \ + --exclude .nox --exclude docs --exclude __pycache__ . popd mkdir -p $_deployable_dir/logging tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging - + # Create vendor folder based on local dependency pushd $REPO_ROOT/deployable/go + go mod tidy go mod vendor popd # move code into a temp directory used to deploy the cloud function cp -rf $REPO_ROOT/deployable/go/vendor $TMP_DIR/vendor - + # Renames package as Cloud Functions cannot be 'main' packages. sed 's/package main.*/package function/g' $REPO_ROOT/deployable/go/main.go > $TMP_DIR/main.go @@ -80,7 +84,8 @@ deploy() { --entry-point PubsubFunction \ --trigger-topic $SERVICE_NAME \ --runtime $RUNTIME \ - --region us-west2 + --region us-west2 \ + ${EXTRA_FUNCTIONS_FLAGS-} popd } diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions_v2.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions_v2.sh new file mode 100644 index 000000000000..377316cd7d86 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions_v2.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
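The Java `CloudFunctionTrigger` above receives a Pub/Sub event whose base64 payload names the snippet to run, while the message attributes become the snippet's arguments; reflection then locates the method. The Python deployable's `pubsub_gcf` entry point (referenced by the Python deploy script later in this patch) follows the same name-based dispatch idea. A minimal sketch, assuming a `snippets` module like the one in this patch:

```python
# Sketch: name-based snippet dispatch, the Python analogue of the Java
# reflection in CloudFunctionTrigger. `snippets` is assumed to expose the
# deployable functions (pylogging, pylogging_json_extras, ...).
import base64

import snippets

def pubsub_gcf(event, context):
    fn_name = base64.b64decode(event["data"]).decode("utf-8")
    args = event.get("attributes") or {}
    handler = getattr(snippets, fn_name, None)  # getattr mirrors getDeclaredMethod
    if handler is None:
        raise ValueError(f"unknown snippet: {fn_name}")
    handler(**args)  # message attributes become keyword arguments
```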
+
+
+# Functions gen 2 uses the logic of functions gen 1,
+# with an extra flag for all gcloud commands
+source "$SCRIPT_DIR/env_scripts/$LANGUAGE/functions.sh"
+
+SERVICE_NAME="v2-${SERVICE_NAME-}"
+EXTRA_FUNCTIONS_FLAGS="--gen2 ${EXTRA_FUNCTIONS_FLAGS-}"
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions.sh
new file mode 100755
index 000000000000..e9cb70203324
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on any failure
+set -o pipefail # any step in pipe caused failure
+set -u # undefined variables cause exit
+
+
+SERVICE_NAME="logging-java-func-$(echo $ENVCTL_ID | head -c 10)"
+
+destroy() {
+  set +e
+  # delete pubsub resources
+  gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null
+  gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null
+  # delete service
+  gcloud functions delete $SERVICE_NAME --region us-west2 -q ${EXTRA_FUNCTIONS_FLAGS-} 2> /dev/null
+  set -e
+}
+
+verify() {
+  set +e
+  gcloud functions describe $SERVICE_NAME --region us-west2 ${EXTRA_FUNCTIONS_FLAGS-} > /dev/null
+  if [[ $? == 0 ]]; then
+    echo "TRUE"
+    exit 0
+  else
+    echo "FALSE"
+    exit 1
+  fi
+  set -e
+}
+
+deploy() {
+  # available runtimes on Jun'22 are Java 11 (java11) and Java 17 (java17)
+  # default to java17, the most recent LTS Java runtime
+  RUNTIME="${RUNTIME:-java17}"
+
+  # create pub/sub topic
+  set +e
+  gcloud pubsub topics create $SERVICE_NAME 2>/dev/null
+  set -e
+
+  # use custom cloud functions Dockerfile
+  export ENV_TEST_DOCKERFILE=Dockerfile.cloudfunctions
+  # extract container
+  build_container nopush
+  id=$(docker create $GCR_PATH)
+  docker cp $id:/app/target/deployable-1.0.0.jar $TMP_DIR/deployable-1.0.0.jar
+  docker rm -v $id
+  ls $TMP_DIR
+
+  # deploy
+  pushd $TMP_DIR
+  gcloud functions deploy $SERVICE_NAME \
+    --entry-point envtest.deployable.CloudFunctionTrigger \
+    --source $TMP_DIR \
+    --memory 512MB \
+    --trigger-topic $SERVICE_NAME \
+    --runtime $RUNTIME \
+    --region us-west2 \
+    ${EXTRA_FUNCTIONS_FLAGS-}
+  popd
+
+}
+
+filter-string() {
+  echo "resource.type=\"cloud_function\" AND resource.labels.module_id=\"$SERVICE_NAME\""
+}
+
+
+
diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions_v2.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions_v2.sh
new file mode 100644
index 000000000000..377316cd7d86
--- /dev/null
+++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/java/functions_v2.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Functions gen 2 uses the logic of functions gen 1, +# with an extra flag for all gcloud commands +source "$SCRIPT_DIR/env_scripts/$LANGUAGE/functions.sh" + +SERVICE_NAME="v2-${SERVICE_NAME-}" +EXTRA_FUNCTIONS_FLAGS="--gen2 ${EXTRA_FUNCTIONS_FLAGS-}" diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh index b521beed3ec0..d7e27314d2da 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions.sh @@ -25,13 +25,13 @@ destroy() { gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null # delete service - gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + gcloud functions delete $SERVICE_NAME --region us-west2 -q ${EXTRA_FUNCTIONS_FLAGS-} 2> /dev/null set -e } verify() { set +e - gcloud functions describe $SERVICE_NAME --region us-west2 + gcloud functions describe $SERVICE_NAME --region us-west2 ${EXTRA_FUNCTIONS_FLAGS-} &> /dev/null if [[ $? == 0 ]]; then echo "TRUE" exit 0 @@ -72,7 +72,8 @@ deploy() { --entry-point pubsubFunction \ --trigger-topic $SERVICE_NAME \ --runtime $RUNTIME \ - --region us-west2 + --region us-west2 \ + ${EXTRA_FUNCTIONS_FLAGS-} popd } diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions_v2.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions_v2.sh new file mode 100644 index 000000000000..377316cd7d86 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/nodejs/functions_v2.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
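Each `functions_v2.sh` above is deliberately thin: it sources the gen-1 script, prefixes the service name with `v2-`, and sets `EXTRA_FUNCTIONS_FLAGS` so every `gcloud functions` call picks up `--gen2`. In Python terms, the flag injection amounts to something like the following (a sketch with illustrative names, not part of the scripts):

```python
# Sketch: optional --gen2 flag folded into a gcloud invocation, mirroring
# how ${EXTRA_FUNCTIONS_FLAGS-} expands in the shell scripts.
import subprocess

def deploy_function(service_name, runtime, region="us-west2", gen2=False):
    cmd = [
        "gcloud", "functions", "deploy", service_name,
        "--trigger-topic", service_name,
        "--runtime", runtime,
        "--region", region,
    ]
    if gen2:
        cmd.append("--gen2")
    subprocess.run(cmd, check=True)

# gen-2 environments also get a distinct name, as in SERVICE_NAME="v2-...":
# deploy_function("v2-log-go-func-12345678", "go116", gen2=True)
```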
+ + +# Functions gen 2 uses the logic of functions gen 1, +# with an extra flag for all gcloud commands +source "$SCRIPT_DIR/env_scripts/$LANGUAGE/functions.sh" + +SERVICE_NAME="v2-${SERVICE_NAME-}" +EXTRA_FUNCTIONS_FLAGS="--gen2 ${EXTRA_FUNCTIONS_FLAGS-}" diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh index 84b9982726a6..42bf32aef30d 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/compute.sh @@ -28,13 +28,13 @@ destroy() { export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME gcloud container images delete $GCR_PATH -q --force-delete-tags 2> /dev/null # delete service - gcloud compute instances delete $SERVICE_NAME -q + gcloud compute instances delete $SERVICE_NAME --zone us-west1-c -q set -e } verify() { set +e - gcloud compute instances describe $SERVICE_NAME > /dev/null 2> /dev/null + gcloud compute instances describe $SERVICE_NAME --zone us-west1-c > /dev/null 2> /dev/null if [[ $? == 0 ]]; then echo "TRUE" exit 0 @@ -50,6 +50,7 @@ deploy() { gcloud compute instances create-with-container \ $SERVICE_NAME \ --container-image $GCR_PATH \ + --zone us-west1-c \ --container-env PUBSUB_TOPIC="$SERVICE_NAME",ENABLE_SUBSCRIBER="true" # wait for the pub/sub subscriber to start NUM_SUBSCRIBERS=0 diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh index 4322f15ab450..aaa9e62d611a 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions.sh @@ -26,13 +26,13 @@ destroy() { gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null # delete service - gcloud functions delete $SERVICE_NAME --region us-west2 -q 2> /dev/null + gcloud functions delete $SERVICE_NAME --region us-west2 -q ${EXTRA_FUNCTIONS_FLAGS-} 2> /dev/null set -e } verify() { set +e - gcloud functions describe $SERVICE_NAME --region us-west2 + gcloud functions describe $SERVICE_NAME --region us-west2 ${EXTRA_FUNCTIONS_FLAGS-} if [[ $? == 0 ]]; then echo "TRUE" exit 0 @@ -68,7 +68,8 @@ deploy() { --entry-point pubsub_gcf \ --trigger-topic $SERVICE_NAME \ --runtime $RUNTIME \ - --region us-west2 + --region us-west2 \ + ${EXTRA_FUNCTIONS_FLAGS-} popd } diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions_v2.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions_v2.sh new file mode 100644 index 000000000000..377316cd7d86 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/functions_v2.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +# Functions gen 2 uses the logic of functions gen 1, +# with an extra flag for all gcloud commands +source "$SCRIPT_DIR/env_scripts/$LANGUAGE/functions.sh" + +SERVICE_NAME="v2-${SERVICE_NAME-}" +EXTRA_FUNCTIONS_FLAGS="--gen2 ${EXTRA_FUNCTIONS_FLAGS-}" diff --git a/packages/google-cloud-logging/tests/environment/envctl/envctl b/packages/google-cloud-logging/tests/environment/envctl/envctl index 745c1c06ebf8..fe2fce39f1e2 100755 --- a/packages/google-cloud-logging/tests/environment/envctl/envctl +++ b/packages/google-cloud-logging/tests/environment/envctl/envctl @@ -25,7 +25,7 @@ fi set -e # create and destroy temporary dir -UUID=$(python -c 'import uuid; print(uuid.uuid1())') +UUID="${UUID:-$(python${ENV_TEST_PY_VERSION-} -c 'import uuid; print(uuid.uuid1())')}" TMP_DIR=$REPO_ROOT/tmp-$UUID mkdir $TMP_DIR function finish { @@ -41,6 +41,7 @@ build_container() { # copy super-repo into deployable dir _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + _dockerfile_name="${ENV_TEST_DOCKERFILE:-Dockerfile}" # copy over local copy of library pushd $SUPERREPO_ROOT tar -cvf $_deployable_dir/lib.tar \ @@ -52,7 +53,7 @@ build_container() { mkdir -p $_deployable_dir/_library tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/_library # build container - docker build -t $GCR_PATH $_deployable_dir + docker build -t $GCR_PATH -f $_deployable_dir/$_dockerfile_name $_deployable_dir if [[ "$ARG" != "nopush" ]]; then docker push $GCR_PATH fi diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 98518507c5ff..48da724c1cca 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -133,6 +133,7 @@ def blacken(session: nox.sessions.Session) -> None: "kubernetes", "cloudrun", "functions", + "functions_v2" ], ) @nox.parametrize("language", ["python", "go", "nodejs", "java"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index d5f160db59fc..3ff80d652d65 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -356,3 +356,22 @@ def test_pylogging_pandas(self): df = df.append({"log_text": log_text}, ignore_index=True) self.assertEqual(str(df), message) + + def test_pylogging_conflicting_severity(self): + # test different severity values in json_fields and logger + log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" + log_dict = { + "jsonfield_component": "arbitrary-property", + "jsonfield_severity":"error", + "jsonfield_message": "duplicate message" + } + log_list = self.trigger_and_retrieve( + log_text, "pylogging_json_extras", append_uuid=False, severity="info", **log_dict + ) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, dict), "expected jsonPayload") + self.assertEqual(found_log.payload["message"], log_text) + self.assertEqual(found_log.severity.lower(), "info", "severity should be set by logger") diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_functions_v2.py 
b/packages/google-cloud-logging/tests/environment/tests/go/test_functions_v2.py new file mode 100644 index 000000000000..b21c939cecf5 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_functions_v2.py @@ -0,0 +1,33 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions_v2(Common, unittest.TestCase): + + environment = "functions_v2" + language = "go" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = [ + "region", + "function_name", + "project_id", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/java/test_functions.py new file mode 100644 index 000000000000..46cf681893f2 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_functions.py @@ -0,0 +1,30 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions" + language = "java" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = ["region", "function_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/java/test_functions_v2.py b/packages/google-cloud-logging/tests/environment/tests/java/test_functions_v2.py new file mode 100644 index 000000000000..6d16e571647a --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/java/test_functions_v2.py @@ -0,0 +1,30 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
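These test modules drive the deployed code through Pub/Sub: the snippet name travels as the message payload and its keyword arguments travel as message attributes, matching the decoding in `CloudFunctionTrigger`. A hedged sketch of the publish side that `trigger_and_retrieve` implies (simplified; the real helper also polls Cloud Logging for the resulting entries):

```python
# Sketch: publish a snippet-trigger message. The Functions framework
# base64-encodes the payload before the trigger sees it, which is why
# CloudFunctionTrigger decodes message.data on the receiving end.
from google.cloud import pubsub_v1

def trigger_snippet(project_id, topic_id, snippet_name, **attributes):
    publisher = pubsub_v1.PublisherClient()
    topic_path = publisher.topic_path(project_id, topic_id)
    future = publisher.publish(topic_path, snippet_name.encode("utf-8"), **attributes)
    return future.result()  # blocks until the message id is assigned
```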
+ +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions_v2" + language = "java" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = ["region", "function_name"] diff --git a/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions_v2.py b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions_v2.py new file mode 100644 index 000000000000..ab738e65fa07 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/nodejs/test_functions_v2.py @@ -0,0 +1,34 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common + + +class TestCloudFunctions(Common, unittest.TestCase): + + environment = "functions_v2" + language = "nodejs" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = [ + "region", + "function_name", + "project_id", + ] diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions_v2.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions_v2.py new file mode 100644 index 000000000000..33d7ca6c52cf --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions_v2.py @@ -0,0 +1,52 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
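Each test class only declares the environment, the language, and the monitored-resource shape it expects; the shared `Common` base class performs the actual verification. The resource check reduces to roughly this (a sketch, not the real `Common` implementation):

```python
# Sketch: the assertions implied by monitored_resource_name/labels above,
# for an entry returned by trigger_and_retrieve.
def check_monitored_resource(entry, resource_name, resource_labels):
    assert entry.resource.type == resource_name  # e.g. "cloud_function"
    for label in resource_labels:  # e.g. "region", "function_name"
        assert label in entry.resource.labels, f"missing resource label: {label}"
```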
+ +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common +from ..common.python import CommonPython + + +class TestCloudFunctions(Common, CommonPython, unittest.TestCase): + + environment = "functions_v2" + language = "python" + + monitored_resource_name = "cloud_function" + monitored_resource_labels = ["region", "function_name"] + + def test_default_http_request_pylogging(self): + """ + Cloud Functions should automatically attach http request information + """ + log_text = f"{inspect.currentframe().f_code.co_name}" + + log_list = self.trigger_and_retrieve(log_text, "pylogging") + found_request = log_list[-1].http_request + found_trace = log_list[-1].trace + + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request["requestMethod"]) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertIsNotNone(found_request["userAgent"]) + self.assertIsNotNone(found_request["protocol"]) + self.assertEqual(found_request["requestMethod"], "POST") + self.assertEqual(found_request["protocol"], "HTTP/1.1") + + self.assertIsNotNone(found_trace) + self.assertIn("projects/", found_trace) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index d2d570e2162d..77bb97ab61e4 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -207,6 +207,54 @@ def test_format_with_custom_formatter(self): self.assertIn(expected_result, result) self.assertIn("message", result) + def test_format_with_reserved_json_field(self): + # drop json_field data with reserved names + # related issue: https://github.com/googleapis/python-logging/issues/543 + import logging + import json + + handler = self._make_one() + message = "hello world" + extra = "still here" + json_fields = { + "message": "override?", + "severity": "error", + "logging.googleapis.com/trace_sampled": True, + "time": "none", + "extra": extra, + "SEVERITY": "error", + } + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + expected_payload = { + "message": message, + "severity": "INFO", + "SEVERITY": "error", + "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, + "logging.googleapis.com/sourceLocation": {}, + "httpRequest": {}, + "logging.googleapis.com/labels": {}, + "extra": extra, + } + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual(set(expected_payload.keys()), set(result.keys())) + for (key, value) in expected_payload.items(): + self.assertEqual( + value, result[key], f"expected_payload[{key}] != result[{key}]" + ) + def test_dict(self): """ Handler should parse json encoded as a string diff --git a/tests/environment b/tests/environment index 3845d3c489f4..d67a68bb81b3 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 3845d3c489f45e9366b5fb121846acc7893060b1 +Subproject commit d67a68bb81b33143ddf371d20fa4c2ce4d0c2199 From 4cfb160153f26db8a04e79270e8681f9a95762d6 Mon Sep 17 00:00:00 2001 From: "Leah E. 
Cole" <6719667+leahecole@users.noreply.github.com> Date: Fri, 30 Sep 2022 14:45:13 -0400 Subject: [PATCH 677/855] fix: mimic the pubsub test to clean up bigquery datasets (#639) * fix: mimic the pubsub test to clean up bigquery datasets * fix: add explanatory comment --- .../samples/snippets/usage_guide.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index fdbbe1211dc5..5c9e869909dd 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -315,13 +315,19 @@ def _sink_bigquery_setup(client): client.update_dataset(dataset, ["access_entries"]) # API call # [END sink_dataset_permissions] - return dataset + # create callback wrapper to delete dataset when done + class DatasetDeleter: + def delete(self): + client.delete_dataset(dataset, delete_contents=True) + + return dataset, DatasetDeleter() @snippet def sink_bigquery(client, to_delete): """Sink log entries to bigquery.""" - dataset = _sink_bigquery_setup(client) + dataset, dataset_deleter = _sink_bigquery_setup(client) + to_delete.append(dataset_deleter) sink_name = "robots-bigquery-%d" % (_millis(),) filter_str = "textPayload:robot" From dbe4831399886037ca8cc57d93f3e9bb1025ead9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 30 Sep 2022 13:31:34 -0700 Subject: [PATCH 678/855] fix: nested json with structured log handler (#636) --- .../logging_v2/handlers/structured_log.py | 10 +-- .../environment/deployable/python/snippets.py | 7 +- .../tests/environment/noxfile.py | 4 +- .../tests/environment/tests/common/python.py | 14 ++-- .../tests/unit/handlers/test_handlers.py | 41 +++++++++++ .../unit/handlers/test_structured_log.py | 72 +++++++++++++++++++ .../transports/test_background_thread.py | 23 ++++++ .../unit/handlers/transports/test_sync.py | 2 +- .../tests/unit/test_logger.py | 22 ++++++ tests/environment | 2 +- 10 files changed, 184 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index bfaebdab5253..65528254fc16 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -96,8 +96,10 @@ def format(self, record): del message[key] # if input is a dictionary, encode it as a json string encoded_msg = json.dumps(message, ensure_ascii=False) - # strip out open and close parentheses - payload = encoded_msg.lstrip("{").rstrip("}") + "," + # all json.dumps strings should start and end with parentheses + # strip them out to embed these fields in the larger JSON payload + if len(encoded_msg) > 2: + payload = encoded_msg[1:-1] + "," elif message: # properly break any formatting in string to make it json safe encoded_message = json.dumps(message, ensure_ascii=False) @@ -119,5 +121,5 @@ def emit(self, record): def emit_instrumentation_info(self): google.cloud.logging_v2._instrumentation_emitted = True - diagnostic_object = _create_diagnostic_entry().to_api_repr() - logging.info(diagnostic_object) + diagnostic_object = _create_diagnostic_entry() + logging.info(diagnostic_object.payload) diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py 
b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 6668cde8315b..277dcdc9a8af 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -122,7 +122,10 @@ def pylogging(log_text="pylogging", severity="WARNING", **kwargs): else: logging.critical(log_text, extra=kwargs) -def pylogging_json_extras(log_text="pylogging with json extras", severity="WARNING", **kwargs): + +def pylogging_json_extras( + log_text="pylogging with json extras", severity="WARNING", **kwargs +): # allowed severity: debug, info, warning, error, critical # build json message @@ -144,6 +147,7 @@ def pylogging_json_extras(log_text="pylogging with json extras", severity="WARNI else: logging.critical(log_text, extra={"json_fields": metadata}) + def pylogging_multiline(log_text="pylogging", second_line="line 2", **kwargs): logging.error(f"{log_text}\n{second_line}") @@ -167,6 +171,7 @@ def pylogging_with_formatter( def pylogging_with_arg(log_text="my_arg", **kwargs): logging.error("Arg: %s", log_text) + def pylogging_flask( log_text="pylogging_flask", path="/", diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 48da724c1cca..282ffb4e42e6 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -70,7 +70,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: # We also need to specify the rules which are ignored by default: # ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -DEFAULT_PYTHON_VERSION = os.getenv("ENV_TEST_PY_VERSION","3.9") +DEFAULT_PYTHON_VERSION = os.getenv("ENV_TEST_PY_VERSION", "3.9") BLACK_PATHS = ["./deployable/python"] BLACK_VERSION = "black==19.10b0" @@ -133,7 +133,7 @@ def blacken(session: nox.sessions.Session) -> None: "kubernetes", "cloudrun", "functions", - "functions_v2" + "functions_v2", ], ) @nox.parametrize("language", ["python", "go", "nodejs", "java"]) diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/common/python.py index 3ff80d652d65..83e13401b583 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/python.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/python.py @@ -362,11 +362,15 @@ def test_pylogging_conflicting_severity(self): log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" log_dict = { "jsonfield_component": "arbitrary-property", - "jsonfield_severity":"error", - "jsonfield_message": "duplicate message" + "jsonfield_severity": "error", + "jsonfield_message": "duplicate message", } log_list = self.trigger_and_retrieve( - log_text, "pylogging_json_extras", append_uuid=False, severity="info", **log_dict + log_text, + "pylogging_json_extras", + append_uuid=False, + severity="info", + **log_dict, ) found_log = log_list[-1] @@ -374,4 +378,6 @@ def test_pylogging_conflicting_severity(self): self.assertIsNotNone(found_log, "expected log text not found") self.assertTrue(isinstance(found_log.payload, dict), "expected jsonPayload") self.assertEqual(found_log.payload["message"], log_text) - self.assertEqual(found_log.severity.lower(), "info", "severity should be set by logger") + self.assertEqual( + found_log.severity.lower(), "info", "severity should be set by logger" + ) diff --git 
a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 0bcde45de0b4..ad70061a6951 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -573,6 +573,47 @@ def test_emit_w_json_extras(self): ), ) + def test_format_with_nested_json(self): + """ + JSON can contain nested dictionaries of data + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + import logging + + client = _Client(self.PROJECT) + handler = self._make_one( + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + ) + json_fields = {"outer": {"inner": {"hello": "world"}}} + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + handler.handle(record) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + json_fields, + _GLOBAL_RESOURCE, + None, + None, + None, + False, + None, + None, + ), + ) + def test_emit_with_encoded_json(self): """ Handler should parse json encoded as a string diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 77bb97ab61e4..3fe322fe376b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -487,6 +487,30 @@ def test_format_with_json_fields(self): self.assertEqual(result["hello"], "world") self.assertEqual(result["number"], 12) + def test_format_with_nested_json(self): + """ + JSON can contain nested dictionaries of data + """ + import logging + import json + + handler = self._make_one() + json_fields = {"outer": {"inner": {"hello": "world"}}} + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual(result["outer"], json_fields["outer"]) + def test_emits_instrumentation_info(self): import logging import mock @@ -510,3 +534,51 @@ def side_effect(): # emit_instrumentation_info should be called once emit_info.assert_called_once() + + def test_valid_instrumentation_info(self): + import logging + import mock + import json + + with mock.patch.object(logging, "info") as mock_log: + handler = self._make_one() + handler.emit_instrumentation_info() + mock_log.assert_called_once() + # ensure instrumentaiton payload is formatted as expected + called_payload = mock_log.call_args.args[0] + self.assertEqual(len(called_payload.keys()), 1) + self.assertIn("logging.googleapis.com/diagnostic", called_payload.keys()) + inst_source_dict = called_payload["logging.googleapis.com/diagnostic"] + self.assertEqual(len(inst_source_dict.keys()), 1) + self.assertIn("instrumentation_source", inst_source_dict.keys()) + source_list = inst_source_dict["instrumentation_source"] + self.assertEqual( + len(source_list), 1, "expected single instrumentation source" + ) + for source_dict in source_list: + self.assertEqual( + len(source_dict.keys()), + 2, + f"expected two keys in payload: {source_dict.keys()}", + ) + self.assertIn("name", source_dict.keys()) + self.assertIn("version", source_dict.keys()) + self.assertEqual(source_dict["name"], "python") + # ensure it is parsed properly 
by handler + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + called_payload, + None, + None, + ) + record.created = None + handler.filter(record) + result = json.loads(handler.format(record)) + self.assertEqual( + result["logging.googleapis.com/diagnostic"], + inst_source_dict, + "instrumentation payload not logged properly", + ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index 07e1a7e663e6..d4954ff7b5e0 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -69,6 +69,29 @@ def test_send(self): resource=_GLOBAL_RESOURCE, ) + def test_send_json(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + name = "python_logger" + + transport, _ = self._make_one(client, name) + + python_logger_name = "mylogger" + message = {"hello": {"world": "!"}} + + record = logging.LogRecord( + python_logger_name, logging.INFO, None, None, message, None, None + ) + + transport.send(record, message, resource=_GLOBAL_RESOURCE) + + transport.worker.enqueue.assert_called_once_with( + record, + message, + resource=_GLOBAL_RESOURCE, + ) + def test_trace_send(self): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index bdc78d89a463..752a96d9fa8f 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -70,7 +70,7 @@ def test_send_struct(self): client_name = "python" python_logger_name = "mylogger" transport = self._make_one(client, client_name) - message = {"message": "hello world", "extra": "test"} + message = {"message": "hello world", "extra": "test", "nested": {"one": 2}} record = logging.LogRecord( python_logger_name, logging.INFO, None, None, message, None, None ) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index a5d01898b71e..539fdcf7bf88 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -304,6 +304,28 @@ def test_log_struct_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_nested_struct(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + STRUCT = {"message": "MESSAGE", "weather": "cloudy", "nested": {"one": 2}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "resource": RESOURCE, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(STRUCT) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_w_default_labels(self): from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, diff --git a/tests/environment b/tests/environment index d67a68bb81b3..a4994224b6fe 160000 --- a/tests/environment +++ 
b/tests/environment @@ -1 +1 @@ -Subproject commit d67a68bb81b33143ddf371d20fa4c2ce4d0c2199 +Subproject commit a4994224b6fe03ed1d659fb9448df621f2ef7a76 From 345b3c64ff2f46936d3374a66292f2d3a294af24 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 30 Sep 2022 17:16:26 -0700 Subject: [PATCH 679/855] chore(main): release 3.2.3 (#637) --- packages/google-cloud-logging/CHANGELOG.md | 9 +++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 210951029384..43ffc32e3e8f 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.3](https://github.com/googleapis/python-logging/compare/v3.2.2...v3.2.3) (2022-09-30) + + +### Bug Fixes + +* Mimic the pubsub test to clean up bigquery datasets ([#639](https://github.com/googleapis/python-logging/issues/639)) ([44deb1b](https://github.com/googleapis/python-logging/commit/44deb1bd63a1367528c57d6a732ba8a4e0dce318)) +* Nested json with structured log handler ([#636](https://github.com/googleapis/python-logging/issues/636)) ([b840435](https://github.com/googleapis/python-logging/commit/b8404351484cbec22a372a8c2b16dcb4bf4b756a)) +* Structured log handler drops reserved fields in json_fields ([#634](https://github.com/googleapis/python-logging/issues/634)) ([4ef38b3](https://github.com/googleapis/python-logging/commit/4ef38b3942f6750348916a28e995e2c1c169e071)) + ## [3.2.2](https://github.com/googleapis/python-logging/compare/v3.2.1...v3.2.2) (2022-08-12) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 060a8a823597..d50569657a95 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.2" +version = "3.2.3" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From eaf3558bd6f4d29aed014f46e64792f29ff75fb9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 12:11:02 -0400 Subject: [PATCH 680/855] fix(deps): require protobuf >= 3.20.2 (#633) * chore: exclude requirements.txt file from renovate-bot Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 * update constraints files * fix(deps): require protobuf 3.20.2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Daniel Sanche --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 49 +++++++++---------- packages/google-cloud-logging/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index aa547962eb0a..3815c983cb16 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 385f2d4d6106..d15994bac93c 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + 
--hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d50569657a95..4c7f8238c0bd 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -35,7 +35,7 @@ "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.2, <5.0.0dev", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 2b42f18bc474..81bd9e5bda0d 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.32.0 google-cloud-core==2.0.0 proto-plus==1.22.0 -protobuf==3.19.0 +protobuf==3.20.2 From 0243989bbb8bb3a8de7c4fe18d719b5dd5a7885f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 02:56:58 +0200 Subject: [PATCH 681/855] chore(deps): update all dependencies (#630) --- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index e8d879e300db..43f992fde5be 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.2.2 -google-cloud-bigquery==3.3.2 +google-cloud-logging==3.2.3 +google-cloud-bigquery==3.3.3 google-cloud-storage==2.5.0 -google-cloud-pubsub==2.13.6 +google-cloud-pubsub==2.13.7 From 2dcc7c461938f055bfbdef8905e326638781b930 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 21:13:55 -0400 Subject: [PATCH 682/855] chore(main): release 3.2.4 (#641) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 
43ffc32e3e8f..9fe42730c7f7 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.2.4](https://github.com/googleapis/python-logging/compare/v3.2.3...v3.2.4) (2022-10-04) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#633](https://github.com/googleapis/python-logging/issues/633)) ([b4da4ce](https://github.com/googleapis/python-logging/commit/b4da4ceaaf757235c019e90022aaefcefd47150a)) + ## [3.2.3](https://github.com/googleapis/python-logging/compare/v3.2.2...v3.2.3) (2022-09-30) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 4c7f8238c0bd..fbc8f8606093 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.3" +version = "3.2.4" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 15e116f6d0d00c49388f5c3205dd5023cdae0333 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 15:32:08 +0200 Subject: [PATCH 683/855] chore(deps): update dependency google-cloud-logging to v3.2.4 (#642) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 43f992fde5be..59aab11ad32a 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.2.3 +google-cloud-logging==3.2.4 google-cloud-bigquery==3.3.3 google-cloud-storage==2.5.0 google-cloud-pubsub==2.13.7 From 4b844a17301eed888d456278c4e01f9a26481d18 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 Oct 2022 15:34:59 +0200 Subject: [PATCH 684/855] chore(deps): update dependency backoff to v2.2.1 (#643) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 6759e75e03a0..3bc76cee52db 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -backoff==2.1.2 +backoff==2.2.1 pytest==7.1.3 From 354b300a4273851665ec2f46c748615bebe9ee9a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Oct 2022 15:04:14 -0700 Subject: [PATCH 685/855] feat: support Django asgi middleware (#625) --- .../logging_v2/handlers/middleware/request.py | 19 ++++++++----------- .../environment/deployable/python/router.py | 6 +++++- .../unit/handlers/middleware/test_request.py | 17 ++++++++++------- .../tests/unit/handlers/test__helpers.py | 10 +++++----- tests/environment | 2 +- 5 files changed, 29 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py index 1804947ec367..a061d5d47871 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/middleware/request.py @@ -33,19 +33,10 @@ def _get_django_request(): return getattr(_thread_locals, "request", None) -try: - from django.utils.deprecation import MiddlewareMixin -except ImportError: # pragma: NO COVER - MiddlewareMixin = object - - -class RequestMiddleware(MiddlewareMixin): +def RequestMiddleware(get_response): """Saves the request in thread local""" - def __init__(self, get_response): - self.get_response = get_response - - def process_request(self, request): + def middleware(request): """Called on each request, before Django decides which view to execute. Args: @@ -53,3 +44,9 @@ def process_request(self, request): Django http request. """ _thread_locals.request = request + if get_response: + return get_response(request) + else: + return None + + return middleware diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/router.py b/packages/google-cloud-logging/tests/environment/deployable/python/router.py index a0ab19c8964a..9c98231c1d63 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/router.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/router.py @@ -98,6 +98,7 @@ def initialize_client(): initialize_client() if os.getenv("ENABLE_SUBSCRIBER", None): + from google.api_core.exceptions import AlreadyExists # set up pubsub listener topic_id = os.getenv("PUBSUB_TOPIC", "logging-test") _, project_id = google.auth.default() @@ -105,7 +106,10 @@ def initialize_client(): subscriber = pubsub_v1.SubscriberClient() topic_name = f"projects/{project_id}/topics/{topic_id}" subscription_name = f"projects/{project_id}/subscriptions/{subscription_id}" - subscriber.create_subscription(name=subscription_name, topic=topic_name) + try: + subscriber.create_subscription(name=subscription_name, topic=topic_name) + except AlreadyExists: + pass future = subscriber.subscribe(subscription_name, pubsub_callback) try: print(f"listening for pubsub messages at {topic_id}") diff --git a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py index d0e3daf2473d..68f7f5a6db7a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py +++ b/packages/google-cloud-logging/tests/unit/handlers/middleware/test_request.py @@ -46,26 +46,29 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + def _mock_get_response(self, req): + return req + def test_process_request(self): from django.test import RequestFactory from google.cloud.logging_v2.handlers.middleware import request middleware = self._make_one() mock_request = RequestFactory().get("/") - middleware.process_request(mock_request) + middleware(mock_request) django_request = request._get_django_request() self.assertEqual(django_request, mock_request) def test_can_instantiate_middleware_without_kwargs(self): - handler = mock.Mock() - middleware = self._make_one(handler) - self.assertEqual(middleware.get_response, handler) + middleware = self._make_one(self._mock_get_response) + mock_request = "test_req" + self.assertEqual(middleware(mock_request), mock_request) def test_can_instantiate_middleware_with_kwargs(self): - handler = mock.Mock() - middleware = self._make_one(get_response=handler) - self.assertEqual(middleware.get_response, handler) + middleware = self._make_one(get_response=self._mock_get_response) + mock_request = "test_req" + 
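The rewrite above swaps the MiddlewareMixin-based class for Django's function-based middleware style, which Django accepts on both WSGI and ASGI stacks. A sketch of how Django drives it, using the module path from the diff (the request and response values here are stand-ins):

    from google.cloud.logging_v2.handlers.middleware.request import RequestMiddleware

    def get_response(request):
        # stand-in for the rest of the Django middleware chain
        return "response"

    # Django calls the factory once at startup ...
    middleware = RequestMiddleware(get_response)
    # ... then calls the returned callable once per request; each call stores
    # the request in thread-local state before delegating onward
    response = middleware("request")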
self.assertEqual(middleware(mock_request), mock_request) class Test__get_django_request(DjangoBase): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 9946c8eb5c7d..6a7ff245fdd9 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -153,7 +153,7 @@ def test_no_context_header(self): django_request = RequestFactory().get("/") middleware = request.RequestMiddleware(None) - middleware.process_request(django_request) + middleware(django_request) http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(http_request["requestMethod"], "GET") @@ -175,7 +175,7 @@ def test_xcloud_header(self): ) middleware = request.RequestMiddleware(None) - middleware.process_request(django_request) + middleware(django_request) http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) @@ -195,7 +195,7 @@ def test_traceparent_header(self): django_request = RequestFactory().get("/", **{django_trace_header: header}) middleware = request.RequestMiddleware(None) - middleware.process_request(django_request) + middleware(django_request) http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) @@ -222,7 +222,7 @@ def test_http_request_populated(self): django_request.read() middleware = request.RequestMiddleware(None) - middleware.process_request(django_request) + middleware(django_request) http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) @@ -236,7 +236,7 @@ def test_http_request_sparse(self): expected_path = "http://testserver/123" django_request = RequestFactory().put(expected_path) middleware = request.RequestMiddleware(None) - middleware.process_request(django_request) + middleware(django_request) http_request, *_ = self._call_fut() self.assertEqual(http_request["requestMethod"], "PUT") self.assertEqual(http_request["requestUrl"], expected_path) diff --git a/tests/environment b/tests/environment index a4994224b6fe..a4922381d1a2 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit a4994224b6fe03ed1d659fb9448df621f2ef7a76 +Subproject commit a4922381d1a228367b9341cd20053a8909de5445 From 4e6aace69619a4b3b5e182fd5a36e625bfdea93e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 7 Oct 2022 16:50:54 -0400 Subject: [PATCH 686/855] fix(deps): allow protobuf 3.19.5 (#644) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index fbc8f8606093..c5f649b6a321 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -35,7 +35,7 @@ "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.20.2, <5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {} diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 81bd9e5bda0d..6db145a06148 100644 --- 
a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.32.0 google-cloud-core==2.0.0 proto-plus==1.22.0 -protobuf==3.20.2 +protobuf==3.19.5 From c54fbb4cd7a6a58a14fa3867e5bda5ec776b0744 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 12 Oct 2022 12:00:12 -0700 Subject: [PATCH 687/855] chore: update blunderbuss asignments (#651) --- packages/google-cloud-logging/.github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index 148ebf4e81cb..a9d3f44e3967 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - Daniel-Sanche + - googleapis/api-logging-reviewers assign_prs: - - Daniel-Sanche + - googleapis/api-logging-reviewers From cd50b2e8038630a1145e89b758ac50fde5375b16 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 17 Oct 2022 12:17:22 -0700 Subject: [PATCH 688/855] feat: set partial_success to default to true for batched logs (#649) --- .../google/cloud/logging_v2/_gapic.py | 2 +- .../google/cloud/logging_v2/_http.py | 2 +- .../google/cloud/logging_v2/logger.py | 16 ++- .../tests/unit/test__gapic.py | 2 +- .../tests/unit/test__http.py | 4 +- .../tests/unit/test_logger.py | 104 +++++++++++++----- 6 files changed, 91 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index 4b5429f2f871..5fa31b9e77de 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -121,7 +121,7 @@ def write_entries( logger_name=None, resource=None, labels=None, - partial_success=False, + partial_success=True, dry_run=False, ): """Log an entry resource via a POST request diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py index cb5fd61eb931..581dce35edd7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -144,7 +144,7 @@ def write_entries( logger_name=None, resource=None, labels=None, - partial_success=False, + partial_success=True, dry_run=False, ): """Log an entry resource via a POST request diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index fa0af170cb27..5abc0f2c79b8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -135,7 +135,6 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["log_name"] = kw.pop("log_name", self.full_name) kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", self.default_resource) - partial_success = False severity = kw.get("severity", None) if isinstance(severity, str) and not severity.isupper(): @@ -159,11 +158,10 @@ def _do_log(self, client, _entry_class, payload=None, **kw): api_repr = entry.to_api_repr() entries = [api_repr] if google.cloud.logging_v2._instrumentation_emitted is False: - partial_success = True entries = _add_instrumentation(entries, **kw) 
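With partial_success now defaulting to True, a single rejected entry no longer sinks the instrumentation record, or any other valid entry, written in the same call; the commit() hunk that follows exposes the same knob to callers. A sketch of the caller-facing behavior, assuming default credentials:

    import google.cloud.logging

    client = google.cloud.logging.Client()
    batch = client.logger("my-log").batch()
    batch.log_text("first entry")
    batch.log_struct({"second": "entry"})

    # default: valid entries are written even if another entry is rejected
    # with a permanent error such as INVALID_ARGUMENT or PERMISSION_DENIED
    batch.commit()

    # all-or-nothing writes remain available per commit:
    # batch.commit(partial_success=False)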
google.cloud.logging_v2._instrumentation_emitted = True - - client.logging_api.write_entries(entries, partial_success=partial_success) + # partial_success is true to avoid dropping instrumentation logs + client.logging_api.write_entries(entries, partial_success=True) def log_empty(self, *, client=None, **kw): """Log an empty message @@ -437,13 +435,17 @@ def log(self, message=None, **kw): entry_type = TextEntry self.entries.append(entry_type(payload=message, **kw)) - def commit(self, *, client=None): + def commit(self, *, client=None, partial_success=True): """Send saved log entries as a single API call. Args: client (Optional[~logging_v2.client.Client]): The client to use. If not passed, falls back to the ``client`` stored on the current batch. + partial_success (Optional[bool]): + Whether a batch's valid entries should be written even + if some other entry failed due to a permanent error such + as INVALID_ARGUMENT or PERMISSION_DENIED. """ if client is None: client = self.client @@ -458,5 +460,7 @@ def commit(self, *, client=None): entries = [entry.to_api_repr() for entry in self.entries] - client.logging_api.write_entries(entries, **kwargs) + client.logging_api.write_entries( + entries, partial_success=partial_success, **kwargs + ) del self.entries[:] diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 127c856b4ad9..8bf25870ab96 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -167,7 +167,7 @@ def test_write_entries_single(self): # Check the request call.assert_called_once() request = call.call_args.args[0] - assert request.partial_success is False + assert request.partial_success is True assert len(request.entries) == 1 assert request.entries[0].log_name == entry["logName"] assert request.entries[0].resource.type == entry["resource"]["type"] diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 2154b6f5735b..f9b60cfa6040 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -306,7 +306,7 @@ def test_write_entries_single(self): client = _Client(conn) api = self._make_one(client) - api.write_entries([ENTRY]) + api.write_entries([ENTRY], partial_success=False) self.assertEqual(conn._called_with["method"], "POST") path = f"/{self.WRITE_ENTRIES_PATH}" @@ -325,7 +325,7 @@ def test_write_entries_multiple(self): "resource": RESOURCE, "labels": LABELS, "entries": [ENTRY1, ENTRY2], - "partialSuccess": False, + "partialSuccess": True, "dry_run": False, } conn = _Connection({}) diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 539fdcf7bf88..76113748faf5 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -123,7 +123,9 @@ def test_log_empty_defaults_w_default_labels(self): logger.log_empty() - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_empty_w_explicit(self): import datetime @@ -177,7 +179,9 @@ def test_log_empty_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, 
(ENTRIES, None, None, None, True) + ) def test_log_text_defaults(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -199,7 +203,9 @@ def test_log_text_defaults(self): logger.log_text(TEXT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_text_w_unicode_and_default_labels(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -223,7 +229,9 @@ def test_log_text_w_unicode_and_default_labels(self): logger.log_text(TEXT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_text_explicit(self): import datetime @@ -280,7 +288,9 @@ def test_log_text_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_struct_defaults(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -302,7 +312,9 @@ def test_log_struct_defaults(self): logger.log_struct(STRUCT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_nested_struct(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -324,7 +336,9 @@ def test_log_nested_struct(self): logger.log(STRUCT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_struct_w_default_labels(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -348,7 +362,9 @@ def test_log_struct_w_default_labels(self): logger.log_struct(STRUCT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_struct_w_explicit(self): import datetime @@ -405,7 +421,9 @@ def test_log_struct_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_struct_inference(self): """ @@ -439,7 +457,9 @@ def test_log_struct_inference(self): logger.log_struct(STRUCT, resource=RESOURCE) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_w_dict_resource(self): """ @@ -467,7 +487,9 @@ def test_log_w_dict_resource(self): } ] logger.log(MESSAGE, resource=resource) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_lowercase_severity(self): """ @@ -505,7 +527,7 @@ def test_log_lowercase_severity(self): logger.log(MESSAGE, severity=lower_severity) self.assertEqual( - api._write_entries_called_with, (ENTRIES, None, None, None) + api._write_entries_called_with, (ENTRIES, None, None, None, True) ) def test_log_proto_defaults(self): @@ -530,7 +552,9 @@ def test_log_proto_defaults(self): logger.log_proto(message) - self.assertEqual(api._write_entries_called_with, 
(ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_proto_w_default_labels(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -556,7 +580,9 @@ def test_log_proto_w_default_labels(self): logger.log_proto(message) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_proto_w_explicit(self): import json @@ -617,7 +643,9 @@ def test_log_proto_w_explicit(self): trace_sampled=True, ) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_inference_empty(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -638,7 +666,9 @@ def test_log_inference_empty(self): logger.log() - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_inference_text(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -659,7 +689,9 @@ def test_log_inference_text(self): logger.log(TEXT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_inference_struct(self): from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -680,7 +712,9 @@ def test_log_inference_struct(self): logger.log(STRUCT) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_log_inference_proto(self): import json @@ -704,7 +738,9 @@ def test_log_inference_proto(self): logger.log(message) - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) def test_delete_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -1033,12 +1069,16 @@ def test_first_log_emits_instrumentation(self): api = client.logging_api = _DummyLoggingAPI() logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) logger.log_empty() - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) ENTRIES = ENTRIES[-1:] api = client.logging_api = _DummyLoggingAPI() logger.log_empty() - self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None, True) + ) class TestBatch(unittest.TestCase): @@ -1436,7 +1476,8 @@ def test_commit_w_unknown_entry_type(self): self.assertEqual(list(batch.entries), []) self.assertEqual( - api._write_entries_called_with, ([ENTRY], logger.full_name, None, None) + api._write_entries_called_with, + ([ENTRY], logger.full_name, None, None, True), ) def test_commit_w_resource_specified(self): @@ -1461,7 +1502,7 @@ def test_commit_w_resource_specified(self): batch.commit() self.assertEqual( api._write_entries_called_with, - (ENTRIES, logger.full_name, RESOURCE._to_dict(), None), + (ENTRIES, logger.full_name, RESOURCE._to_dict(), None, True), ) def test_commit_w_bound_client(self): @@ -1550,7 +1591,8 @@ def 
test_commit_w_bound_client(self): self.assertEqual(list(batch.entries), []) self.assertEqual( - api._write_entries_called_with, (ENTRIES, logger.full_name, None, None) + api._write_entries_called_with, + (ENTRIES, logger.full_name, None, None, True), ) def test_commit_w_alternate_client(self): @@ -1597,12 +1639,12 @@ def test_commit_w_alternate_client(self): batch.log_text(TEXT, labels=LABELS) batch.log_struct(STRUCT, severity=SEVERITY) batch.log_proto(message, http_request=REQUEST) - batch.commit(client=client2) + batch.commit(client=client2, partial_success=False) self.assertEqual(list(batch.entries), []) self.assertEqual( api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS), + (ENTRIES, logger.full_name, None, DEFAULT_LABELS, False), ) def test_context_mgr_success(self): @@ -1653,7 +1695,7 @@ def test_context_mgr_success(self): self.assertEqual(list(batch.entries), []) self.assertEqual( api._write_entries_called_with, - (ENTRIES, logger.full_name, None, DEFAULT_LABELS), + (ENTRIES, logger.full_name, None, DEFAULT_LABELS, True), ) def test_context_mgr_failure(self): @@ -1719,7 +1761,13 @@ def write_entries( labels=None, partial_success=False, ): - self._write_entries_called_with = (entries, logger_name, resource, labels) + self._write_entries_called_with = ( + entries, + logger_name, + resource, + labels, + partial_success, + ) def logger_delete(self, logger_name): self._logger_delete_called_with = logger_name From e9ebe9b4758133c28248e32bf3769585958811ae Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 Oct 2022 17:26:04 +0200 Subject: [PATCH 689/855] chore(deps): update all dependencies (#647) Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 59aab11ad32a..7ba2142d60f9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.2.4 -google-cloud-bigquery==3.3.3 +google-cloud-logging==3.2.5 +google-cloud-bigquery==3.3.5 google-cloud-storage==2.5.0 -google-cloud-pubsub==2.13.7 +google-cloud-pubsub==2.13.10 From 4c536f0fbbafa3af23ac881e76d351ceeb02cb20 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 18 Oct 2022 11:55:38 -0700 Subject: [PATCH 690/855] feat: include context on batch log errors (#650) --- .../google/cloud/logging_v2/logger.py | 43 +++++++- .../tests/unit/test_logger.py | 102 ++++++++++++++++++ 2 files changed, 141 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 5abc0f2c79b8..85007b796335 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -15,6 +15,7 @@ """Define API Loggers.""" import collections +import re from google.cloud.logging_v2._helpers import _add_defaults_to_filter from google.cloud.logging_v2.entries import LogEntry @@ -25,6 +26,9 @@ from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2._instrumentation import _add_instrumentation +from google.api_core.exceptions import InvalidArgument +from google.rpc.error_details_pb2 import DebugInfo + import 
google.protobuf.message _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -459,8 +463,39 @@ def commit(self, *, client=None, partial_success=True): kwargs["labels"] = self.logger.labels entries = [entry.to_api_repr() for entry in self.entries] - - client.logging_api.write_entries( - entries, partial_success=partial_success, **kwargs - ) + try: + client.logging_api.write_entries( + entries, partial_success=partial_success, **kwargs + ) + except InvalidArgument as e: + # InvalidArgument is often sent when a log is too large + # attempt to attach extra context on which log caused error + self._append_context_to_error(e) + raise e del self.entries[:] + + def _append_context_to_error(self, err): + """ + Attempts to modify `write_entries` exception messages to contain + context on which log in the batch caused the error. + + Best-effort basis. If another exception occurs while processing the + input exception, the input will be left unmodified + + Args: + err (~google.api_core.exceptions.InvalidArgument): + The original exception object + """ + try: + # find debug info proto if in details + debug_info = next(x for x in err.details if isinstance(x, DebugInfo)) + # parse out the index of the faulty entry + error_idx = re.search("(?<=key: )[0-9]+", debug_info.detail).group(0) + # find the faulty entry object + found_entry = self.entries[int(error_idx)] + str_entry = str(found_entry.to_api_repr()) + # modify error message to contain extra context + err.message = f"{err.message}: {str_entry:.2000}..." + except Exception: + # if parsing fails, abort changes and leave err unmodified + pass diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 76113748faf5..3091693e322a 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -16,8 +16,10 @@ from datetime import datetime from datetime import timedelta from datetime import timezone +import sys import unittest +import pytest import mock @@ -1739,6 +1741,87 @@ def test_context_mgr_failure(self): self.assertEqual(list(batch.entries), UNSENT) self.assertIsNone(api._write_entries_called_with) + @pytest.mark.skipif( + sys.version_info < (3, 8), + reason="InvalidArgument init with details requires python3.8+", + ) + def test_append_context_to_error(self): + """ + If an InvalidArgument exception contains info on the log that threw it, + we should be able to add it to the exception message. If not, the +
If not, the + exception should be unchanged + """ + from google.api_core.exceptions import InvalidArgument + from google.rpc.error_details_pb2 import DebugInfo + from google.cloud.logging import TextEntry + + logger = _Logger() + client = _Client(project=self.PROJECT) + batch = self._make_one(logger, client=client) + test_entries = [TextEntry(payload=str(i)) for i in range(11)] + batch.entries = test_entries + starting_message = "test" + # test that properly formatted exceptions add log details + for idx, entry in enumerate(test_entries): + api_entry = entry.to_api_repr() + err = InvalidArgument( + starting_message, details=["padding", DebugInfo(detail=f"key: {idx}")] + ) + batch._append_context_to_error(err) + self.assertEqual(err.message, f"{starting_message}: {str(api_entry)}...") + self.assertIn(str(idx), str(entry)) + # test with missing debug info + err = InvalidArgument(starting_message, details=[]) + batch._append_context_to_error(err) + self.assertEqual( + err.message, starting_message, "message should have been unchanged" + ) + # test with missing key + err = InvalidArgument( + starting_message, details=["padding", DebugInfo(detail="no k e y here")] + ) + batch._append_context_to_error(err) + self.assertEqual( + err.message, starting_message, "message should have been unchanged" + ) + # test with key out of range + err = InvalidArgument( + starting_message, details=["padding", DebugInfo(detail="key: 100")] + ) + batch._append_context_to_error(err) + self.assertEqual( + err.message, starting_message, "message should have been unchanged" + ) + + @pytest.mark.skipif( + sys.version_info < (3, 8), + reason="InvalidArgument init with details requires python3.8+", + ) + def test_batch_error_gets_context(self): + """ + Simulate an InvalidArgument sent as part of a batch commit, to ensure + _append_context_to_error is thrown + """ + from google.api_core.exceptions import InvalidArgument + from google.rpc.error_details_pb2 import DebugInfo + from google.cloud.logging import TextEntry + + logger = _Logger() + client = _Client(project=self.PROJECT) + starting_message = "hello" + exception = InvalidArgument( + starting_message, details=[DebugInfo(detail="key: 1")] + ) + client.logging_api = _DummyLoggingExceptionAPI(exception) + batch = self._make_one(logger, client=client) + test_entries = [TextEntry(payload=str(i)) for i in range(11)] + batch.entries = test_entries + with self.assertRaises(InvalidArgument) as e: + batch.commit() + expected_log = test_entries[1] + api_entry = expected_log.to_api_repr() + self.assertEqual(e.message, f"{starting_message}: {str(api_entry)}...") + class _Logger(object): @@ -1773,6 +1856,25 @@ def logger_delete(self, logger_name): self._logger_delete_called_with = logger_name +class _DummyLoggingExceptionAPI(object): + def __init__(self, exception): + self.exception = exception + + def write_entries( + self, + entries, + *, + logger_name=None, + resource=None, + labels=None, + partial_success=False, + ): + raise self.exception + + def logger_delete(self, logger_name): + raise self.exception + + class _Client(object): def __init__(self, project, connection=None): self.project = project From 24689d85d3310dcb62d69bbc1997eac578aa5b46 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Oct 2022 09:13:03 -0700 Subject: [PATCH 691/855] fix: json fields dictionary has modification side effect (#654) --- .../cloud/logging_v2/handlers/handlers.py | 1 + .../tests/unit/handlers/test_handlers.py | 18 +++++++++++ .../unit/handlers/test_structured_log.py | 30 
+++++++++++++++++++ 3 files changed, 49 insertions(+) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index f6fa90d71e5d..28960ae71ca4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -252,6 +252,7 @@ def _format_and_parse_message(record, formatter_handler): pass # if json_fields was set, create a dictionary using that if passed_json_fields and isinstance(passed_json_fields, collections.abc.Mapping): + passed_json_fields = passed_json_fields.copy() if message != "None": passed_json_fields["message"] = message return passed_json_fields diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index ad70061a6951..1e431f1aab30 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -846,6 +846,24 @@ def test_json_fields_with_json_message(self): self.assertEqual(result["key_m"], message["key_m"]) self.assertEqual(result["key_j"], json_fields["key_j"]) + def test_json_fields_input_unmodified(self): + # Related issue: https://github.com/googleapis/python-logging/issues/652 + from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message + + message = "hello world" + json_fields = {"hello": "world"} + json_fields_orig = json_fields.copy() + record = logging.LogRecord("logname", None, None, None, message, None, None) + setattr(record, "json_fields", json_fields) + handler = logging.StreamHandler() + _format_and_parse_message(record, handler) + # ensure json_fields has no side-effects + self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) + for (key, value) in json_fields_orig.items(): + self.assertEqual( + value, json_fields[key], f"expected_payload[{key}] != result[{key}]" + ) + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 3fe322fe376b..61bf36f656f6 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -511,6 +511,36 @@ def test_format_with_nested_json(self): result = json.loads(handler.format(record)) self.assertEqual(result["outer"], json_fields["outer"]) + def test_json_fields_input_unmodified(self): + # Related issue: https://github.com/googleapis/python-logging/issues/652 + import logging + + handler = self._make_one() + message = "hello world" + json_fields = { + "hello": "world", + } + json_fields_orig = json_fields.copy() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) + record.created = None + setattr(record, "json_fields", json_fields) + handler.filter(record) + handler.format(record) + # ensure json_fields has no side-effects + self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) + for (key, value) in json_fields_orig.items(): + self.assertEqual( + value, json_fields[key], f"expected_payload[{key}] != result[{key}]" + ) + def test_emits_instrumentation_info(self): import logging import mock From b5caa2c0d9535a707793ec3693bdf1f9a1afe3e3 Mon 
Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 13:13:19 +0200 Subject: [PATCH 692/855] chore(deps): update dependency pytest to v7.2.0 (#655) Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 3bc76cee52db..b90fc387d015 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.1.3 +pytest==7.2.0 From 156878aaa00ddd04e61702271e16fcc5c1066950 Mon Sep 17 00:00:00 2001 From: Thiago Hiromi <83295+thiromi@users.noreply.github.com> Date: Thu, 27 Oct 2022 01:09:04 +0200 Subject: [PATCH 693/855] feat: add support to custom JSON encoders (#657) --- .../logging_v2/handlers/structured_log.py | 15 ++++-- .../unit/handlers/test_structured_log.py | 54 +++++++++++++++++++ 2 files changed, 66 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 65528254fc16..55ed9c2d0a9e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -62,12 +62,15 @@ class StructuredLogHandler(logging.StreamHandler): and write them to standard output """ - def __init__(self, *, labels=None, stream=None, project_id=None): + def __init__( + self, *, labels=None, stream=None, project_id=None, json_encoder_cls=None + ): """ Args: labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. project (Optional[str]): Project Id associated with the logs. + json_encoder_cls (Optional[Type[JSONEncoder]]): Custom JSON encoder. Defaults to json.JSONEncoder """ super(StructuredLogHandler, self).__init__(stream=stream) self.project_id = project_id @@ -79,6 +82,8 @@ def __init__(self, *, labels=None, stream=None, project_id=None): # make logs appear in GCP structured logging format self._gcp_formatter = logging.Formatter(GCP_FORMAT) + self._json_encoder_cls = json_encoder_cls or json.JSONEncoder + def format(self, record): """Format the message into structured log JSON. 
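Two handler behaviors meet in the sketch below: the json_encoder_cls hook introduced here, and the earlier fix that copies json_fields rather than mutating the caller's dict. Assuming the public StructuredLogHandler export and pathlib.Path as an example of a type the stock encoder rejects:

    import json
    import logging
    from pathlib import Path

    from google.cloud.logging.handlers import StructuredLogHandler

    class PathEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, Path):
                return str(obj)
            return super().default(obj)

    handler = StructuredLogHandler(json_encoder_cls=PathEncoder)
    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    fields = {"path": Path("/etc/app")}
    logger.info("config loaded", extra={"json_fields": fields})
    # the handler formats a copy, so the caller's dict is left unchanged
    assert fields == {"path": Path("/etc/app")}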
Args: @@ -95,14 +100,18 @@ def format(self, record): if key in GCP_STRUCTURED_LOGGING_FIELDS: del message[key] # if input is a dictionary, encode it as a json string - encoded_msg = json.dumps(message, ensure_ascii=False) + encoded_msg = json.dumps( + message, ensure_ascii=False, cls=self._json_encoder_cls + ) # all json.dumps strings should start and end with parentheses # strip them out to embed these fields in the larger JSON payload if len(encoded_msg) > 2: payload = encoded_msg[1:-1] + "," elif message: # properly break any formatting in string to make it json safe - encoded_message = json.dumps(message, ensure_ascii=False) + encoded_message = json.dumps( + message, ensure_ascii=False, cls=self._json_encoder_cls + ) payload = '"message": {},'.format(encoded_message) record._payload_str = payload or "" diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 61bf36f656f6..d930da763214 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -46,6 +46,15 @@ def test_ctor_w_project(self): handler = self._make_one(project_id="foo") self.assertEqual(handler.project_id, "foo") + def test_ctor_w_encoder(self): + import json + + class CustomJSONEncoder(json.JSONEncoder): + pass + + handler = self._make_one(json_encoder_cls=CustomJSONEncoder) + self.assertEqual(handler._json_encoder_cls, CustomJSONEncoder) + def test_format(self): import logging import json @@ -207,6 +216,51 @@ def test_format_with_custom_formatter(self): self.assertIn(expected_result, result) self.assertIn("message", result) + def test_format_with_custom_json_encoder(self): + import json + import logging + + from pathlib import Path + from typing import Any + + class CustomJSONEncoder(json.JSONEncoder): + def default(self, obj: Any) -> Any: + if isinstance(obj, Path): + return str(obj) + return json.JSONEncoder.default(self, obj) + + handler = self._make_one(json_encoder_cls=CustomJSONEncoder) + + message = "hello world" + json_fields = {"path": Path("/path")} + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) + setattr(record, "json_fields", json_fields) + expected_payload = { + "message": message, + "severity": "INFO", + "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, + "logging.googleapis.com/sourceLocation": {}, + "httpRequest": {}, + "logging.googleapis.com/labels": {}, + "path": "/path", + } + handler.filter(record) + + result = json.loads(handler.format(record)) + + self.assertEqual(set(expected_payload.keys()), set(result.keys())) + self.assertEqual(result["path"], "/path") + def test_format_with_reserved_json_field(self): # drop json_field data with reserved names # related issue: https://github.com/googleapis/python-logging/issues/543 From 57ddaaac1744ec1d59fab4c6b9b5153e79f1194a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 8 Nov 2022 10:39:07 -0800 Subject: [PATCH 694/855] chore: fix environment tests by changing uuid generation (#668) --- packages/google-cloud-logging/.kokoro/environment_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh index c80c51d65c56..29913aeaf581 100755 --- 
a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ b/packages/google-cloud-logging/.kokoro/environment_tests.sh @@ -65,7 +65,7 @@ if [[ "${ENVIRONMENT}" == "kubernetes" ]]; then fi # create a unique id for this run -UUID=$(python -c 'import uuid; print(uuid.uuid1())' | head -c 7) +UUID=$(python -c 'import uuid; print(str(uuid.uuid1())[:7])') export ENVCTL_ID=ci-$UUID echo $ENVCTL_ID From 023ddf1446bfc432cfca108db7dd59cac8b68bec Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 19 Nov 2022 17:07:44 +0100 Subject: [PATCH 695/855] chore(deps): update all dependencies (#661) --- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 7ba2142d60f9..a14ceafcbc49 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.2.5 -google-cloud-bigquery==3.3.5 -google-cloud-storage==2.5.0 -google-cloud-pubsub==2.13.10 +google-cloud-bigquery==3.4.0 +google-cloud-storage==2.6.0 +google-cloud-pubsub==2.13.11 From 58fac58a13330a30c597d196d27b05772d3402f6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Nov 2022 18:49:02 -0500 Subject: [PATCH 696/855] chore(python): drop flake8-import-order in samples noxfile (#676) Source-Link: https://github.com/googleapis/synthtool/commit/6ed3a831cb9ff69ef8a504c353e098ec0192ad93 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/docker/docs/Dockerfile | 12 +- .../.kokoro/requirements.in | 4 +- .../.kokoro/requirements.txt | 354 ++++++++++-------- packages/google-cloud-logging/noxfile.py | 15 +- .../samples/snippets/noxfile.py | 26 +- 6 files changed, 217 insertions(+), 196 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 3815c983cb16..bb21147e4c23 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
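Tying back to the batch error-context change earlier in the series (#650): when a batched write fails with INVALID_ARGUMENT and the server's DebugInfo detail names the failing entry as "key: <index>", the re-raised exception now carries up to 2000 characters of that entry's payload. A sketch of what a caller observes, with an oversized payload as a stand-in trigger:

    from google.api_core.exceptions import InvalidArgument

    import google.cloud.logging

    client = google.cloud.logging.Client()
    batch = client.logger("my-log").batch()
    batch.log_text("x" * 300000)  # large enough for the API to reject

    try:
        batch.commit()
    except InvalidArgument as err:
        # suffixed with the repr of the offending entry when the server
        # identified one; left unmodified otherwise
        print(err.message)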
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index 238b87b9d1c9..f8137d0ae497 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in index 7718391a34d7..cbd7e77f44db 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.in +++ b/packages/google-cloud-logging/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index d15994bac93c..9c1b9be34e6b 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - 
--hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + 
--hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - 
--hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + 
--hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + 
--hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - 
--hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - 
--hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - 
--hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + 
--hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index e2bd78bd5d37..85e9dbb8421c 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -281,12 +281,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -303,13 +307,16 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 0398d72ff690..f5c32b22789b 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) From a4242c99dbb55d6f2a49815f6fbd10a7adb31ac0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 29 Nov 2022 12:19:02 -0800 Subject: [PATCH 697/855] chore(main): release 3.3.0 (#645) --- packages/google-cloud-logging/CHANGELOG.md | 16 ++++++++++++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 9fe42730c7f7..b8816116fcff 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.3.0](https://github.com/googleapis/python-logging/compare/v3.2.4...v3.3.0) (2022-11-26) + + +### Features + +* Add support to custom JSON encoders ([#657](https://github.com/googleapis/python-logging/issues/657)) ([77e621c](https://github.com/googleapis/python-logging/commit/77e621cb85b726a801227da85c31319f25969d19)) +* Include context on batch log errors ([#650](https://github.com/googleapis/python-logging/issues/650)) ([d08be9a](https://github.com/googleapis/python-logging/commit/d08be9ae26c70f94e4a264e2bc518dd8a8dbae91)) +* Set partial_success to default to true for batched logs ([#649](https://github.com/googleapis/python-logging/issues/649)) ([e56d3e8](https://github.com/googleapis/python-logging/commit/e56d3e83859e20fbbd9cd22d3275ff123f79d6c9)) +* Support Django asgi middleware ([#625](https://github.com/googleapis/python-logging/issues/625)) ([f52b3aa](https://github.com/googleapis/python-logging/commit/f52b3aae28e7ed5809fc5470213b2a1c6f7d77bd)) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#644](https://github.com/googleapis/python-logging/issues/644)) ([12f3001](https://github.com/googleapis/python-logging/commit/12f3001d834e858dbbbcdaab5894ec25a14c6820)) +* Json fields dictionary has modification side effect ([#654](https://github.com/googleapis/python-logging/issues/654)) ([a62a0d6](https://github.com/googleapis/python-logging/commit/a62a0d66cb8c07e617636e5ed54961f5e28164ae)) + ## [3.2.4](https://github.com/googleapis/python-logging/compare/v3.2.3...v3.2.4) (2022-10-04) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index c5f649b6a321..bf432bd460a3 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.2.4" 
+version = "3.3.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e4f7fc92de47f0f416a9889e0e4a940cad119f47 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Dec 2022 17:28:06 +0100 Subject: [PATCH 698/855] chore(deps): update dependency google-cloud-logging to v3.3.0 (#678) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index a14ceafcbc49..0504505e2a75 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.2.5 +google-cloud-logging==3.3.0 google-cloud-bigquery==3.4.0 google-cloud-storage==2.6.0 google-cloud-pubsub==2.13.11 From 8d553f2deceed005169ff0b0bc7f17e68015b31a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 5 Dec 2022 15:26:41 -0800 Subject: [PATCH 699/855] fix: client_info default values (#681) --- .../google/cloud/logging_v2/_gapic.py | 40 ++++++++++++- .../google/cloud/logging_v2/client.py | 4 ++ .../logging_v2/test_config_service_v2.py | 16 +++++ .../logging_v2/test_logging_service_v2.py | 16 +++++ .../logging_v2/test_metrics_service_v2.py | 16 +++++ .../tests/unit/test_client.py | 58 +++++++++++++++++++ 6 files changed, 147 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index 5fa31b9e77de..b71d3d92c982 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -35,6 +35,9 @@ from google.cloud.logging_v2.sink import Sink from google.cloud.logging_v2.metric import Metric +from google.api_core import client_info +from google.api_core import gapic_v1 + class _LoggingAPI(object): """Helper mapping logging-related APIs.""" @@ -562,6 +565,22 @@ def _log_entry_mapping_to_pb(mapping): return LogEntryPB(entry_pb) +def _client_info_to_gapic(input_info): + """ + Helper function to convert api_core.client_info to + api_core.gapic_v1.client_info subclass + """ + return gapic_v1.client_info.ClientInfo( + python_version=input_info.python_version, + grpc_version=input_info.grpc_version, + api_core_version=input_info.api_core_version, + gapic_version=input_info.gapic_version, + client_library_version=input_info.client_library_version, + user_agent=input_info.user_agent, + rest_version=input_info.rest_version, + ) + + def make_logging_api(client): """Create an instance of the Logging API adapter. @@ -572,9 +591,14 @@ def make_logging_api(client): Returns: _LoggingAPI: A metrics API instance with the proper credentials. """ + info = client._client_info + if type(info) == client_info.ClientInfo: + # convert into gapic-compatible subclass + info = _client_info_to_gapic(info) + generated = LoggingServiceV2Client( credentials=client._credentials, - client_info=client._client_info, + client_info=info, client_options=client._client_options, ) return _LoggingAPI(generated, client) @@ -590,9 +614,14 @@ def make_metrics_api(client): Returns: _MetricsAPI: A metrics API instance with the proper credentials. 
""" + info = client._client_info + if type(info) == client_info.ClientInfo: + # convert into gapic-compatible subclass + info = _client_info_to_gapic(info) + generated = MetricsServiceV2Client( credentials=client._credentials, - client_info=client._client_info, + client_info=info, client_options=client._client_options, ) return _MetricsAPI(generated, client) @@ -608,9 +637,14 @@ def make_sinks_api(client): Returns: _SinksAPI: A metrics API instance with the proper credentials. """ + info = client._client_info + if type(info) == client_info.ClientInfo: + # convert into gapic-compatible subclass + info = _client_info_to_gapic(info) + generated = ConfigServiceV2Client( credentials=client._credentials, - client_info=client._client_info, + client_info=info, client_options=client._client_options, ) return _SinksAPI(generated, client) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 049737861b79..218eee09530a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -137,6 +137,10 @@ def __init__( kw_args["api_endpoint"] = api_endpoint self._connection = Connection(self, **kw_args) + if client_info is None: + # if client info not passed in, use the discovered + # client info from _connection object + client_info = self._connection._client_info self._client_info = client_info self._client_options = client_options diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index be77714c0196..8e8671e683c1 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -99,6 +99,22 @@ def test__get_default_mtls_endpoint(): ) +def test_config_default_client_info_headers(): + import re + import pkg_resources + + # test that DEFAULT_CLIENT_INFO contains the expected gapic headers + gapic_header_regex = re.compile( + r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" + ) + detected_info = ( + google.cloud.logging_v2.services.config_service_v2.transports.base.DEFAULT_CLIENT_INFO + ) + assert detected_info is not None + detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) + assert gapic_header_regex.match(detected_agent) + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3a169cc9da10..832ad63dc351 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -71,6 +71,22 @@ def modify_default_endpoint(client): ) +def test_logging_default_client_info_headers(): + import re + import pkg_resources + + # test that DEFAULT_CLIENT_INFO contains the expected gapic headers + gapic_header_regex = re.compile( + r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" + ) + detected_info = ( + google.cloud.logging_v2.services.logging_service_v2.transports.base.DEFAULT_CLIENT_INFO + ) + assert detected_info is not None + detected_agent = " 
".join(sorted(detected_info.to_user_agent().split(" "))) + assert gapic_header_regex.match(detected_agent) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 37726ba52fd4..4f9e2347ccc9 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -99,6 +99,22 @@ def test__get_default_mtls_endpoint(): ) +def test_metrics_default_client_info_headers(): + import re + import pkg_resources + + # test that DEFAULT_CLIENT_INFO contains the expected gapic headers + gapic_header_regex = re.compile( + r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" + ) + detected_info = ( + google.cloud.logging_v2.services.metrics_service_v2.transports.base.DEFAULT_CLIENT_INFO + ) + assert detected_info is not None + detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) + assert gapic_header_regex.match(detected_agent) + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 1a31e9c0c277..1c47a343b148 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -16,11 +16,16 @@ from datetime import datetime from datetime import timedelta from datetime import timezone +import re import unittest import mock +VENEER_HEADER_REGEX = re.compile( + r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gccl\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" +) + def _make_credentials(): import google.auth.credentials @@ -148,6 +153,59 @@ def make_api(client_obj): again = client.logging_api self.assertIs(again, api) + def test_veneer_grpc_headers(self): + # test that client APIs have client_info populated with the expected veneer headers + # required for proper instrumentation + creds = _make_credentials() + # ensure client info is set on client object + client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) + self.assertIsNotNone(client._client_info) + user_agent_sorted = " ".join( + sorted(client._client_info.to_user_agent().split(" ")) + ) + self.assertTrue(VENEER_HEADER_REGEX.match(user_agent_sorted)) + # ensure client info is propagated to gapic wrapped methods + patch = mock.patch("google.api_core.gapic_v1.method.wrap_method") + with patch as gapic_mock: + client.logging_api # initialize logging api + client.metrics_api # initialize metrics api + client.sinks_api # initialize sinks api + wrapped_call_list = gapic_mock.call_args_list + num_api_calls = 37 # expected number of distinct APIs in all gapic services (logging,metrics,sinks) + self.assertGreaterEqual( + len(wrapped_call_list), + num_api_calls, + "unexpected number of APIs wrapped", + ) + for call in wrapped_call_list: + client_info = call.kwargs["client_info"] + self.assertIsNotNone(client_info) + wrapped_user_agent_sorted = " ".join( + sorted(client_info.to_user_agent().split(" ")) + ) + self.assertTrue(VENEER_HEADER_REGEX.match(wrapped_user_agent_sorted)) + + def test_veneer_http_headers(self): + # test that http APIs have client_info populated with the 
expected veneer headers + # required for proper instrumentation + creds = _make_credentials() + # ensure client info is set on client object + client = self._make_one( + project=self.PROJECT, credentials=creds, _use_grpc=False + ) + self.assertIsNotNone(client._client_info) + user_agent_sorted = " ".join( + sorted(client._client_info.to_user_agent().split(" ")) + ) + self.assertTrue(VENEER_HEADER_REGEX.match(user_agent_sorted)) + # ensure client info is propagated to _connection object + connection_user_agent = client._connection._client_info.to_user_agent() + self.assertIsNotNone(connection_user_agent) + connection_user_agent_sorted = " ".join( + sorted(connection_user_agent.split(" ")) + ) + self.assertTrue(VENEER_HEADER_REGEX.match(connection_user_agent_sorted)) + def test_no_gapic_ctor(self): from google.cloud.logging_v2._http import _LoggingAPI From 509f16e43198aec53d6474a8d982eedd65172768 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Dec 2022 16:06:19 -0800 Subject: [PATCH 700/855] chore(main): release 3.3.1 (#683) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ packages/google-cloud-logging/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index b8816116fcff..838f36fcad28 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.3.1](https://github.com/googleapis/python-logging/compare/v3.3.0...v3.3.1) (2022-12-05) + + +### Bug Fixes + +* Client_info default values ([#681](https://github.com/googleapis/python-logging/issues/681)) ([b74d2a8](https://github.com/googleapis/python-logging/commit/b74d2a8c4f34aece5dc851de0e7542b3229751ba)) + ## [3.3.0](https://github.com/googleapis/python-logging/compare/v3.2.4...v3.3.0) (2022-11-26) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index bf432bd460a3..d1fc8f037a83 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.3.0" +version = "3.3.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 2705b08d265afcf88fff50e2513e6b0ea3e9ebd4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 8 Dec 2022 20:59:56 +0100 Subject: [PATCH 701/855] chore(deps): update all dependencies (#685) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0504505e2a75..4d5a11e58580 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.3.0 +google-cloud-logging==3.3.1 google-cloud-bigquery==3.4.0 -google-cloud-storage==2.6.0 +google-cloud-storage==2.7.0 google-cloud-pubsub==2.13.11 From d0a72f3d64868b2700c9a4fbe3d6931512a93d77 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 9 Dec 2022 12:43:31 
-0500 Subject: [PATCH 702/855] build(deps): bump certifi from 2022.9.24 to 2022.12.7 [autoapprove] (#687) * build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 * trigger ci Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-logging/.pre-commit-config.yaml | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index bb21147e4c23..df2cfe5d8965 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 9c1b9be34e6b..05dc4672edaa 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 46d237160f6d..5405cc8ff1f3 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From 005e748836ae4eb38c6c4c02a135f5afc4d134db Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Dec 2022 15:41:03 -0500 Subject: [PATCH 703/855] fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#659) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version in gapic_version.py * feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * move version to gapic_version.py; clean up setup.py and owlbot.py * customize testing/constraints-3.7.txt * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version * restore test Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/.coveragerc | 5 - .../.github/release-please.yml | 1 + .../.release-please-manifest.json | 3 + .../google/cloud/logging/gapic_version.py | 16 ++ .../google/cloud/logging_v2/__init__.py | 8 +- .../google/cloud/logging_v2/gapic_version.py | 16 ++ .../config_service_v2/async_client.py | 230 +++++++++-------- .../services/config_service_v2/client.py | 182 ++++++------- .../config_service_v2/transports/base.py | 16 +- .../config_service_v2/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../logging_service_v2/async_client.py | 80 +++--- .../services/logging_service_v2/client.py | 76 +++--- .../logging_service_v2/transports/base.py | 16 +- .../logging_service_v2/transports/grpc.py | 20 +- 
.../transports/grpc_asyncio.py | 16 +- .../metrics_service_v2/async_client.py | 74 +++--- .../services/metrics_service_v2/client.py | 70 ++--- .../metrics_service_v2/transports/base.py | 16 +- .../metrics_service_v2/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../cloud/logging_v2/types/log_entry.py | 58 +++-- .../google/cloud/logging_v2/types/logging.py | 90 +++---- .../cloud/logging_v2/types/logging_config.py | 242 +++++++++--------- .../cloud/logging_v2/types/logging_metrics.py | 50 ++-- packages/google-cloud-logging/owlbot.py | 96 ++++--- .../release-please-config.json | 25 ++ ...onfig_service_v2_copy_log_entries_async.py | 2 +- ...> snippet_metadata_google.logging.v2.json} | 15 +- .../samples/snippets/noxfile.py | 15 +- packages/google-cloud-logging/setup.py | 51 ++-- .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.7.txt | 11 +- .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../logging_v2/test_config_service_v2.py | 16 -- .../logging_v2/test_logging_service_v2.py | 28 +- .../logging_v2/test_metrics_service_v2.py | 31 ++- 39 files changed, 888 insertions(+), 783 deletions(-) create mode 100644 packages/google-cloud-logging/.release-please-manifest.json create mode 100644 packages/google-cloud-logging/google/cloud/logging/gapic_version.py create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py create mode 100644 packages/google-cloud-logging/release-please-config.json rename packages/google-cloud-logging/samples/generated_samples/{snippet_metadata_logging_v2.json => snippet_metadata_google.logging.v2.json} (99%) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index b38d22e21fd1..c0f6e82dff6e 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-logging/.github/release-please.yml b/packages/google-cloud-logging/.github/release-please.yml index 5161ab347cdf..dbd2cc9debee 100644 --- a/packages/google-cloud-logging/.github/release-please.yml +++ b/packages/google-cloud-logging/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json new file mode 100644 index 000000000000..466b7195f893 --- /dev/null +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "3.3.1" +} diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py new file mode 100644 index 000000000000..c443818a0ed0 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "3.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py index d55e474d9da2..9860f1e06415 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -14,13 +14,9 @@ from __future__ import absolute_import -import pkg_resources - -try: - __version__ = pkg_resources.get_distribution("google-cloud-logging").version -except pkg_resources.DistributionNotFound: - __version__ = None +from google.cloud.logging_v2 import gapic_version as package_version +__version__ = package_version.__version__ from google.cloud.logging_v2.client import Client from google.cloud.logging_v2.entries import logger_name_from_path diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py new file mode 100644 index 000000000000..c443818a0ed0 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
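The change to logging_v2/__init__.py above is the heart of the pkg_resources removal: instead of asking the installed distribution for its version at import time (which raises DistributionNotFound whenever the code runs without being pip-installed), the version is read from a static module that release tooling rewrites. A minimal sketch of the resulting pattern, using the same names as the diff:

# gapic_version.py -- the only place the version string lives;
# release-please rewrites the line tagged {x-release-please-version}.
__version__ = "3.3.1"

# __init__.py -- a plain import; no installed-metadata lookup left to fail.
from google.cloud.logging_v2 import gapic_version as package_version

__version__ = package_version.__version__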
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "3.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 80f740284e3c..f691bd684b1f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -176,9 +187,9 @@ def transport(self) -> ConfigServiceV2Transport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the config service v2 client. @@ -222,11 +233,11 @@ def __init__( async def list_buckets( self, - request: Union[logging_config.ListBucketsRequest, dict] = None, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. @@ -259,7 +270,7 @@ async def sample_list_buckets(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]]): The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be @@ -346,10 +357,10 @@ async def sample_list_buckets(): async def get_bucket( self, - request: Union[logging_config.GetBucketRequest, dict] = None, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -381,7 +392,7 @@ async def sample_get_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
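Two signature changes repeat for every method in this file, beginning with list_buckets above: parameters defaulting to None now spell out Optional[...] (newer type checkers reject the implicit form), and timeout: float = None becomes a sentinel default (gapic_v1.method.DEFAULT) so that "not specified" is distinguishable from an explicit None. A generic sketch of the sentinel idea, with a stand-in object in place of the real gapic sentinel:

from typing import Optional, Union

_DEFAULT = object()  # stand-in for gapic_v1.method.DEFAULT

def list_buckets(parent: Optional[str] = None,
                 timeout: Union[float, object] = _DEFAULT) -> str:
    # Three distinguishable states, which `timeout: float = None` conflated:
    if timeout is _DEFAULT:
        return "use the method's configured default timeout"
    if timeout is None:
        return "caller explicitly disabled the timeout"
    return f"caller requested a {timeout}s timeout"

print(list_buckets())             # configured default
print(list_buckets(timeout=None)) # explicit opt-out
print(list_buckets(timeout=5.0))  # explicit value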
@@ -425,10 +436,10 @@ async def sample_get_bucket(): async def create_bucket( self, - request: Union[logging_config.CreateBucketRequest, dict] = None, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log @@ -463,7 +474,7 @@ async def sample_create_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -507,10 +518,10 @@ async def sample_create_bucket(): async def update_bucket( self, - request: Union[logging_config.UpdateBucketRequest, dict] = None, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. This method replaces the following fields @@ -553,7 +564,7 @@ async def sample_update_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -597,10 +608,10 @@ async def sample_update_bucket(): async def delete_bucket( self, - request: Union[logging_config.DeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a log bucket. @@ -634,7 +645,7 @@ async def sample_delete_bucket(): await client.delete_bucket(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -669,10 +680,10 @@ async def sample_delete_bucket(): async def undelete_bucket( self, - request: Union[logging_config.UndeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been @@ -703,7 +714,7 @@ async def sample_undelete_bucket(): await client.undelete_bucket(request=request) Args: - request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
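As the updated Args sections read, each request parameter still accepts either the typed message or a plain dict; the new Optional[...] wrapper only records that omitting it entirely is also allowed. A small usage sketch against the synchronous client (the bucket name is a placeholder, and an actual call needs ambient credentials):

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import GetBucketRequest

client = ConfigServiceV2Client()
name = "projects/my-project/locations/global/buckets/_Default"  # placeholder

# Equivalent spellings: a typed request message, or a dict with the same keys.
bucket = client.get_bucket(GetBucketRequest(name=name))
bucket = client.get_bucket({"name": name})
print(bucket.name)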
@@ -738,11 +749,11 @@ async def sample_undelete_bucket(): async def list_views( self, - request: Union[logging_config.ListViewsRequest, dict] = None, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. @@ -775,7 +786,7 @@ async def sample_list_views(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListViewsRequest, dict]]): The request object. The parameters to `ListViews`. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -854,10 +865,10 @@ async def sample_list_views(): async def get_view( self, - request: Union[logging_config.GetViewRequest, dict] = None, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -889,7 +900,7 @@ async def sample_get_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -933,10 +944,10 @@ async def sample_get_view(): async def create_view( self, - request: Union[logging_config.CreateViewRequest, dict] = None, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A @@ -970,7 +981,7 @@ async def sample_create_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1014,10 +1025,10 @@ async def sample_create_view(): async def update_view( self, - request: Union[logging_config.UpdateViewRequest, dict] = None, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the @@ -1053,7 +1064,7 @@ async def sample_update_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -1097,10 +1108,10 @@ async def sample_update_view(): async def delete_view( self, - request: Union[logging_config.DeleteViewRequest, dict] = None, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -1132,7 +1143,7 @@ async def sample_delete_view(): await client.delete_view(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1167,11 +1178,11 @@ async def sample_delete_view(): async def list_sinks( self, - request: Union[logging_config.ListSinksRequest, dict] = None, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. @@ -1204,7 +1215,7 @@ async def sample_list_sinks(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListSinksRequest, dict]]): The request object. The parameters to `ListSinks`. parent (:class:`str`): Required. The parent resource whose sinks are to be @@ -1298,11 +1309,11 @@ async def sample_list_sinks(): async def get_sink( self, - request: Union[logging_config.GetSinkRequest, dict] = None, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1334,7 +1345,7 @@ async def sample_get_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetSinkRequest, dict]]): The request object. The parameters to `GetSink`. sink_name (:class:`str`): Required. The resource name of the sink: @@ -1429,12 +1440,12 @@ async def sample_get_sink(): async def create_sink( self, - request: Union[logging_config.CreateSinkRequest, dict] = None, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, *, - parent: str = None, - sink: logging_config.LogSink = None, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1475,7 +1486,7 @@ async def sample_create_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]]): The request object. The parameters to `CreateSink`. parent (:class:`str`): Required. 
The resource in which to create the sink: @@ -1566,13 +1577,13 @@ async def sample_create_sink(): async def update_sink( self, - request: Union[logging_config.UpdateSinkRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, *, - sink_name: str = None, - sink: logging_config.LogSink = None, - update_mask: field_mask_pb2.FieldMask = None, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -1614,7 +1625,7 @@ async def sample_update_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]]): The request object. The parameters to `UpdateSink`. sink_name (:class:`str`): Required. The full resource name of the sink to update, @@ -1743,11 +1754,11 @@ async def sample_update_sink(): async def delete_sink( self, - request: Union[logging_config.DeleteSinkRequest, dict] = None, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, @@ -1777,7 +1788,7 @@ async def sample_delete_sink(): await client.delete_sink(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]]): The request object. The parameters to `DeleteSink`. sink_name (:class:`str`): Required. The full resource name of the sink to delete, @@ -1857,11 +1868,11 @@ async def sample_delete_sink(): async def list_exclusions( self, - request: Union[logging_config.ListExclusionsRequest, dict] = None, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent @@ -1895,7 +1906,7 @@ async def sample_list_exclusions(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): The request object. The parameters to `ListExclusions`. parent (:class:`str`): Required. 
The parent resource whose exclusions are to be @@ -1989,11 +2000,11 @@ async def sample_list_exclusions(): async def get_exclusion( self, - request: Union[logging_config.GetExclusionRequest, dict] = None, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2025,7 +2036,7 @@ async def sample_get_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): The request object. The parameters to `GetExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion: @@ -2116,12 +2127,12 @@ async def sample_get_exclusion(): async def create_exclusion( self, - request: Union[logging_config.CreateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, *, - parent: str = None, - exclusion: logging_config.LogExclusion = None, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified @@ -2160,7 +2171,7 @@ async def sample_create_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): The request object. The parameters to `CreateExclusion`. parent (:class:`str`): Required. The parent resource in which to create the @@ -2252,13 +2263,13 @@ async def sample_create_exclusion(): async def update_exclusion( self, - request: Union[logging_config.UpdateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the @@ -2296,7 +2307,7 @@ async def sample_update_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): The request object. The parameters to `UpdateExclusion`. name (:class:`str`): Required. 
The resource name of the exclusion to update: @@ -2402,11 +2413,11 @@ async def sample_update_exclusion(): async def delete_exclusion( self, - request: Union[logging_config.DeleteExclusionRequest, dict] = None, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -2435,7 +2446,7 @@ async def sample_delete_exclusion(): await client.delete_exclusion(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): The request object. The parameters to `DeleteExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion to @@ -2513,10 +2524,10 @@ async def sample_delete_exclusion(): async def get_cmek_settings( self, - request: Union[logging_config.GetCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -2557,7 +2568,7 @@ async def sample_get_cmek_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Log @@ -2615,10 +2626,10 @@ async def sample_get_cmek_settings(): async def update_cmek_settings( self, - request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -2664,7 +2675,7 @@ async def sample_update_cmek_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Log @@ -2722,11 +2733,11 @@ async def sample_update_cmek_settings(): async def get_settings( self, - request: Union[logging_config.GetSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. 
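update_exclusion above (like update_sink and update_settings) takes the same trio of flattened parameters: the resource name, the replacement message, and a FieldMask naming which fields to overwrite, all now annotated Optional because each may be omitted in favor of a full request object. A sketch of the mask pattern (resource names are placeholders):

from google.protobuf import field_mask_pb2

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import LogExclusion

client = ConfigServiceV2Client()

# Only the fields listed in the mask are replaced; everything else is kept.
exclusion = LogExclusion(name="my-exclusion", filter="severity<ERROR")
mask = field_mask_pb2.FieldMask(paths=["filter"])

updated = client.update_exclusion(
    name="projects/my-project/exclusions/my-exclusion",  # placeholder
    exclusion=exclusion,
    update_mask=mask,
)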
@@ -2768,7 +2779,7 @@ async def sample_get_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]]): The request object. The parameters to [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. See [Enabling CMEK for Log @@ -2855,12 +2866,12 @@ async def sample_get_settings(): async def update_settings( self, - request: Union[logging_config.UpdateSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, - settings: logging_config.Settings = None, - update_mask: field_mask_pb2.FieldMask = None, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -2909,7 +2920,7 @@ async def sample_update_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]]): The request object. The parameters to [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. See [Enabling CMEK for Log @@ -2998,10 +3009,10 @@ async def sample_update_settings(): async def copy_log_entries( self, - request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a @@ -3033,13 +3044,13 @@ async def sample_copy_log_entries(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): The request object. The parameters to CopyLogEntries. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
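The one sample fix in this file is in copy_log_entries above: on the async client the method call returns a coroutine, and `await operation.result()` parses as `await (operation.result())`, an attribute lookup on the not-yet-awaited coroutine. The regenerated snippet awaits first. The precedence issue in isolation, with stand-in types:

import asyncio

class _FakeOperation:
    # Stand-in for what the awaited coroutine resolves to.
    def result(self):
        return "copy complete"

async def _start():                      # like client.copy_log_entries(...)
    return _FakeOperation()

async def main():
    operation = _start()                 # a coroutine object, not yet awaited
    # operation.result() would raise AttributeError on the coroutine itself,
    # so parenthesize: await the coroutine, then call result() on the outcome.
    response = (await operation).result()
    print(response)

asyncio.run(main())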
@@ -3094,14 +3105,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ConfigServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index b905d174112e..7ebbe333f545 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -61,7 +73,7 @@ class ConfigServiceV2ClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. @@ -424,8 +436,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ConfigServiceV2Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the config service v2 client. @@ -439,7 +451,7 @@ def __init__( transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -469,6 +481,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -521,11 +534,11 @@ def __init__( def list_buckets( self, - request: Union[logging_config.ListBucketsRequest, dict] = None, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: r"""Lists log buckets. 
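Two more consequences of the static version module appear here. DEFAULT_CLIENT_INFO no longer needs a try/except, since there is no distribution lookup left to fail; and because the sync client's client_options may now arrive as a dict, a cast after normalization tells the type checker what the branches guarantee. A condensed sketch of both (the helper name is invented for illustration):

from typing import Optional, Union, cast

from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1

from google.cloud.logging_v2 import gapic_version as package_version

# Unconditional: the version is a constant, not installed metadata.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)

def _normalize_options(
    client_options: Optional[Union[client_options_lib.ClientOptions, dict]],
) -> client_options_lib.ClientOptions:
    # Mirrors the constructor logic in the diff above.
    if isinstance(client_options, dict):
        client_options = client_options_lib.from_dict(client_options)
    if client_options is None:
        client_options = client_options_lib.ClientOptions()
    # After the branches this is always a ClientOptions; the cast records that.
    return cast(client_options_lib.ClientOptions, client_options)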
@@ -645,10 +658,10 @@ def sample_list_buckets(): def get_bucket( self, - request: Union[logging_config.GetBucketRequest, dict] = None, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -725,10 +738,10 @@ def sample_get_bucket(): def create_bucket( self, - request: Union[logging_config.CreateBucketRequest, dict] = None, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log @@ -808,10 +821,10 @@ def sample_create_bucket(): def update_bucket( self, - request: Union[logging_config.UpdateBucketRequest, dict] = None, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. This method replaces the following fields @@ -899,10 +912,10 @@ def sample_update_bucket(): def delete_bucket( self, - request: Union[logging_config.DeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a log bucket. @@ -972,10 +985,10 @@ def sample_delete_bucket(): def undelete_bucket( self, - request: Union[logging_config.UndeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been @@ -1042,11 +1055,11 @@ def sample_undelete_bucket(): def list_views( self, - request: Union[logging_config.ListViewsRequest, dict] = None, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. @@ -1158,10 +1171,10 @@ def sample_list_views(): def get_view( self, - request: Union[logging_config.GetViewRequest, dict] = None, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. 
@@ -1238,10 +1251,10 @@ def sample_get_view(): def create_view( self, - request: Union[logging_config.CreateViewRequest, dict] = None, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A @@ -1320,10 +1333,10 @@ def sample_create_view(): def update_view( self, - request: Union[logging_config.UpdateViewRequest, dict] = None, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the @@ -1404,10 +1417,10 @@ def sample_update_view(): def delete_view( self, - request: Union[logging_config.DeleteViewRequest, dict] = None, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -1475,11 +1488,11 @@ def sample_delete_view(): def list_sinks( self, - request: Union[logging_config.ListSinksRequest, dict] = None, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. @@ -1595,11 +1608,11 @@ def sample_list_sinks(): def get_sink( self, - request: Union[logging_config.GetSinkRequest, dict] = None, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. 
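list_sinks, like the other list_* methods touched here, returns a pager rather than a raw response, and none of the annotation changes alter the calling pattern: iteration fetches further pages lazily. A brief usage sketch (the project id is a placeholder):

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()

# Iterating the pager issues additional ListSinks RPCs as page boundaries
# are crossed; the caller never handles page tokens directly.
for sink in client.list_sinks(parent="projects/my-project"):
    print(sink.name, "->", sink.destination)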
@@ -1715,12 +1728,12 @@ def sample_get_sink(): def create_sink( self, - request: Union[logging_config.CreateSinkRequest, dict] = None, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, *, - parent: str = None, - sink: logging_config.LogSink = None, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1852,13 +1865,13 @@ def sample_create_sink(): def update_sink( self, - request: Union[logging_config.UpdateSinkRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, *, - sink_name: str = None, - sink: logging_config.LogSink = None, - update_mask: field_mask_pb2.FieldMask = None, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -2018,11 +2031,11 @@ def sample_update_sink(): def delete_sink( self, - request: Union[logging_config.DeleteSinkRequest, dict] = None, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, @@ -2121,11 +2134,11 @@ def sample_delete_sink(): def list_exclusions( self, - request: Union[logging_config.ListExclusionsRequest, dict] = None, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent @@ -2242,11 +2255,11 @@ def sample_list_exclusions(): def get_exclusion( self, - request: Union[logging_config.GetExclusionRequest, dict] = None, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. 
@@ -2358,12 +2371,12 @@ def sample_get_exclusion(): def create_exclusion( self, - request: Union[logging_config.CreateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, *, - parent: str = None, - exclusion: logging_config.LogExclusion = None, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified @@ -2494,13 +2507,13 @@ def sample_create_exclusion(): def update_exclusion( self, - request: Union[logging_config.UpdateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the @@ -2644,11 +2657,11 @@ def sample_update_exclusion(): def delete_exclusion( self, - request: Union[logging_config.DeleteExclusionRequest, dict] = None, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -2744,10 +2757,10 @@ def sample_delete_exclusion(): def get_cmek_settings( self, - request: Union[logging_config.GetCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -2847,10 +2860,10 @@ def sample_get_cmek_settings(): def update_cmek_settings( self, - request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -2955,11 +2968,11 @@ def sample_update_cmek_settings(): def get_settings( self, - request: Union[logging_config.GetSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. 
@@ -3088,12 +3101,12 @@ def sample_get_settings(): def update_settings( self, - request: Union[logging_config.UpdateSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, - settings: logging_config.Settings = None, - update_mask: field_mask_pb2.FieldMask = None, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3231,10 +3244,10 @@ def sample_update_settings(): def copy_log_entries( self, - request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a @@ -3335,14 +3348,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ConfigServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index b65a2117b2dc..53046583b8b6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ConfigServiceV2Transport(abc.ABC): @@ -56,7 +52,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index d8aca6273a99..97c220686c9a 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -50,14 +50,14 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -185,8 +185,8 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 2a36a4955001..0d0737576858 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -95,15 +95,15 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, 
api_audience: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index d0c9cc06bea5..3f5ed6a0c650 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -19,6 +19,8 @@ from typing import ( Dict, Mapping, + MutableMapping, + MutableSequence, Optional, AsyncIterable, Awaitable, @@ -28,7 +30,8 @@ Type, Union, ) -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -171,9 +174,9 @@ def transport(self) -> LoggingServiceV2Transport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the logging service v2 client. @@ -217,11 +220,11 @@ def __init__( async def delete_log( self, - request: Union[logging.DeleteLogRequest, dict] = None, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, - log_name: str = None, + log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log @@ -254,7 +257,7 @@ async def sample_delete_log(): await client.delete_log(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]]): The request object. The parameters to DeleteLog. log_name (:class:`str`): Required. The resource name of the log to delete: @@ -332,14 +335,14 @@ async def sample_delete_log(): async def write_log_entries( self, - request: Union[logging.WriteLogEntriesRequest, dict] = None, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Mapping[str, str] = None, - entries: Sequence[log_entry.LogEntry] = None, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the @@ -380,7 +383,7 @@ async def sample_write_log_entries(): print(response) Args: - request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]]): The request object. The parameters to WriteLogEntries. log_name (:class:`str`): Optional. 
A default log resource name that is assigned @@ -424,7 +427,7 @@ async def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Mapping[str, str]`): + labels (:class:`MutableMapping[str, str]`): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -434,7 +437,7 @@ async def sample_write_log_entries(): This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): + entries (:class:`MutableSequence[google.cloud.logging_v2.types.LogEntry]`): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -534,13 +537,13 @@ async def sample_write_log_entries(): async def list_log_entries( self, - request: Union[logging.ListLogEntriesRequest, dict] = None, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that @@ -576,9 +579,9 @@ async def sample_list_log_entries(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]]): The request object. The parameters to `ListLogEntries`. - resource_names (:class:`Sequence[str]`): + resource_names (:class:`MutableSequence[str]`): Required. Names of one or more parent resources from which to retrieve log entries: @@ -703,10 +706,12 @@ async def sample_list_log_entries(): async def list_monitored_resource_descriptors( self, - request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types @@ -739,7 +744,7 @@ async def sample_list_monitored_resource_descriptors(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. 
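Alongside the Optional changes, this file swaps Mapping/Sequence for MutableMapping/MutableSequence in write_log_entries, tracking how newer gapic-generator-python output types proto map and repeated fields. Calling code is unaffected, since plain dicts and lists already satisfy the annotations. A sketch, assuming credentials are available and using placeholder resource names:

from google.api import monitored_resource_pb2

from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
from google.cloud.logging_v2.types import LogEntry

client = LoggingServiceV2Client()

entry = LogEntry(
    log_name="projects/my-project/logs/my-log",  # placeholder
    text_payload="hello from write_log_entries",
)

# A dict and a list are MutableMapping / MutableSequence instances already.
response = client.write_log_entries(
    entries=[entry],
    resource=monitored_resource_pb2.MonitoredResource(type="global"),
    labels={"env": "dev"},
)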
The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -801,11 +806,11 @@ async def sample_list_monitored_resource_descriptors(): async def list_logs( self, - request: Union[logging.ListLogsRequest, dict] = None, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, @@ -840,7 +845,7 @@ async def sample_list_logs(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): Required. The resource name that owns the logs: @@ -931,10 +936,10 @@ async def sample_list_logs(): def tail_log_entries( self, - requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. @@ -1029,14 +1034,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("LoggingServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 930f94c619e5..fa3e15e6c2f0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -19,6 +19,8 @@ from typing import ( Dict, Mapping, + MutableMapping, + MutableSequence, Optional, Iterable, Iterator, @@ -26,8 +28,10 @@ Tuple, Type, Union, + cast, ) -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -69,7 +73,7 @@ class LoggingServiceV2ClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. 
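The recurring change across these client and transport files is the version-reporting swap: instead of querying installed-distribution metadata at import time via ``pkg_resources.get_distribution("google-cloud-logging")`` (deprecated, and prone to raising ``DistributionNotFound`` under vendored or zipapp installs), ``DEFAULT_CLIENT_INFO`` now reads a static ``gapic_version`` module bundled inside the package. A minimal sketch of the pattern, assuming a hand-written placeholder version string (the real module contents are emitted by the code generator):

    # google/cloud/logging_v2/gapic_version.py -- contents illustrative only
    __version__ = "0.0.0"  # the generator pins this to the release version

    # client/transport side, mirroring the hunks above
    from google.api_core import gapic_v1
    from google.cloud.logging_v2 import gapic_version as package_version

    # Built once at import time; attached to outgoing requests via the
    # x-goog-api-client header so the backend can identify the client version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=package_version.__version__
    )

This also drops the runtime dependency on setuptools' ``pkg_resources``, so importing the package no longer scans ``sys.path`` for distribution metadata.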
@@ -339,8 +343,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, LoggingServiceV2Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the logging service v2 client. @@ -354,7 +358,7 @@ def __init__( transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -384,6 +388,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -436,11 +441,11 @@ def __init__( def delete_log( self, - request: Union[logging.DeleteLogRequest, dict] = None, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, - log_name: str = None, + log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log @@ -540,14 +545,14 @@ def sample_delete_log(): def write_log_entries( self, - request: Union[logging.WriteLogEntriesRequest, dict] = None, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Mapping[str, str] = None, - entries: Sequence[log_entry.LogEntry] = None, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the @@ -632,7 +637,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -642,7 +647,7 @@ def sample_write_log_entries(): This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. 
The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -730,13 +735,13 @@ def sample_write_log_entries(): def list_log_entries( self, - request: Union[logging.ListLogEntriesRequest, dict] = None, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that @@ -774,7 +779,7 @@ def sample_list_log_entries(): Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -888,10 +893,12 @@ def sample_list_log_entries(): def list_monitored_resource_descriptors( self, - request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types @@ -978,11 +985,11 @@ def sample_list_monitored_resource_descriptors(): def list_logs( self, - request: Union[logging.ListLogsRequest, dict] = None, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, @@ -1097,10 +1104,10 @@ def sample_list_logs(): def tail_log_entries( self, - requests: Iterator[logging.TailLogEntriesRequest] = None, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. 
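A second pattern worth calling out: every per-RPC ``timeout`` parameter moves from ``float = None`` to ``Union[float, object] = gapic_v1.method.DEFAULT``. ``None`` is a poor default here because it is itself a meaningful value ("no timeout"), so the generator uses a dedicated sentinel whose identity means "fall back to the per-method default configured on the transport"; the ``object`` half of the union exists only because the sentinel is not a float. A condensed sketch of how such a sentinel is typically consumed (``effective_timeout`` and the 30-second fallback are hypothetical, for illustration only):

    from typing import Union

    from google.api_core import gapic_v1

    def effective_timeout(
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    ) -> float:
        # Identity check, not equality: only the sentinel object itself
        # triggers the fallback; an explicit None would mean "no timeout".
        if timeout is gapic_v1.method.DEFAULT:
            return 30.0  # hypothetical per-method default
        assert isinstance(timeout, float)  # sketch-level validation
        return timeout

    assert effective_timeout() == 30.0        # omitted -> default applies
    assert effective_timeout(timeout=5.0) == 5.0  # explicit value passes through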
@@ -1187,14 +1194,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("LoggingServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index d308f7d0125b..22665b15e9fd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,9 @@ from google.cloud.logging_v2.types import logging from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class LoggingServiceV2Transport(abc.ABC): @@ -55,7 +51,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 3c9c7743e008..a6878b6fa52b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -48,14 +48,14 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: 
Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index a7fdf5ffc2ff..2e8f76017345 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -50,7 +50,7 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -93,15 +93,15 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 2e8996404ab7..14a3d5f2a446 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -161,9 +172,9 @@ def transport(self) -> MetricsServiceV2Transport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport] = 
"grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metrics service v2 client. @@ -207,11 +218,11 @@ def __init__( async def list_log_metrics( self, - request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -244,7 +255,7 @@ async def sample_list_log_metrics(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]]): The request object. The parameters to ListLogMetrics. parent (:class:`str`): Required. The name of the project containing the @@ -335,11 +346,11 @@ async def sample_list_log_metrics(): async def get_log_metric( self, - request: Union[logging_metrics.GetLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -371,7 +382,7 @@ async def sample_get_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]]): The request object. The parameters to GetLogMetric. metric_name (:class:`str`): Required. The resource name of the desired metric: @@ -461,12 +472,12 @@ async def sample_get_log_metric(): async def create_log_metric( self, - request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, *, - parent: str = None, - metric: logging_metrics.LogMetric = None, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -503,7 +514,7 @@ async def sample_create_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]]): The request object. The parameters to CreateLogMetric. parent (:class:`str`): Required. 
The resource name of the project in which to @@ -593,12 +604,12 @@ async def sample_create_log_metric(): async def update_log_metric( self, - request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -635,7 +646,7 @@ async def sample_update_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]]): The request object. The parameters to UpdateLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to update: @@ -737,11 +748,11 @@ async def sample_update_log_metric(): async def delete_log_metric( self, - request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -770,7 +781,7 @@ async def sample_delete_log_metric(): await client.delete_log_metric(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]]): The request object. The parameters to DeleteLogMetric. metric_name (:class:`str`): Required. 
The resource name of the metric to delete: @@ -847,14 +858,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("MetricsServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9a707b2ac3dd..c8bacd4340fd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -60,7 +72,7 @@ class MetricsServiceV2ClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. @@ -330,8 +342,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, MetricsServiceV2Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metrics service v2 client. @@ -345,7 +357,7 @@ def __init__( transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -375,6 +387,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -427,11 +440,11 @@ def __init__( def list_log_metrics( self, - request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -544,11 +557,11 @@ def sample_list_log_metrics(): def get_log_metric( self, - request: Union[logging_metrics.GetLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -659,12 +672,12 @@ def sample_get_log_metric(): def create_log_metric( self, - request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, *, - parent: str = None, - metric: logging_metrics.LogMetric = None, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -791,12 +804,12 @@ def sample_create_log_metric(): def update_log_metric( self, - request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -924,11 +937,11 @@ def sample_update_log_metric(): def delete_log_metric( self, - request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. 
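The constructor hunks in each client in this patch share one more normalization: ``client_options`` may now arrive as a plain ``dict`` as well as a ``ClientOptions`` instance, so it is converted with ``client_options_lib.from_dict`` and then pinned with ``cast`` so the remainder of the constructor type-checks against a single concrete type. A condensed sketch of that normalization (``_normalize_options`` is a hypothetical helper, not part of the library):

    from typing import Optional, Union, cast

    from google.api_core import client_options as client_options_lib

    def _normalize_options(
        options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
    ) -> client_options_lib.ClientOptions:
        if isinstance(options, dict):
            options = client_options_lib.from_dict(options)
        if options is None:
            options = client_options_lib.ClientOptions()
        # Every branch above yields a ClientOptions; the cast makes that
        # explicit for type checkers without changing runtime behavior.
        return cast(client_options_lib.ClientOptions, options)

    opts = _normalize_options({"api_endpoint": "logging.googleapis.com"})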
@@ -1030,14 +1043,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("MetricsServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f7e9c5edc02a..6e0f1698e105 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,9 @@ from google.cloud.logging_v2.types import logging_metrics from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MetricsServiceV2Transport(abc.ABC): @@ -55,7 +51,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 13cc653f77f1..7eac78848617 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -48,14 +48,14 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: 
Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 93ca38ba5a1e..99764a592906 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -50,7 +50,7 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -93,15 +93,15 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index cc34694281ad..60da219abb5a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -144,7 +146,7 @@ class LogEntry(proto.Message): http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. A map of key, value pairs that provides additional information about the log entry. The labels can be user-defined or system-defined. @@ -196,84 +198,84 @@ class LogEntry(proto.Message): entries split from a single LogEntry. 
""" - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=12, ) - resource = proto.Field( + resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, ) - proto_payload = proto.Field( + proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, ) - text_payload = proto.Field( + text_payload: str = proto.Field( proto.STRING, number=3, oneof="payload", ) - json_payload = proto.Field( + json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, ) - timestamp = proto.Field( + timestamp: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp, ) - receive_timestamp = proto.Field( + receive_timestamp: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, ) - severity = proto.Field( + severity: log_severity_pb2.LogSeverity = proto.Field( proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity, ) - insert_id = proto.Field( + insert_id: str = proto.Field( proto.STRING, number=4, ) - http_request = proto.Field( + http_request: http_request_pb2.HttpRequest = proto.Field( proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=11, ) - operation = proto.Field( + operation: "LogEntryOperation" = proto.Field( proto.MESSAGE, number=15, message="LogEntryOperation", ) - trace = proto.Field( + trace: str = proto.Field( proto.STRING, number=22, ) - span_id = proto.Field( + span_id: str = proto.Field( proto.STRING, number=27, ) - trace_sampled = proto.Field( + trace_sampled: bool = proto.Field( proto.BOOL, number=30, ) - source_location = proto.Field( + source_location: "LogEntrySourceLocation" = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) - split = proto.Field( + split: "LogSplit" = proto.Field( proto.MESSAGE, number=35, message="LogSplit", @@ -302,19 +304,19 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - producer = proto.Field( + producer: str = proto.Field( proto.STRING, number=2, ) - first = proto.Field( + first: bool = proto.Field( proto.BOOL, number=3, ) - last = proto.Field( + last: bool = proto.Field( proto.BOOL, number=4, ) @@ -342,15 +344,15 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). """ - file = proto.Field( + file: str = proto.Field( proto.STRING, number=1, ) - line = proto.Field( + line: int = proto.Field( proto.INT64, number=2, ) - function = proto.Field( + function: str = proto.Field( proto.STRING, number=3, ) @@ -377,15 +379,15 @@ class LogSplit(proto.Message): original LogEntry was split into. 
""" - uid = proto.Field( + uid: str = proto.Field( proto.STRING, number=1, ) - index = proto.Field( + index: int = proto.Field( proto.INT32, number=2, ) - total_splits = proto.Field( + total_splits: int = proto.Field( proto.INT32, number=3, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 42bb9dbb8846..903f875ad07c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -60,7 +62,7 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=1, ) @@ -103,13 +105,13 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, and ``labels`` @@ -155,30 +157,30 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. """ - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=1, ) - resource = proto.Field( + resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=4, message=log_entry.LogEntry, ) - partial_success = proto.Field( + partial_success: bool = proto.Field( proto.BOOL, number=5, ) - dry_run = proto.Field( + dry_run: bool = proto.Field( proto.BOOL, number=6, ) @@ -192,7 +194,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. Attributes: - log_entry_errors (Mapping[int, google.rpc.status_pb2.Status]): + log_entry_errors (MutableMapping[int, google.rpc.status_pb2.Status]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based @@ -202,7 +204,7 @@ class WriteLogEntriesPartialErrors(proto.Message): include per-entry errors. """ - log_entry_errors = proto.MapField( + log_entry_errors: MutableMapping[int, status_pb2.Status] = proto.MapField( proto.INT32, proto.MESSAGE, number=1, @@ -214,7 +216,7 @@ class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. 
Attributes: - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -265,23 +267,23 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. """ - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=3, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=4, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=5, ) @@ -291,7 +293,7 @@ class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, ``nextPageToken`` may still be returned, indicating that more entries may exist. See ``nextPageToken`` for more @@ -316,12 +318,12 @@ class ListLogEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=1, message=log_entry.LogEntry, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -344,11 +346,11 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) @@ -358,7 +360,7 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. Attributes: - resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + resource_descriptors (MutableSequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. next_page_token (str): If there might be more results than those appearing in this @@ -371,12 +373,14 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): def raw_page(self): return self - resource_descriptors = proto.RepeatedField( + resource_descriptors: MutableSequence[ + monitored_resource_pb2.MonitoredResourceDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -404,7 +408,7 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Optional. 
The resource name that owns the logs: - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` @@ -420,19 +424,19 @@ class ListLogsRequest(proto.Message): - ``folders/[FOLDER_ID]`` """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) @@ -442,7 +446,7 @@ class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. Attributes: - log_names (Sequence[str]): + log_names (MutableSequence[str]): A list of log names. For example, ``"projects/my-project/logs/syslog"`` or ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. @@ -457,11 +461,11 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField( + log_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -471,7 +475,7 @@ class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. Attributes: - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. Name of a parent resource from which to retrieve log entries: @@ -505,15 +509,15 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - buffer_window = proto.Field( + buffer_window: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=3, message=duration_pb2.Duration, @@ -524,12 +528,12 @@ class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will order entries with increasing values of ``LogEntry.timestamp``. Ordering is not guaranteed between separate responses. 
- suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): + suppression_info (MutableSequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): If entries that otherwise would have been included in the session were not sent back to the client, counts of relevant entries omitted @@ -558,22 +562,22 @@ class Reason(proto.Enum): RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason = proto.Field( + reason: "TailLogEntriesResponse.SuppressionInfo.Reason" = proto.Field( proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) - suppressed_count = proto.Field( + suppressed_count: int = proto.Field( proto.INT32, number=2, ) - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=1, message=log_entry.LogEntry, ) - suppression_info = proto.RepeatedField( + suppression_info: MutableSequence[SuppressionInfo] = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index c2fcf30bde70..bc81412e7848 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -133,7 +135,7 @@ class LogBucket(proto.Message): if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. - restricted_fields (Sequence[str]): + restricted_fields (MutableSequence[str]): Log entry field paths that are denied access in this bucket. The following fields and their children are eligible: @@ -153,42 +155,42 @@ class LogBucket(proto.Message): KMS key is allowed. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=3, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - retention_days = proto.Field( + retention_days: int = proto.Field( proto.INT32, number=11, ) - locked = proto.Field( + locked: bool = proto.Field( proto.BOOL, number=9, ) - lifecycle_state = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=12, enum="LifecycleState", ) - restricted_fields = proto.RepeatedField( + restricted_fields: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=15, ) - cmek_settings = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, message="CmekSettings", @@ -230,25 +232,25 @@ class LogView(proto.Message): "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=3, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=7, ) @@ -305,7 +307,7 @@ class LogSink(proto.Message): disabled (bool): Optional. If set to true, then this sink is disabled and it does not export any log entries. - exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): Optional. Log entries that match any of these exclusion filters will not be exported. @@ -374,56 +376,56 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - destination = proto.Field( + destination: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=5, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=18, ) - disabled = proto.Field( + disabled: bool = proto.Field( proto.BOOL, number=19, ) - exclusions = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=16, message="LogExclusion", ) - output_version_format = proto.Field( + output_version_format: VersionFormat = proto.Field( proto.ENUM, number=6, enum=VersionFormat, ) - writer_identity = proto.Field( + writer_identity: str = proto.Field( proto.STRING, number=8, ) - include_children = proto.Field( + include_children: bool = proto.Field( proto.BOOL, number=9, ) - bigquery_options = proto.Field( + bigquery_options: "BigQueryOptions" = proto.Field( proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, @@ -457,11 +459,11 @@ class BigQueryOptions(proto.Message): will have this field set to false. 
""" - use_partitioned_tables = proto.Field( + use_partitioned_tables: bool = proto.Field( proto.BOOL, number=1, ) - uses_timestamp_column_partitioning = proto.Field( + uses_timestamp_column_partitioning: bool = proto.Field( proto.BOOL, number=3, ) @@ -498,15 +500,15 @@ class ListBucketsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -516,7 +518,7 @@ class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. Attributes: - buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): + buckets (MutableSequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. next_page_token (str): If there might be more results than appear in this response, @@ -529,12 +531,12 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets = proto.RepeatedField( + buckets: MutableSequence["LogBucket"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LogBucket", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -566,15 +568,15 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - bucket_id = proto.Field( + bucket_id: str = proto.Field( proto.STRING, number=2, ) - bucket = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=3, message="LogBucket", @@ -612,16 +614,16 @@ class UpdateBucketRequest(proto.Message): For example: ``updateMask=retention_days`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - bucket = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=2, message="LogBucket", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -647,7 +649,7 @@ class GetBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -672,7 +674,7 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -697,7 +699,7 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -728,15 +730,15 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -746,7 +748,7 @@ class ListViewsResponse(proto.Message): r"""The response from ListViews. Attributes: - views (Sequence[google.cloud.logging_v2.types.LogView]): + views (MutableSequence[google.cloud.logging_v2.types.LogView]): A list of views. 
next_page_token (str): If there might be more results than appear in this response, @@ -759,12 +761,12 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views = proto.RepeatedField( + views: MutableSequence["LogView"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LogView", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -790,15 +792,15 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - view_id = proto.Field( + view_id: str = proto.Field( proto.STRING, number=2, ) - view = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=3, message="LogView", @@ -833,16 +835,16 @@ class UpdateViewRequest(proto.Message): For example: ``updateMask=filter`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - view = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=2, message="LogView", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -865,7 +867,7 @@ class GetViewRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -889,7 +891,7 @@ class DeleteViewRequest(proto.Message): `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -921,15 +923,15 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -939,7 +941,7 @@ class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. Attributes: - sinks (Sequence[google.cloud.logging_v2.types.LogSink]): + sinks (MutableSequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. next_page_token (str): If there might be more results than appear in this response, @@ -952,12 +954,12 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks = proto.RepeatedField( + sinks: MutableSequence["LogSink"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LogSink", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -982,7 +984,7 @@ class GetSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) @@ -1026,16 +1028,16 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - sink = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, message="LogSink", ) - unique_writer_identity = proto.Field( + unique_writer_identity: bool = proto.Field( proto.BOOL, number=3, ) @@ -1098,20 +1100,20 @@ class UpdateSinkRequest(proto.Message): For example: ``updateMask=filter`` """ - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) - sink = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, message="LogSink", ) - unique_writer_identity = proto.Field( + unique_writer_identity: bool = proto.Field( proto.BOOL, number=3, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -1138,7 +1140,7 @@ class DeleteSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) @@ -1190,28 +1192,28 @@ class LogExclusion(proto.Message): exclusions. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - disabled = proto.Field( + disabled: bool = proto.Field( proto.BOOL, number=4, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, @@ -1245,15 +1247,15 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -1263,7 +1265,7 @@ class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. Attributes: - exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. next_page_token (str): If there might be more results than appear in this response, @@ -1276,12 +1278,12 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LogExclusion", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1306,7 +1308,7 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1337,11 +1339,11 @@ class CreateExclusionRequest(proto.Message): resource. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - exclusion = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, message="LogExclusion", @@ -1381,16 +1383,16 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - exclusion = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, message="LogExclusion", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -1417,7 +1419,7 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1452,7 +1454,7 @@ class GetCmekSettingsRequest(proto.Message): projects and folders in the Google Cloud organization. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1503,16 +1505,16 @@ class UpdateCmekSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - cmek_settings = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=2, message="CmekSettings", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -1585,15 +1587,15 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=2, ) - service_account_id = proto.Field( + service_account_id: str = proto.Field( proto.STRING, number=3, ) @@ -1629,7 +1631,7 @@ class GetSettingsRequest(proto.Message): and folders in the Google Cloud organization. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1677,16 +1679,16 @@ class UpdateSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - settings = proto.Field( + settings: "Settings" = proto.Field( proto.MESSAGE, number=2, message="Settings", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -1763,23 +1765,23 @@ class Settings(proto.Message): manually if needed. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=2, ) - kms_service_account_id = proto.Field( + kms_service_account_id: str = proto.Field( proto.STRING, number=3, ) - storage_location = proto.Field( + storage_location: str = proto.Field( proto.STRING, number=4, ) - disable_default_sink = proto.Field( + disable_default_sink: bool = proto.Field( proto.BOOL, number=5, ) @@ -1805,15 +1807,15 @@ class CopyLogEntriesRequest(proto.Message): entries. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - destination = proto.Field( + destination: str = proto.Field( proto.STRING, number=4, ) @@ -1847,35 +1849,35 @@ class CopyLogEntriesMetadata(proto.Message): For example: ``"serviceAccount:foo@bar.com"`` """ - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, enum="OperationState", ) - cancellation_requested = proto.Field( + cancellation_requested: bool = proto.Field( proto.BOOL, number=4, ) - request = proto.Field( + request: "CopyLogEntriesRequest" = proto.Field( proto.MESSAGE, number=5, message="CopyLogEntriesRequest", ) - progress = proto.Field( + progress: int = proto.Field( proto.INT32, number=6, ) - writer_identity = proto.Field( + writer_identity: str = proto.Field( proto.STRING, number=7, ) @@ -1889,7 +1891,7 @@ class CopyLogEntriesResponse(proto.Message): Number of log entries copied. """ - log_entries_copied_count = proto.Field( + log_entries_copied_count: int = proto.Field( proto.INT64, number=1, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index bcad752b3bf6..7bad4e319e3b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import distribution_pb2 # type: ignore @@ -128,7 +130,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Mapping[str, str]): + label_extractors (MutableMapping[str, str]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. 
Each label key @@ -170,52 +172,52 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - disabled = proto.Field( + disabled: bool = proto.Field( proto.BOOL, number=12, ) - metric_descriptor = proto.Field( + metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) - value_extractor = proto.Field( + value_extractor: str = proto.Field( proto.STRING, number=6, ) - label_extractors = proto.MapField( + label_extractors: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=7, ) - bucket_options = proto.Field( + bucket_options: distribution_pb2.Distribution.BucketOptions = proto.Field( proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, ) - version = proto.Field( + version: ApiVersion = proto.Field( proto.ENUM, number=4, enum=ApiVersion, @@ -245,15 +247,15 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -263,7 +265,7 @@ class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. Attributes: - metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): + metrics (MutableSequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. next_page_token (str): If there might be more results than appear in this response, @@ -276,12 +278,12 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics = proto.RepeatedField( + metrics: MutableSequence["LogMetric"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LogMetric", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -299,7 +301,7 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) @@ -323,11 +325,11 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - metric = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, message="LogMetric", @@ -353,11 +355,11 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) - metric = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, message="LogMetric", @@ -376,7 +378,7 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". 
""" - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 0ef7dcaa1ebe..3e932c854938 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,29 +12,61 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This script is used to synthesize generated parts of this library.""" +import json +import os +import shutil + import synthtool as s -from synthtool import gcp +import synthtool.gcp as gcp from synthtool.languages import python -import os -common = gcp.CommonTemplates() +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True -default_version = "v2" +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get("default_version") + +def place_before(path, text, *before_text, escape=None): + replacement = "\n".join(before_text) + "\n" + text + if escape: + for c in escape: + text = text.replace(c, '\\' + c) + s.replace([path], text, replacement) + +test_metrics_default_client_info_headers = \ +"""def test_metrics_default_client_info_headers(): + import re + + # test that DEFAULT_CLIENT_INFO contains the expected gapic headers + gapic_header_regex = re.compile( + r"gapic\\\\/[0-9]+\\.[\\\\w.-]+ gax\\/[0-9]+\.[\\\\w.-]+ gl-python\\/[0-9]+\\.[\\\\w.-]+ grpc\\/[0-9]+\.[\\\\w.-]+" + ) + detected_info = ( + google.cloud.logging_v2.services.metrics_service_v2.transports.base.DEFAULT_CLIENT_INFO + ) + assert detected_info is not None + detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) + assert gapic_header_regex.match(detected_agent)\n\n\n""" for library in s.get_staging_dirs(default_version): - if library.name == "v2": - # Fix generated unit tests - s.replace( - library / "tests/unit/gapic/logging_v2/test_logging_service_v2.py", - "MonitoredResource\(\s*type_", - "MonitoredResource(type" - ) - - s.move( - library, - excludes=[ + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + + place_before( + library / "tests/unit/gapic/logging_v2/test_metrics_service_v2.py", + "def test_metrics_service_v2_client_get_transport_class()", + test_metrics_default_client_info_headers, + escape="()", + ) + + s.move([library], excludes=[ + "**/gapic_version.py", "setup.py", + "testing/constraints-3.7.txt", "README.rst", "google/cloud/logging/__init__.py", # generated types are hidden from users "google/cloud/logging_v2/__init__.py", @@ -49,10 +81,11 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library( - unit_cov_level=95, + +templated_files = gcp.CommonTemplates().py_library( cov_level=99, microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), system_test_external_dependencies=[ 
"google-cloud-bigquery", "google-cloud-pubsub", @@ -62,32 +95,32 @@ unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) + s.move(templated_files, excludes=[ - ".coveragerc", + "docs/index.rst", + ".github/release-please.yml", + ".coveragerc", "docs/multiprocessing.rst", ".github/workflows", # exclude gh actions as credentials are needed for tests ".github/auto-label.yaml", "README.rst", # This repo has a customized README - ]) + ], +) # adjust .trampolinerc for environment tests -s.replace( - ".trampolinerc", - "required_envvars[^\)]*\)", - "required_envvars+=()" -) +s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()") s.replace( ".trampolinerc", "pass_down_envvars\+\=\(", - 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"' + 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"', ) # don't lint environment tests s.replace( ".flake8", "exclude =", - 'exclude =\n # Exclude environment test code.\n tests/environment/**\n' + "exclude =\n # Exclude environment test code.\n tests/environment/**\n", ) # use conventional commits for renovate bot @@ -97,7 +130,7 @@ }""", """}, "semanticCommits": "enabled" -}""" +}""", ) # -------------------------------------------------------------------------- @@ -106,9 +139,8 @@ python.py_samples() -python.configure_previous_major_version_branches() - s.shell.run(["nox", "-s", "blacken"], hide_output=False) +s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False) # -------------------------------------------------------------------------- # Modify test configs @@ -124,5 +156,5 @@ s.move( ".kokoro/common_env_vars.cfg", file_path, - merge=lambda src, dst, _, : f"{dst}\n{src}", + merge=lambda src, dst, _,: f"{dst}\n{src}", ) diff --git a/packages/google-cloud-logging/release-please-config.json b/packages/google-cloud-logging/release-please-config.json new file mode 100644 index 000000000000..264e357f4fe4 --- /dev/null +++ b/packages/google-cloud-logging/release-please-config.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/logging_v2/gapic_version.py", + "google/cloud/logging/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.logging.v2.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} + \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index 9d97df8749d6..dead26544520 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -49,7 +49,7 @@ async def sample_copy_log_entries(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json 
b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json similarity index 99% rename from packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json rename to packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 3c6bc46bf854..6fc255d8251b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-logging" + "name": "google-cloud-logging", + "version": "0.1.0" }, "snippets": [ { @@ -4305,7 +4306,7 @@ }, { "name": "resource_names", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "filter", @@ -4393,7 +4394,7 @@ }, { "name": "resource_names", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "filter", @@ -4957,11 +4958,11 @@ }, { "name": "labels", - "type": "Mapping[str, str]" + "type": "MutableMapping[str, str]" }, { "name": "entries", - "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" }, { "name": "retry", @@ -5049,11 +5050,11 @@ }, { "name": "labels", - "type": "Mapping[str, str]" + "type": "MutableMapping[str, str]" }, { "name": "entries", - "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" }, { "name": "retry", diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index f5c32b22789b..e8283c38d4a0 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index d1fc8f037a83..80f95a572feb 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -1,4 +1,5 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,36 +12,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# import io import os -import setuptools +import setuptools # type: ignore - -# Package metadata. +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-logging" + + description = "Stackdriver Logging API client library" -version = "3.3.1" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" + +version = {} +with open(os.path.join(package_root, "google/cloud/logging/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -extras = {} - - -# Setup boilerplate below this line. +url = "https://github.com/googleapis/python-logging" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -48,18 +52,16 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] -# Determine which namespaces are needed. 
namespaces = ["google"] if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -68,7 +70,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-logging", + url=url, classifiers=[ release_status, "Intended Audience :: Developers", @@ -84,10 +86,9 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, + python_requires=">=3.7", namespace_packages=namespaces, install_requires=dependencies, - extras_require=extras, - python_requires=">=3.7", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-logging/testing/constraints-3.10.txt b/packages/google-cloud-logging/testing/constraints-3.10.txt index e69de29bb2d1..ed7f9aed2559 100644 --- a/packages/google-cloud-logging/testing/constraints-3.10.txt +++ b/packages/google-cloud-logging/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-logging/testing/constraints-3.11.txt b/packages/google-cloud-logging/testing/constraints-3.11.txt index e69de29bb2d1..ed7f9aed2559 100644 --- a/packages/google-cloud-logging/testing/constraints-3.11.txt +++ b/packages/google-cloud-logging/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 6db145a06148..587626c54bb2 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -1,11 +1,10 @@ # This constraints file is used to check that lower bounds # are correct in setup.py -# List *all* library dependencies and extras in this file. +# List all library dependencies and extras in this file. # Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.32.0 -google-cloud-core==2.0.0 +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 +google-cloud-core==2.0.0 diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt index e69de29bb2d1..ed7f9aed2559 100644 --- a/packages/google-cloud-logging/testing/constraints-3.8.txt +++ b/packages/google-cloud-logging/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-logging/testing/constraints-3.9.txt b/packages/google-cloud-logging/testing/constraints-3.9.txt index e69de29bb2d1..ed7f9aed2559 100644 --- a/packages/google-cloud-logging/testing/constraints-3.9.txt +++ b/packages/google-cloud-logging/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 8e8671e683c1..be77714c0196 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -99,22 +99,6 @@ def test__get_default_mtls_endpoint(): ) -def test_config_default_client_info_headers(): - import re - import pkg_resources - - # test that DEFAULT_CLIENT_INFO contains the expected gapic headers - gapic_header_regex = re.compile( - r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" - ) - detected_info = ( - google.cloud.logging_v2.services.config_service_v2.transports.base.DEFAULT_CLIENT_INFO - ) - assert detected_info is not None - detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) - assert gapic_header_regex.match(detected_agent) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 832ad63dc351..d8bbd5b730c2 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -71,22 +71,6 @@ def modify_default_endpoint(client): ) -def test_logging_default_client_info_headers(): - import re - import pkg_resources - - # test that DEFAULT_CLIENT_INFO contains the expected gapic headers - gapic_header_regex = re.compile( - r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" - ) - detected_info = ( - google.cloud.logging_v2.services.logging_service_v2.transports.base.DEFAULT_CLIENT_INFO - ) - assert detected_info is not None - detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) - assert gapic_header_regex.match(detected_agent) - - def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -1051,7 +1035,7 @@ def test_write_log_entries_flattened(): # using the keyword arguments to the method. 
client.write_log_entries( log_name="log_name_value", - resource=monitored_resource_pb2.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -1064,7 +1048,7 @@ def test_write_log_entries_flattened(): mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels mock_val = {"key_value": "value_value"} @@ -1085,7 +1069,7 @@ def test_write_log_entries_flattened_error(): client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource_pb2.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -1111,7 +1095,7 @@ async def test_write_log_entries_flattened_async(): # using the keyword arguments to the method. response = await client.write_log_entries( log_name="log_name_value", - resource=monitored_resource_pb2.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -1124,7 +1108,7 @@ async def test_write_log_entries_flattened_async(): mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels mock_val = {"key_value": "value_value"} @@ -1146,7 +1130,7 @@ async def test_write_log_entries_flattened_error_async(): await client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource_pb2.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 4f9e2347ccc9..39a005c97cbe 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -99,22 +99,6 @@ def test__get_default_mtls_endpoint(): ) -def test_metrics_default_client_info_headers(): - import re - import pkg_resources - - # test that DEFAULT_CLIENT_INFO contains the expected gapic headers - gapic_header_regex = re.compile( - r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" - ) - detected_info = ( - google.cloud.logging_v2.services.metrics_service_v2.transports.base.DEFAULT_CLIENT_INFO - ) - assert detected_info is not None - detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) - assert gapic_header_regex.match(detected_agent) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -193,6 +177,21 @@ def test_metrics_service_v2_client_from_service_account_file( assert client.transport._host == ("logging.googleapis.com:443") +def 
test_metrics_default_client_info_headers(): + import re + + # test that DEFAULT_CLIENT_INFO contains the expected gapic headers + gapic_header_regex = re.compile( + r"gapic\/[0-9]+\.[\w.-]+ gax\/[0-9]+\.[\w.-]+ gl-python\/[0-9]+\.[\w.-]+ grpc\/[0-9]+\.[\w.-]+" + ) + detected_info = ( + google.cloud.logging_v2.services.metrics_service_v2.transports.base.DEFAULT_CLIENT_INFO + ) + assert detected_info is not None + detected_agent = " ".join(sorted(detected_info.to_user_agent().split(" "))) + assert gapic_header_regex.match(detected_agent) + + def test_metrics_service_v2_client_get_transport_class(): transport = MetricsServiceV2Client.get_transport_class() available_transports = [ From f292ba524d94407e96fe1143efa9630101b95d7b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 15 Dec 2022 18:01:31 +0100 Subject: [PATCH 704/855] chore(deps): update dependency google-cloud-bigquery to v3.4.1 (#689) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4d5a11e58580..32a82202cc3f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.3.1 -google-cloud-bigquery==3.4.0 +google-cloud-bigquery==3.4.1 google-cloud-storage==2.7.0 google-cloud-pubsub==2.13.11 From 3d17dd6ee1ec3dc1b5b2572293f2b752fd73c84a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Dec 2022 13:54:36 -0800 Subject: [PATCH 705/855] chore(main): release 3.4.0 (#691) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 21 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 25 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 466b7195f893..395791d451f3 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.3.1" + ".": "3.4.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 838f36fcad28..e82a7e1daad5 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.4.0](https://github.com/googleapis/python-logging/compare/v3.3.1...v3.4.0) (2022-12-15) + + +### Features + +* Add typing to proto.Message based class attributes ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) + + +### Bug Fixes + +* Add dict typing for client_options ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) +* Drop usage of pkg_resources 
([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) +* Fix timeout default values ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([9d505f1](https://github.com/googleapis/python-logging/commit/9d505f1147ccac8632590b4322945a900ff3c5c2)) + ## [3.3.1](https://github.com/googleapis/python-logging/compare/v3.3.0...v3.3.1) (2022-12-05) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index c443818a0ed0..431567aa07df 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.3.1" # {x-release-please-version} +__version__ = "3.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index c443818a0ed0..431567aa07df 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.3.1" # {x-release-please-version} +__version__ = "3.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 6fc255d8251b..e1fe3fcc1090 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.4.0" }, "snippets": [ { From 5d668e94b5dfb9cc92afa5c13a1559dfa216fd6e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Jan 2023 16:03:22 +0100 Subject: [PATCH 706/855] chore(deps): update dependency google-cloud-logging to v3.4.0 (#697) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 32a82202cc3f..01bee9fd435f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.3.1 +google-cloud-logging==3.4.0 google-cloud-bigquery==3.4.1 google-cloud-storage==2.7.0 google-cloud-pubsub==2.13.11 From ddf77af73c5c7322035a6792b3881c7dfcb3879c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 13:40:54 -0500 Subject: [PATCH 707/855] chore(python): add support for python 3.11 (#698) 
Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 5 +- .../.kokoro/samples/python3.11/common.cfg | 59 +++++++++++++++++++ .../.kokoro/samples/python3.11/continuous.cfg | 6 ++ .../samples/python3.11/periodic-head.cfg | 11 ++++ .../.kokoro/samples/python3.11/periodic.cfg | 6 ++ .../.kokoro/samples/python3.11/presubmit.cfg | 6 ++ .../google-cloud-logging/CONTRIBUTING.rst | 6 +- packages/google-cloud-logging/noxfile.py | 2 +- .../samples/snippets/noxfile.py | 2 +- 9 files changed, 96 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index df2cfe5d8965..889f77dfa25d 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 - + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 000000000000..557e750529d6 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 000000000000..7e2973e3b659 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index b68256eb04f5..6fa7a4dac6a4 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. 
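Editor's aside on the Python 3.11 rollout above: the CONTRIBUTING.rst and Kokoro changes document and schedule the new interpreter, while the noxfile.py diff that follows is what actually adds it to the test matrix. Below is a minimal sketch of the pattern involved — nox derives session names such as `unit-3.11` from the version list — with an illustrative session body that only approximates the repository's real noxfile:

```python
import nox

# nox generates one unit session per interpreter and names them
# "unit-3.7" ... "unit-3.11", matching the `nox -s unit-3.11` command
# referenced in CONTRIBUTING.rst.
UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
    """Run the unit test suite on a single interpreter (illustrative body)."""
    session.install("mock", "pytest", "pytest-cov")
    session.install("-e", ".")
    session.run("pytest", "tests/unit")
```

Adding `"3.11"` to the list is therefore the entire mechanism: nox materializes the new session automatically, and the samples noxfile follows the same convention via its `ALL_VERSIONS` list.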
diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 85e9dbb8421c..820c51d007a5 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index e8283c38d4a0..1224cbe212e4 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 37d251c9af0cef365e198ad6f9d01e76be5b6126 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 23:19:49 -0500 Subject: [PATCH 708/855] feat: Add support for python 3.11 (#700) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * require proto-plus 1.22.2 for python 3.11 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- packages/google-cloud-logging/setup.py | 2 ++ packages/google-cloud-logging/testing/constraints-3.12.txt | 6 ++++++ 9 files changed, 15 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-logging/testing/constraints-3.12.txt diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f691bd684b1f..ebda77d561e0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -151,7 +151,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 7ebbe333f545..1c5cc5bb40c2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -381,7 +381,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 3f5ed6a0c650..909895ad6f21 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -138,7 +138,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index fa3e15e6c2f0..10d49a8e7461 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -288,7 +288,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
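The docstring corrected in the hunks above describes a three-step endpoint resolution. The sketch below restates that decision order exactly as the docstring gives it; the helper name and parameters are illustrative inventions (in the library, this logic lives inside the clients' `get_mtls_endpoint_and_cert_source`):

```python
import os


def resolve_api_endpoint(api_endpoint, default_endpoint, mtls_endpoint, cert_source):
    """Illustrative sketch of the docstring's endpoint resolution order."""
    if api_endpoint is not None:  # (1) an explicitly provided endpoint always wins
        return api_endpoint
    use_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if use_cert == "always":  # (2) the environment variable can force mTLS
        return mtls_endpoint
    if use_cert == "never":
        return default_endpoint
    # (3) otherwise, prefer mTLS only when a client certificate source exists
    return mtls_endpoint if cert_source else default_endpoint
```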
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 14a3d5f2a446..bcffd416c779 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -136,7 +136,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index c8bacd4340fd..2597c8d29223 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -287,7 +287,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
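Since the same one-word docstring fix repeats across the remaining service clients, a short usage sketch shows the public classmethod the docstring belongs to. The method resolves configuration only, so no credentials are needed; the commented values are expectations based on the library's default host and default environment, not captured output:

```python
from google.api_core.client_options import ClientOptions
from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client

endpoint, cert_source = MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(
    ClientOptions()
)
print(endpoint)      # expected: "logging.googleapis.com"
print(cert_source)   # expected: None unless GOOGLE_API_USE_CLIENT_CERTIFICATE="true"
```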
diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index e1fe3fcc1090..6fc255d8251b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.4.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 80f95a572feb..f43fd0bf9e52 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -42,6 +42,7 @@ "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" @@ -81,6 +82,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/google-cloud-logging/testing/constraints-3.12.txt b/packages/google-cloud-logging/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-logging/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf From afafb3c018cd4e2c9df45f7457b2fe28fe96ee9c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Jan 2023 19:13:22 +0000 Subject: [PATCH 709/855] chore(deps): update dependency google-cloud-pubsub to v2.13.12 (#702) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 01bee9fd435f..2a47f5dff4d5 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.4.0 google-cloud-bigquery==3.4.1 google-cloud-storage==2.7.0 -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.13.12 From e177b605ffd071637e75995ebc5ae13aa94b7ec7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 14 Jan 2023 18:07:13 +0000 Subject: [PATCH 710/855] chore(deps): update dependency pytest to v7.2.1 (#706) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index b90fc387d015..9f013668bd64 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.2.0 +pytest==7.2.1 From d1d737beeb457859ff5f0bd4e4bd56fb30e05991 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 17 Jan 2023 23:12:25 +0000 Subject: [PATCH 711/855] fix: instrumentation entries should not contain user labels (#703) --- .../google/cloud/logging_v2/_instrumentation.py | 6 ++++-- .../tests/unit/test__instrumentation.py | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py b/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py index 0d9de76d3930..553b3f94c21a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_instrumentation.py @@ -67,8 +67,10 @@ def _create_diagnostic_entry(name=_PYTHON_LIBRARY_NAME, version=_LIBRARY_VERSION _INSTRUMENTATION_SOURCE_KEY: [_get_instrumentation_source(name, version)] } } - kw["severity"] = "INFO" - entry = StructEntry(payload=payload, **kw) + # only keep the log_name and resource from the parent log + allow_list = ("log_name", "resource") + active_kws = {k: v for k, v in kw.items() if k in allow_list} + entry = StructEntry(payload=payload, **active_kws) return entry diff --git a/packages/google-cloud-logging/tests/unit/test__instrumentation.py b/packages/google-cloud-logging/tests/unit/test__instrumentation.py index 501301c34dc3..dc330b0ca21a 100644 --- a/packages/google-cloud-logging/tests/unit/test__instrumentation.py +++ b/packages/google-cloud-logging/tests/unit/test__instrumentation.py @@ -63,3 +63,19 @@ def test_truncate_long_values(self): self.assertEqual(expected_name, self._get_diagonstic_value(entry, "name")) self.assertEqual(expected_version, self._get_diagonstic_value(entry, "version")) + + def test_drop_labels(self): + """Labels should not be copied in instrumentation log""" + test_logname = "test-name" + test_labels = {"hello": "world"} + entry = 
i._create_diagnostic_entry( + name=self.LONG_NAME, + version=self.LONG_VERSION, + log_name=test_logname, + labels=test_labels, + ) + self.assertEqual(entry.log_name, test_logname) + self.assertIsNone(entry.labels) + # ensure only expected fields exist in entry + expected_keys = set(["logName", "resource", "jsonPayload"]) + self.assertEqual(set(entry.to_api_repr().keys()), expected_keys) From 52c7798b1e6ebf7db2f597398d345e4d2667b158 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 18 Jan 2023 16:39:37 +0000 Subject: [PATCH 712/855] chore(deps): update dependency google-cloud-bigquery to v3.4.2 (#707) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 2a47f5dff4d5..0c534dc67aff 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.4.0 -google-cloud-bigquery==3.4.1 +google-cloud-bigquery==3.4.2 google-cloud-storage==2.7.0 google-cloud-pubsub==2.13.12 From a2705101f3de190e84c07f6f635dab9d61725089 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 18 Jan 2023 21:06:10 +0000 Subject: [PATCH 713/855] fix: send StructuredLogHandler instrumentation log using an explicit logger (#705) --- .../google/cloud/logging_v2/handlers/structured_log.py | 6 +++++- .../tests/unit/handlers/test_structured_log.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index 55ed9c2d0a9e..fac9b26b3b7d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -131,4 +131,8 @@ def emit(self, record): def emit_instrumentation_info(self): google.cloud.logging_v2._instrumentation_emitted = True diagnostic_object = _create_diagnostic_entry() - logging.info(diagnostic_object.payload) + struct_logger = logging.getLogger(__name__) + struct_logger.addHandler(self) + struct_logger.setLevel(logging.INFO) + struct_logger.info(diagnostic_object.payload) + struct_logger.handlers.clear() diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index d930da763214..353530ed1a7e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -624,7 +624,8 @@ def test_valid_instrumentation_info(self): import mock import json - with mock.patch.object(logging, "info") as mock_log: + logger = logging.getLogger("google.cloud.logging_v2.handlers.structured_log") + with mock.patch.object(logger, "info") as mock_log: handler = self._make_one() handler.emit_instrumentation_info() mock_log.assert_called_once() From 71bf94b06e49c3d9512a6244f07b0629e4b53631 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 13:52:08 -0500 Subject: [PATCH 714/855] docs: Add documentation for enums (#709) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums 
fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/config_service_v2/client.py | 2 +- .../services/logging_service_v2/client.py | 2 +- .../services/metrics_service_v2/client.py | 2 +- .../google/cloud/logging_v2/types/logging.py | 15 ++++++- .../cloud/logging_v2/types/logging_config.py | 42 ++++++++++++++++++- .../cloud/logging_v2/types/logging_metrics.py | 9 +++- 6 files changed, 65 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1c5cc5bb40c2..4723239b471f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -3334,7 +3334,7 @@ def sample_copy_log_entries(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "ConfigServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 10d49a8e7461..83c36302ae3d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1180,7 +1180,7 @@ def request_generator(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "LoggingServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 2597c8d29223..a75c1e9358cc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1029,7 +1029,7 @@ def sample_delete_log_metric(): metadata=metadata, ) - def __enter__(self): + def __enter__(self) -> "MetricsServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 903f875ad07c..4c300f705ef1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -557,7 +557,20 @@ class SuppressionInfo(proto.Message): """ class Reason(proto.Enum): - r"""An indicator of why entries were omitted.""" + r"""An indicator of why entries were omitted. + + Values: + REASON_UNSPECIFIED (0): + Unexpected default. 
+ RATE_LIMIT (1): + Indicates suppression occurred due to relevant entries being + received in excess of rate limits. For quotas and limits, + see `Logging API quotas and + limits `__. + NOT_CONSUMED (2): + Indicates suppression occurred due to the + client not consuming responses quickly enough. + """ REASON_UNSPECIFIED = 0 RATE_LIMIT = 1 NOT_CONSUMED = 2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index bc81412e7848..c3c3da099d20 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -70,7 +70,19 @@ class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states.""" + r"""LogBucket lifecycle states. + + Values: + LIFECYCLE_STATE_UNSPECIFIED (0): + Unspecified state. This is only used/useful + for distinguishing unset values. + ACTIVE (1): + The normal and active state. + DELETE_REQUESTED (2): + The resource has been marked for deletion by + the user. For some resources (e.g. buckets), + this can be reversed by an un-delete operation. + """ LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 @@ -83,6 +95,22 @@ class OperationState(proto.Enum): is created, the current state of the operation can be queried even before the operation is finished and the final result is available. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Should not be used. + OPERATION_STATE_SCHEDULED (1): + The operation is scheduled. + OPERATION_STATE_WAITING_FOR_PERMISSIONS (2): + Waiting for necessary permissions. + OPERATION_STATE_RUNNING (3): + The operation is running. + OPERATION_STATE_SUCCEEDED (4): + The operation was completed successfully. + OPERATION_STATE_FAILED (5): + The operation failed. + OPERATION_STATE_CANCELLED (6): + The operation was cancelled by the user. """ OPERATION_STATE_UNSPECIFIED = 0 OPERATION_STATE_SCHEDULED = 1 @@ -371,7 +399,17 @@ class LogSink(proto.Message): """ class VersionFormat(proto.Enum): - r"""Deprecated. This is unused.""" + r"""Deprecated. This is unused. + + Values: + VERSION_FORMAT_UNSPECIFIED (0): + An unspecified format version that will + default to V2. + V2 (1): + ``LogEntry`` version 2 format. + V1 (2): + ``LogEntry`` version 1 format. + """ VERSION_FORMAT_UNSPECIFIED = 0 V2 = 1 V1 = 2 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 7bad4e319e3b..128e6369e79b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -168,7 +168,14 @@ class LogMetric(proto.Message): """ class ApiVersion(proto.Enum): - r"""Logging API version.""" + r"""Logging API version. + + Values: + V2 (0): + Logging API v2. + V1 (1): + Logging API v1. 
+ """ V2 = 0 V1 = 1 From 36b0a3f6ad8aa17f2a93c4da0c5a905d545f0e92 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Jan 2023 15:11:27 +0000 Subject: [PATCH 715/855] chore(deps): update dependency google-cloud-pubsub to v2.14.0 (#708) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0c534dc67aff..8258225ff9d7 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.4.0 google-cloud-bigquery==3.4.2 google-cloud-storage==2.7.0 -google-cloud-pubsub==2.13.12 +google-cloud-pubsub==2.14.0 From b46768a2873f6e59176c2af548a0a3ee276ea90a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 24 Jan 2023 18:00:16 +0000 Subject: [PATCH 716/855] chore: update environment test submodule (#711) --- .../tests/environment/.gitignore | 2 + .../environment/deployable/go/Dockerfile | 3 +- .../tests/environment/deployable/go/go.mod | 4 +- .../tests/environment/deployable/go/main.go | 267 +++++++++++++++--- .../environment/deployable/python/snippets.py | 25 ++ .../envctl/env_scripts/go/compute.sh | 2 +- .../envctl/env_scripts/go/functions.sh | 5 +- .../envctl/env_scripts/go/local.sh | 88 ++++++ .../envctl/env_scripts/python/local.sh | 71 +++++ .../tests/environment/noxfile.py | 2 + .../tests/environment/tests/common/common.py | 2 +- .../tests/environment/tests/go/go.py | 110 ++++++++ .../tests/environment/tests/go/stdout.py | 84 ++++++ .../tests/go/test_appengine_standard.py | 4 +- .../environment/tests/go/test_cloudrun.py | 5 +- .../environment/tests/go/test_compute.py | 5 +- .../environment/tests/go/test_functions.py | 5 +- .../environment/tests/go/test_kubernetes.py | 5 +- .../tests/{common => python}/python.py | 0 .../python/test_appengine_flex_container.py | 2 +- .../python/test_appengine_flex_python.py | 2 +- .../tests/python/test_appengine_standard.py | 2 +- .../environment/tests/python/test_cloudrun.py | 2 +- .../environment/tests/python/test_compute.py | 2 +- .../tests/python/test_functions.py | 2 +- .../tests/python/test_kubernetes.py | 2 +- .../environment/tests/python/test_local.py | 31 ++ tests/environment | 2 +- 28 files changed, 672 insertions(+), 64 deletions(-) create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/local.sh create mode 100755 packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/local.sh create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/go.py create mode 100644 packages/google-cloud-logging/tests/environment/tests/go/stdout.py rename packages/google-cloud-logging/tests/environment/tests/{common => python}/python.py (100%) create mode 100644 packages/google-cloud-logging/tests/environment/tests/python/test_local.py diff --git a/packages/google-cloud-logging/tests/environment/.gitignore b/packages/google-cloud-logging/tests/environment/.gitignore index 407f5b71e3fe..357d4e1fd332 100644 --- a/packages/google-cloud-logging/tests/environment/.gitignore +++ b/packages/google-cloud-logging/tests/environment/.gitignore @@ -1,5 +1,7 @@ .idea +_library/ + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile 
b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile index 56eea7df8390..166211cd0d9b 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile +++ b/packages/google-cloud-logging/tests/environment/deployable/go/Dockerfile @@ -16,7 +16,7 @@ # Use the offical golang image to create a binary. # This is based on Debian and sets the GOPATH to /go. # https://hub.docker.com/_/golang -FROM golang:1.15-buster as builder +FROM golang:1.19-buster as builder # Create and change to the app directory. WORKDIR /app @@ -26,6 +26,7 @@ COPY . ./ # Build the binary RUN go mod download +RUN go mod tidy RUN go build -v -o server # Use the official Debian slim image for a lean production container. diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod index ebd37f95d89f..099bb4094d99 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/go.mod +++ b/packages/google-cloud-logging/tests/environment/deployable/go/go.mod @@ -1,6 +1,6 @@ module github.com/googleapis/env-tests-logging/deployable/go/main -go 1.15 +go 1.16 require ( cloud.google.com/go/compute v1.7.0 @@ -11,3 +11,5 @@ require ( ) replace cloud.google.com/go/logging => ./logging + +replace golang.org/x/sys => golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab diff --git a/packages/google-cloud-logging/tests/environment/deployable/go/main.go b/packages/google-cloud-logging/tests/environment/deployable/go/main.go index 00eca7fc73cd..a03148c4eddf 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/go/main.go +++ b/packages/google-cloud-logging/tests/environment/deployable/go/main.go @@ -21,7 +21,10 @@ import ( "io/ioutil" "log" "net/http" + "net/url" "os" + "reflect" + "strconv" "strings" "time" @@ -116,9 +119,22 @@ func pullMsgsSync(sub *pubsub.Subscription) error { // Initializations for all GCP services var ctx context.Context +// global project id +var projectID string + // init executes for all environments, regardless if its a program or package func init() { ctx = context.Background() + // populate projectId + var found bool + projectID, found = os.LookupEnv("PROJECT_ID") + if !found { + var err error + projectID, err = metadata.ProjectID() + if err != nil { + log.Fatalf("metadata.ProjectID: %v", err) + } + } } // main runs for all environments except GCF @@ -126,10 +142,7 @@ func main() { // ****************** GAE, GKE, GCE ****************** // Enable app subscriber for all environments except GCR if os.Getenv("ENABLE_SUBSCRIBER") == "true" { - projectID, err := metadata.ProjectID() - if err != nil { - log.Fatalf("metadata.ProjectID: %v", err) - } + // first look for project id in env var, then check the metadata topicID := os.Getenv("PUBSUB_TOPIC") if topicID == "" { topicID = "logging-test" @@ -153,6 +166,7 @@ func main() { } // Blocking call, pulls messages from pubsub until context is cancelled or test ends + log.Printf("Waiting for pubsub messages...") err = pullMsgsSync(sub) if err != nil { log.Fatalf("pullMsgsSync failed: %v", err) @@ -185,13 +199,51 @@ func main() { } // ****************** Test Cases ****************** + +type Snippets struct{} + // [Optional] envctl go trigger simplelog log_name=foo,log_text=bar -func simplelog(args map[string]string) { +func (s Snippets) Simplelog(args map[string]string) { ctx := context.Background() - projectID, err := metadata.ProjectID() + client, err := logging.NewClient(ctx, projectID) if err != nil { - 
log.Fatalf("metadata.ProjectID: %v", err) + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() + + logname := "my-log" + if val, ok := args["log_name"]; ok { + logname = val + } + + logtext := "hello world" + if val, ok := args["log_text"]; ok { + logtext = val + } + + logseverity := s._parseSeverity(args["severity"]) + + entry := logging.Entry{ + Payload: logtext, + Severity: logseverity, + } + // attach http request object if passed in + if input_url, ok := args["http_request_url"]; ok { + if parsed_url, err := url.Parse(input_url); err == nil { + entry.HTTPRequest = &logging.HTTPRequest{ + Request: &http.Request{ + URL: parsed_url, + Method: "POST", + }, + } + } } + client.Logger(logname).Log(entry) +} + +// [Optional] envctl go trigger jsonlog log_name=foo,log_text=bar +func (s Snippets) Jsonlog(args map[string]string) { + ctx := context.Background() client, err := logging.NewClient(ctx, projectID) if err != nil { log.Fatalf("Failed to create client: %v", err) @@ -208,44 +260,181 @@ func simplelog(args map[string]string) { logtext = val } - logseverity := logging.Info - if val, ok := args["severity"]; ok { - switch strings.ToUpper(val) { - case "DEFAULT": - logseverity = logging.Default - case "DEBUG": - logseverity = logging.Debug - case "INFO": - logseverity = logging.Info - case "NOTICE": - logseverity = logging.Notice - case "WARNING": - logseverity = logging.Warning - case "ERROR": - logseverity = logging.Error - case "CRITICAL": - logseverity = logging.Critical - case "ALERT": - logseverity = logging.Alert - case "EMERGENCY": - logseverity = logging.Emergency - default: - break + logseverity := s._parseSeverity(args["severity"]) + + payload := make(map[string]interface{}) + for k, v := range args { + if k != "log_name" && k != "log_text" && k != "severity" { + // convert int inputs when possible + if intVal, err := strconv.Atoi(v); err == nil { + payload[k] = intVal + } else { + payload[k] = v + } } } + payload["message"] = logtext + entry := logging.Entry{ + Payload: payload, + Severity: logseverity, + } + client.Logger(logname).Log(entry) +} + +// https://pkg.go.dev/cloud.google.com/go/logging#hdr-The_Standard_Logger +// [Optional] envctl go trigger standardlogger log_name=foo,log_text=bar +func (s Snippets) Standardlogger(args map[string]string) { + ctx := context.Background() + client, err := logging.NewClient(ctx, projectID) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() - logger := client.Logger(logname).StandardLogger(logseverity) - logger.Println(logtext) + logname := "my-log" + if val, ok := args["log_name"]; ok { + logname = val + } + + logtext := "hello world" + if val, ok := args["log_text"]; ok { + logtext = val + } + + logseverity := s._parseSeverity(args["severity"]) + + lg := client.Logger(logname) + stdlg := lg.StandardLogger(logseverity) + stdlg.Println(logtext) } -// testLog is a helper function which invokes the correct test functions -func testLog(message string, attrs map[string]string) { - switch message { - case "simplelog": - simplelog(attrs) - case "stdlog": - break +// https://pkg.go.dev/cloud.google.com/go/logging#hdr-Synchronous_Logging +// [Optional] envctl go trigger synclog log_name=foo,log_text=bar +func (s Snippets) Synclog(args map[string]string) { + ctx := context.Background() + client, err := logging.NewClient(ctx, projectID) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() + + logname := "my-log" + if val, ok := args["log_name"]; 
ok { + logname = val + } + + logtext := "hello world" + if val, ok := args["log_text"]; ok { + logtext = val + } + + logseverity := s._parseSeverity(args["severity"]) + + lg := client.Logger(logname) + entry := logging.Entry{ + Payload: logtext, + Severity: logseverity, + } + // attach http request object if passed in + if input_url, ok := args["http_request_url"]; ok { + if parsed_url, err := url.Parse(input_url); err == nil { + entry.HTTPRequest = &logging.HTTPRequest{ + Request: &http.Request{ + URL: parsed_url, + Method: "POST", + }, + } + entry.HTTPRequest.Latency = 100000 + } + } + lg.LogSync(ctx, entry) +} + +// https://pkg.go.dev/cloud.google.com/go/logging#hdr-Redirecting_log_ingestion +// [Optional] envctl go trigger stdoutlog log_name=foo,log_text=bar +func (s Snippets) Stdoutlog(args map[string]string) { + ctx := context.Background() + client, err := logging.NewClient(ctx, projectID) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() + + logname := "my-log" + if val, ok := args["log_name"]; ok { + logname = val + } + + logtext := "hello world" + if val, ok := args["log_text"]; ok { + logtext = val + } + + logseverity := s._parseSeverity(args["severity"]) + + lg := client.Logger(logname, logging.RedirectAsJSON(os.Stdout)) + entry := logging.Entry{ + Payload: logtext, + Severity: logseverity, + } + // attach http request object if passed in + if input_url, ok := args["http_request_url"]; ok { + if parsed_url, err := url.Parse(input_url); err == nil { + entry.HTTPRequest = &logging.HTTPRequest{ + Request: &http.Request{ + URL: parsed_url, + Method: "POST", + }, + } + entry.HTTPRequest.Latency = 100000 + } + } + lg.LogSync(ctx, entry) +} + +func (s Snippets) _parseSeverity(val string) logging.Severity { + logseverity := logging.Info + switch strings.ToUpper(val) { + case "DEFAULT": + logseverity = logging.Default + case "DEBUG": + logseverity = logging.Debug + case "INFO": + logseverity = logging.Info + case "NOTICE": + logseverity = logging.Notice + case "WARNING": + logseverity = logging.Warning + case "ERROR": + logseverity = logging.Error + case "CRITICAL": + logseverity = logging.Critical + case "ALERT": + logseverity = logging.Alert + case "EMERGENCY": + logseverity = logging.Emergency default: break } + return logseverity +} + +func (s Snippets) Test() { + log.Printf("Test") +} + +// testLog is a helper function which invokes the correct test functions +func testLog(message string, attrs map[string]string) { + // call the requested snippet using reflection + snippets := Snippets{} + // only exported methods can be called through reflection + // capitalize input to match method signature + methodName := strings.Title(message) + method := reflect.ValueOf(snippets).MethodByName(methodName) + if method.IsValid() { + in := []reflect.Value{reflect.ValueOf(attrs)} + method.Call(in) + } else { + log.Printf("invalid snippet") + } } diff --git a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py index 277dcdc9a8af..b440e20518fa 100644 --- a/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py +++ b/packages/google-cloud-logging/tests/environment/deployable/python/snippets.py @@ -55,6 +55,31 @@ def jsonlog(log_name=None, log_text=None, severity="DEFAULT", **kwargs): logger.log_struct(message, severity=severity) +def large_log(log_name=None, log_text="simple_log", severity="DEFAULT", buffer_chars=270000, **kwargs): 
+ # allowed severity: default, debug, info, notice, warning, error, critical, alert, emergency + severity = severity.upper() + client = google.cloud.logging.Client() + logger = client.logger(log_name) + logger.log({"message": log_text, "buffer":"0"*int(float(buffer_chars))}, severity=severity) + +def pylogging_large_log(log_name=None, log_text="simple_log", buffer_chars=270000, **kwargs): + # allowed severity: default, debug, info, notice, warning, error, critical, alert, emergency + client = google.cloud.logging.Client() + logger = client.logger(log_name) + logging.error(log_text, extra={"json_fields":{"buffer":"0"*int(float(buffer_chars))}}) + +def batch_large_log(log_name=None, log_text="simple_log", severity="DEFAULT", batch_size=10, large_idx=4, buffer_chars=270000, **kwargs): + # allowed severity: default, debug, info, notice, warning, error, critical, alert, emergency + from google.api_core.exceptions import InvalidArgument + client = google.cloud.logging.Client() + logger = client.logger(log_name) + batch = logger.batch() + for i in range(int(batch_size)): + if i == int(large_idx): + batch.log({"message": log_text, "buffer":"0"*int(float(buffer_chars))}, severity=severity) + batch.log(log_text) + batch.commit() + def pylogging_json(log_text=None, severity="WARNING", string_encode=False, **kwargs): # allowed severity: debug, info, warning, error, critical diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh index 0552de289b00..54f115349b62 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/compute.sh @@ -54,7 +54,7 @@ build_go_container(){ # copy over local copy of library pushd $SUPERREPO_ROOT/logging - tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ . + tar -cvf $_deployable_dir/lib.tar --exclude env-tests-logging --exclude internal/env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ . 
popd mkdir -p $_deployable_dir/logging tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh index d2d712549ac0..ad36002ef445 100644 --- a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/functions.sh @@ -47,9 +47,8 @@ deploy() { set +e gcloud pubsub topics create $SERVICE_NAME 2>/dev/null set -e - # Note: functions only supports go111, go113 and go116 at the moment - local RUNTIME="go116" - + # Note: functions supports go111, go113 go116, go118, and go119 + local RUNTIME="go119" # Copy over local copy of library to use as dependency _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/local.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/local.sh new file mode 100755 index 000000000000..e7573790be29 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/go/local.sh @@ -0,0 +1,88 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-go-local-$(echo $ENVCTL_ID | head -c 10)" +SA_NAME=$SERVICE_NAME-invoker + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # stop container + docker stop $SERVICE_NAME 2> /dev/null + set -e +} + +verify() { + set +e + docker container inspect -f '{{.State.Running}}' $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? == 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +build_go_container() { + export GCR_PATH=gcr.io/$PROJECT_ID/logging:$SERVICE_NAME + # copy super-repo into deployable dir + _env_tests_relative_path=${REPO_ROOT#"$SUPERREPO_ROOT/"} + _deployable_dir=$REPO_ROOT/deployable/$LANGUAGE + + # copy over local copy of library + pushd $SUPERREPO_ROOT/logging + tar -cvf $_deployable_dir/lib.tar --exclude internal/env-tests-logging --exclude env-tests-logging --exclude .nox --exclude docs --exclude __pycache__ . 
+ popd + mkdir -p $_deployable_dir/logging + tar -xvf $_deployable_dir/lib.tar --directory $_deployable_dir/logging + # build container + docker build -t $GCR_PATH $_deployable_dir +} + +deploy() { + ARG=${1:-none} + if [[ -z "${GOOGLE_APPLICATION_CREDENTIALS}" ]]; then + echo "GOOGLE_APPLICATION_CREDENTIALS not set" + echo "should point to a valid service account to mount into container" + exit 1 + fi + if [[ "$ARG" == "-i" ]]; then + FLAG="-i" + else + FLAG="-d" + fi + build_go_container nopush + docker run --rm \ + --name $SERVICE_NAME \ + -v $GOOGLE_APPLICATION_CREDENTIALS:/service-account.json \ + -e GOOGLE_APPLICATION_CREDENTIALS=/service-account.json \ + -e ENABLE_SUBSCRIBER=true -e PUBSUB_TOPIC="$SERVICE_NAME" \ + -e PROJECT_ID=$(gcloud config get-value project) \ + $FLAG -t $GCR_PATH +} + +filter-string() { + echo "unimplemented" +} + diff --git a/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/local.sh b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/local.sh new file mode 100755 index 000000000000..914834f9b2d7 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/envctl/env_scripts/python/local.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e # exit on any failure +set -o pipefail # any step in pipe caused failure +set -u # undefined variables cause exit + + +SERVICE_NAME="log-py-local-$(echo $ENVCTL_ID | head -c 10)" +SA_NAME=$SERVICE_NAME-invoker + +destroy() { + set +e + # delete pubsub resources + gcloud pubsub topics delete $SERVICE_NAME -q 2> /dev/null + gcloud pubsub subscriptions delete $SERVICE_NAME-subscriber -q 2> /dev/null + # stop container + docker stop $SERVICE_NAME 2> /dev/null + set -e +} + +verify() { + set +e + docker container inspect -f '{{.State.Running}}' $SERVICE_NAME > /dev/null 2> /dev/null + if [[ $? 
== 0 ]]; then + echo "TRUE" + exit 0 + else + echo "FALSE" + exit 1 + fi + set -e +} + +deploy() { + ARG=${1:-none} + if [[ -z "${GOOGLE_APPLICATION_CREDENTIALS}" ]]; then + echo "GOOGLE_APPLICATION_CREDENTIALS not set" + echo "should point to a valid service account to mount into container" + exit 1 + fi + if [[ "$ARG" == "-i" ]]; then + FLAG="-i" + else + FLAG="-d" + fi + build_container nopush + docker run --rm \ + --name $SERVICE_NAME \ + -v $GOOGLE_APPLICATION_CREDENTIALS:/service-account.json \ + -e GOOGLE_APPLICATION_CREDENTIALS=/service-account.json \ + -e ENABLE_SUBSCRIBER=true -e PUBSUB_TOPIC="$SERVICE_NAME" \ + $FLAG -t $GCR_PATH +} + +filter-string() { + echo "unimplemented" +} + diff --git a/packages/google-cloud-logging/tests/environment/noxfile.py b/packages/google-cloud-logging/tests/environment/noxfile.py index 282ffb4e42e6..3dc11d422ef5 100644 --- a/packages/google-cloud-logging/tests/environment/noxfile.py +++ b/packages/google-cloud-logging/tests/environment/noxfile.py @@ -134,6 +134,7 @@ def blacken(session: nox.sessions.Session) -> None: "cloudrun", "functions", "functions_v2", + "local", ], ) @nox.parametrize("language", ["python", "go", "nodejs", "java"]) @@ -151,6 +152,7 @@ def tests(session, language, platform): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install( + "grpcio==1.51.1", # pinned to avoid https://github.com/grpc/grpc/issues/31885 TODO: revert "mock", "pytest", "google-cloud-testutils", diff --git a/packages/google-cloud-logging/tests/environment/tests/common/common.py b/packages/google-cloud-logging/tests/environment/tests/common/common.py index 9e2d19c83fa6..c5a730943b2c 100644 --- a/packages/google-cloud-logging/tests/environment/tests/common/common.py +++ b/packages/google-cloud-logging/tests/environment/tests/common/common.py @@ -217,7 +217,7 @@ def test_receive_unicode_log(self): self.assertIsNotNone(found_log, "expected unicode log not found") def test_json_log(self): - if self.language not in ["python"]: + if self.language not in ["python", "go"]: # TODO: other languages to also support this test return True log_text = f"{inspect.currentframe().f_code.co_name} {uuid.uuid1()}" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/go.py b/packages/google-cloud-logging/tests/environment/tests/go/go.py new file mode 100644 index 000000000000..312655740c31 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/go.py @@ -0,0 +1,110 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import unittest +import inspect +import re +import uuid +import json + +import google.cloud.logging + +from ..common.common import Common + + +class CommonGolang: + """ + Common tests to run on all go environments + """ + + def test_standard_logger_receive_log(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, "standardlogger") + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + self.assertEqual(len(log_list), 1, "expected 1 log") + + def test_standard_logger_receive_unicode_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" + log_list = self.trigger_and_retrieve(log_text, "standardlogger") + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + + def test_severity_standard_logger(self): + severities = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] + for severity in severities: + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "standardlogger", severity=severity + ) + found_severity = log_list[-1].severity + + self.assertEqual(found_severity.lower(), severity.lower()) + + def test_synclog_receive_log(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve(log_text, "synclog") + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + self.assertEqual(len(log_list), 1, "expected 1 log") + + def test_synclog_receive_unicode_log(self): + log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" + log_list = self.trigger_and_retrieve(log_text, "synclog") + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + self.assertTrue(found_log.payload.startswith(log_text)) + + def test_severity_synclog(self): + severities = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] + for severity in severities: + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "synclog", severity=severity + ) + found_severity = log_list[-1].severity + + self.assertEqual(found_severity.lower(), severity.lower()) + + def test_http_request(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + test_url = "www.google.com" + log_list = self.trigger_and_retrieve(log_text, "simplelog", http_request_url=test_url) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + + found_request = log_list[-1].http_request + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertEqual(found_request["requestUrl"], test_url) + + diff --git a/packages/google-cloud-logging/tests/environment/tests/go/stdout.py b/packages/google-cloud-logging/tests/environment/tests/go/stdout.py new file mode 100644 index 000000000000..8d656c409bc3 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/go/stdout.py @@ -0,0 +1,84 @@ +# Copyright 
2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect +import re +import uuid +import json + +import google.cloud.logging + +from ..common.common import Common + + +class CommonStdout: + """ + Common set of tests shared by standard out logs, ensuring logs are written + and parsed in the proper format + + Currently only used by go + """ + + # Go doesn't currently support textpayloads over stdout: https://github.com/googleapis/google-cloud-go/issues/6995 + # commenting out these tests until this is addressed + + # def test_stdout_receive_log(self): + # log_text = f"{inspect.currentframe().f_code.co_name}" + # log_list = self.trigger_and_retrieve(log_text, "stdoutlog") + + # found_log = log_list[-1] + + # self.assertIsNotNone(found_log, "expected log text not found") + # self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + # self.assertTrue(found_log.payload.startswith(log_text)) + # self.assertEqual(len(log_list), 1, "expected 1 log") + + # def test_stdout_receive_unicode_log(self): + # log_text = f"{inspect.currentframe().f_code.co_name} 嗨 世界 😀" + # log_list = self.trigger_and_retrieve(log_text, "stdoutlog") + + # found_log = log_list[-1] + + # self.assertIsNotNone(found_log, "expected log text not found") + # self.assertTrue(isinstance(found_log.payload, str), "expected textPayload") + # self.assertTrue(found_log.payload.startswith(log_text)) + + def test_severity_stdout(self): + severities = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] + for severity in severities: + log_text = f"{inspect.currentframe().f_code.co_name}" + log_list = self.trigger_and_retrieve( + log_text, "stdoutlog", severity=severity + ) + found_severity = log_list[-1].severity + + self.assertEqual(found_severity.lower(), severity.lower()) + + def test_http_request_stdout(self): + log_text = f"{inspect.currentframe().f_code.co_name}" + test_url = "www.google.com" + log_list = self.trigger_and_retrieve(log_text, "stdoutlog", http_request_url=test_url) + + found_log = log_list[-1] + + self.assertIsNotNone(found_log, "expected log text not found") + + found_request = log_list[-1].http_request + self.assertIsNotNone(found_request) + self.assertIsNotNone(found_request["requestUrl"]) + self.assertEqual(found_request["requestUrl"], test_url) + + diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py index d7525a9a2578..3f2f6f251c3c 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_appengine_standard.py @@ -18,9 +18,11 @@ import google.cloud.logging from ..common.common import Common +from .go import CommonGolang +from .stdout import CommonStdout -class TestAppEngineStandard(Common, unittest.TestCase): +class TestAppEngineStandard(Common, CommonGolang, CommonStdout, unittest.TestCase): environment = 
"appengine_standard" language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py index 0e6ca4c4f44b..b64c98ce5450 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_cloudrun.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from .go import CommonGolang +from .stdout import CommonStdout - -class TestCloudRun(Common, unittest.TestCase): +class TestCloudRun(Common, CommonGolang, CommonStdout, unittest.TestCase): environment = "cloudrun" language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py index 9c75cf012f90..c7720d1decad 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_compute.py @@ -16,11 +16,10 @@ import unittest import google.cloud.logging - from ..common.common import Common +from .go import CommonGolang - -class TestComputeEngine(Common, unittest.TestCase): +class TestComputeEngine(Common, CommonGolang, unittest.TestCase): environment = "compute" language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py index 5bb45ea5a59c..12ca12961b35 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_functions.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from .go import CommonGolang +from .stdout import CommonStdout - -class TestCloudFunctions(Common, unittest.TestCase): +class TestCloudFunctions(Common, CommonGolang, CommonStdout, unittest.TestCase): environment = "functions" language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py index 0e9654986c0f..98e08a878ab2 100644 --- a/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/go/test_kubernetes.py @@ -18,9 +18,10 @@ import google.cloud.logging from ..common.common import Common +from .go import CommonGolang +from .stdout import CommonStdout - -class TestKubernetesEngine(Common, unittest.TestCase): +class TestKubernetesEngine(Common, CommonGolang, CommonStdout, unittest.TestCase): environment = "kubernetes" language = "go" diff --git a/packages/google-cloud-logging/tests/environment/tests/common/python.py b/packages/google-cloud-logging/tests/environment/tests/python/python.py similarity index 100% rename from packages/google-cloud-logging/tests/environment/tests/common/python.py rename to packages/google-cloud-logging/tests/environment/tests/python/python.py diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py index 62137c058828..37efffbda020 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_container.py @@ -19,7 
+19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestAppEngineFlexContainer(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py index 62d874979757..94ef351ce760 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_flex_python.py @@ -19,7 +19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestAppEngineFlex(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py index ef8e10ed4bb4..3c086c544588 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_appengine_standard.py @@ -19,7 +19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestAppEngineStandard(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py index eecab08ae0c2..18581a217b14 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_cloudrun.py @@ -21,7 +21,7 @@ from google.cloud.logging_v2.resource import Resource from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestCloudRun(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py index fac1e4db1e82..5337b13949da 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_compute.py @@ -19,7 +19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestComputeEngine(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py index 7eaa0c119df7..63f32c0ff044 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_functions.py @@ -19,7 +19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestCloudFunctions(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py index 
89e7e45f96b0..83ad3fc542e5 100644 --- a/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_kubernetes.py @@ -19,7 +19,7 @@ import google.cloud.logging from ..common.common import Common -from ..common.python import CommonPython +from .python import CommonPython class TestKubernetesEngine(Common, CommonPython, unittest.TestCase): diff --git a/packages/google-cloud-logging/tests/environment/tests/python/test_local.py b/packages/google-cloud-logging/tests/environment/tests/python/test_local.py new file mode 100644 index 000000000000..660cf24c0860 --- /dev/null +++ b/packages/google-cloud-logging/tests/environment/tests/python/test_local.py @@ -0,0 +1,31 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import unittest +import inspect + +import google.cloud.logging + +from ..common.common import Common +from ..common.python import CommonPython + + +class TestLocal(Common, CommonPython, unittest.TestCase): + + environment = "local" + language = "python" + + monitored_resource_name = "global" + monitored_resource_labels = [] diff --git a/tests/environment b/tests/environment index a4922381d1a2..b2f060f30170 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit a4922381d1a228367b9341cd20053a8909de5445 +Subproject commit b2f060f30170d95a0fd813dc39cdaa6abca69ca9 From 4569796a73e4faee5733537bc5156f0491ed8255 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 24 Jan 2023 14:52:41 -0800 Subject: [PATCH 717/855] chore: Update gapic-generator-python to v1.8.2 (#712) --- packages/google-cloud-logging/.coveragerc | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/.coveragerc b/packages/google-cloud-logging/.coveragerc index c0f6e82dff6e..5ae4d709b8ba 100644 --- a/packages/google-cloud-logging/.coveragerc +++ b/packages/google-cloud-logging/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/logging/__init__.py + google/cloud/logging/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER From ea7838d2b00d72ae41dd99c4330f59b60903edde Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 24 Jan 2023 15:44:09 -0800 Subject: [PATCH 718/855] chore(main): release 3.5.0 (#701) --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 20 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 24 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 395791d451f3..155f1bdd1db8 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ 
b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.4.0" + ".": "3.5.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index e82a7e1daad5..0dd576c00354 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.5.0](https://github.com/googleapis/python-logging/compare/v3.4.0...v3.5.0) (2023-01-24) + + +### Features + +* Add support for Python 3.11 ([412d481](https://github.com/googleapis/python-logging/commit/412d481701a62dcdeaa728414dbc82a6edb72c31)) + + +### Bug Fixes + +* Add context manager return types ([e2278c1](https://github.com/googleapis/python-logging/commit/e2278c11fba3d489efad1d42d09d5f05783f2a46)) +* Instrumentation entries should not contain user labels ([#703](https://github.com/googleapis/python-logging/issues/703)) ([e05d132](https://github.com/googleapis/python-logging/commit/e05d132437739e61983bbda4742d5f4587eecdf2)) +* Require proto-plus 1.22.2 for Python 3.11 ([412d481](https://github.com/googleapis/python-logging/commit/412d481701a62dcdeaa728414dbc82a6edb72c31)) +* Send StructuredLogHandler instrumentation log using an explicit logger ([#705](https://github.com/googleapis/python-logging/issues/705)) ([ac6a2c6](https://github.com/googleapis/python-logging/commit/ac6a2c6742b9a620599fa0f4707badf138689645)) + + +### Documentation + +* Add documentation for enums ([e2278c1](https://github.com/googleapis/python-logging/commit/e2278c11fba3d489efad1d42d09d5f05783f2a46)) + ## [3.4.0](https://github.com/googleapis/python-logging/compare/v3.3.1...v3.4.0) (2022-12-15) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 431567aa07df..4576813f0a11 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.4.0" # {x-release-please-version} +__version__ = "3.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 431567aa07df..4576813f0a11 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.4.0" # {x-release-please-version} +__version__ = "3.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 6fc255d8251b..1b2aba1dddad 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.5.0" }, "snippets": [ { From 542ddbc9cd7c59bced3cd3fc7675867df0fa1507 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 25 Jan 2023 21:53:09 +0000 Subject: [PATCH 719/855] chore(deps): update dependency google-cloud-logging to v3.5.0 (#714) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8258225ff9d7..18e8ae016582 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.4.0 +google-cloud-logging==3.5.0 google-cloud-bigquery==3.4.2 google-cloud-storage==2.7.0 google-cloud-pubsub==2.14.0 From 29f7d58a7d58433dc4d8b321aa3ee46f3b2bddda Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:40:31 +0000 Subject: [PATCH 720/855] chore: fix prerelease_deps nox session [autoapprove] (#717) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-logging/noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 889f77dfa25d..f0f3b24b20cd 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 820c51d007a5..554745bb464a 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -197,9 +197,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -354,9 +354,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -386,8 +384,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 485d0ba64d853ae802f39ed15d8709cd5c090ad9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 1 Feb 2023 23:27:43 +0000 Subject: [PATCH 721/855] fix: properly handle None from metadata server (#718) --- .../handlers/_monitored_resources.py | 14 ++-- .../handlers/test__monitored_resources.py | 80 ++++++++++++++++++- 2 files changed, 86 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index 144258749d00..a5b8dfee3269 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -71,8 +71,8 @@ def _create_functions_resource(): resource = Resource( type="cloud_function", labels={ - "project_id": project, - "function_name": function_name, + "project_id": project if project else "", + "function_name": function_name if function_name else "", "region": region.split("/")[-1] if region else "", }, ) @@ -91,7 +91,7 @@ def _create_kubernetes_resource(): resource = Resource( type="k8s_container", labels={ - "project_id": project, + "project_id": project if project else "", "location": zone if zone else "", "cluster_name": cluster_name if cluster_name else "", }, @@ -110,7 +110,7 @@ def _create_compute_resource(): resource = Resource( type="gce_instance", labels={ - "project_id": project, + "project_id": project if project else "", "instance_id": instance if instance else "", "zone": zone if zone else "", }, @@ -128,7 +128,7 @@ def _create_cloud_run_resource(): resource = Resource( type="cloud_run_revision", labels={ - "project_id": project, + "project_id": project if project else "", "service_name": os.environ.get(_CLOUD_RUN_SERVICE_ID, ""), "revision_name": os.environ.get(_CLOUD_RUN_REVISION_ID, ""), "location": region.split("/")[-1] if region else "", @@ -148,7 +148,7 @@ def _create_app_engine_resource(): resource = Resource( type="gae_app", labels={ - "project_id": project, + "project_id": project if project else "", "module_id": os.environ.get(_GAE_SERVICE_ENV, ""), "version_id": os.environ.get(_GAE_VERSION_ENV, ""), "zone": zone if zone else "", @@ -164,7 +164,7 @@ def _create_global_resource(project): Returns: google.cloud.logging.Resource """ - return Resource(type="global", labels={"project_id": project}) + return Resource(type="global", labels={"project_id": project if project else ""}) def detect_resource(project=""): diff --git 
a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index 5acced157e44..3c62cba88958 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -16,7 +16,7 @@ import mock import os - +import functools from google.cloud.logging_v2.handlers._monitored_resources import ( _create_functions_resource, @@ -66,6 +66,20 @@ def _mock_metadata(self, endpoint): else: return None + def _mock_metadata_no_project(self, endpoint): + if ( + endpoint == _monitored_resources._ZONE_ID + or endpoint == _monitored_resources._REGION_ID + ): + return self.LOCATION + elif ( + endpoint == _monitored_resources._GKE_CLUSTER_NAME + or endpoint == _monitored_resources._GCE_INSTANCE_ID + ): + return self.NAME + else: + return None + def setUp(self): os.environ.clear() @@ -100,6 +114,23 @@ def test_create_modern_functions_resource(self): self.assertEqual(func_resource.labels["function_name"], self.NAME) self.assertEqual(func_resource.labels["region"], self.LOCATION) + def test_functions_resource_no_name(self): + """ + Simulate functions environment with function name returned as None + https://github.com/googleapis/python-logging/pull/718 + """ + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata_no_project, + ) + with patch: + func_resource = _create_functions_resource() + + self.assertIsInstance(func_resource, Resource) + self.assertEqual(func_resource.type, "cloud_function") + self.assertEqual(func_resource.labels["project_id"], "") + self.assertEqual(func_resource.labels["function_name"], "") + def test_create_kubernetes_resource(self): patch = mock.patch( @@ -169,6 +200,29 @@ def test_global_resource(self): self.assertEqual(resource.type, "global") self.assertEqual(resource.labels["project_id"], self.PROJECT) + def test_with_no_project_from_server(self): + """ + Ensure project_id uses an empty string if not known + https://github.com/googleapis/python-logging/issues/710 + """ + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata_no_project, + ) + with patch: + _global_resource_patched = functools.partial(_create_global_resource, None) + resource_fns = [ + _global_resource_patched, + _create_app_engine_resource, + _create_cloud_run_resource, + _create_compute_resource, + _create_kubernetes_resource, + _create_functions_resource, + ] + for fn in resource_fns: + resource = fn() + self.assertEqual(resource.labels["project_id"], "") + class Test_Resource_Detection(unittest.TestCase): @@ -189,6 +243,14 @@ def _mock_gce_metadata(self, endpoint): else: return None + def _mock_partial_metadata(self, endpoint): + if endpoint == _monitored_resources._ZONE_ID: + return "ZONE" + elif endpoint == _monitored_resources._GCE_INSTANCE_ID: + return "instance" + else: + return None + def setUp(self): os.environ.clear() @@ -249,3 +311,19 @@ def test_detection_unknown(self): resource = detect_resource(self.PROJECT) self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "global") + + def test_detect_partial_data(self): + """ + Test case where the metadata server returns partial data + """ + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_partial_metadata, + ) + 
with patch: + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "gce_instance") + # project id not returned from metadata server + # should be empty string + self.assertEqual(resource.labels["project_id"], "") From 42f6cd877bda577d8a7d08979247cee67963544e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 2 Feb 2023 11:15:58 +0000 Subject: [PATCH 722/855] chore(deps): update dependency google-cloud-bigquery to v3.5.0 (#719) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 18e8ae016582..60bc7251ec8b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.4.2 +google-cloud-bigquery==3.5.0 google-cloud-storage==2.7.0 google-cloud-pubsub==2.14.0 From d28fba998b5b3325189f5f0dbe77b326a77c690e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 16:32:17 +0000 Subject: [PATCH 723/855] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#723) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 49 +++++++++---------- 2 files changed, 25 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index f0f3b24b20cd..3075557d200b 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,6 @@ # limitations under the License.
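The substance of PATCH 721 above (#718) is a single defensive rule: GCE/GKE metadata-server lookups can return None, so every monitored-resource label is coalesced to an empty string before it reaches the Logging API. A minimal runnable sketch of that pattern; here `lookup` is a hypothetical stand-in for the library's `retrieve_metadata_server` helper, not the real function:

```python
# Hypothetical stand-in for retrieve_metadata_server(): real lookups yield
# None when an endpoint is missing or the metadata server is unreachable.
def lookup(endpoint):
    return {"instance/zone": "projects/123/zones/us-east1-b"}.get(endpoint)

project = lookup("project/project-id")  # None in this sketch
zone = lookup("instance/zone")

# The guard from the patch: a None label must never reach the API.
labels = {
    "project_id": project if project else "",
    "zone": zone.split("/")[-1] if zone else "",
}
assert labels == {"project_id": "", "zone": "us-east1-b"}
```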
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + + diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 05dc4672edaa..096e4800a9ac 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + 
--hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From b679ae11ea3bd8697d660c1313e4531837237a1c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 15 Feb 2023 22:44:00 +0000 Subject: [PATCH 724/855] chore(deps): update dependency google-cloud-pubsub to v2.14.1 (#724) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 60bc7251ec8b..845cb0499718 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 google-cloud-bigquery==3.5.0 google-cloud-storage==2.7.0 -google-cloud-pubsub==2.14.0 +google-cloud-pubsub==2.14.1 From fc24290fb835d73dbe890c350e7b3cd6b0bc123c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 27 Feb 2023 17:08:51 +0000 Subject: [PATCH 725/855] chore(deps): update all dependencies (#726) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 845cb0499718..80177c3a302d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.5.0 +google-cloud-bigquery==3.6.0 google-cloud-storage==2.7.0 -google-cloud-pubsub==2.14.1 +google-cloud-pubsub==2.15.0 From 433e92461e7c98b79fa042b7ad68fdec897368fe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 06:05:27 -0500 Subject: [PATCH 726/855] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#728) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 +--- 
packages/google-cloud-logging/.kokoro/requirements.in | 2 +- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 3075557d200b..5fc5daa31783 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,6 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf - - + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in index cbd7e77f44db..882178ce6001 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.in +++ b/packages/google-cloud-logging/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 096e4800a9ac..fa99c12908f0 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From ea44b09ee4488a9cc89245b8aaf570764a3255e6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 06:39:55 -0500 Subject: [PATCH 727/855] chore: Update gapic-generator-python to v1.8.5 (#727) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/cloud/logging_v2/types/log_entry.py | 2 ++ .../google/cloud/logging_v2/types/logging.py | 2 ++ .../google/cloud/logging_v2/types/logging_config.py | 2 ++ .../google/cloud/logging_v2/types/logging_metrics.py | 2 ++ .../generated_samples/snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 9 insertions(+), 1 deletion(-) diff --git 
a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 60da219abb5a..0536e4db555b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 4c300f705ef1..4d27176d1cc6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index c3c3da099d20..9ed3a767c731 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 128e6369e79b..0d31860a01c3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 1b2aba1dddad..6fc255d8251b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.5.0" + "version": "0.1.0" }, "snippets": [ { From ff5e996382daef569dd252b27ace5f316501205f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 11:19:16 -0400 Subject: [PATCH 728/855] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#731) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- .../google-cloud-logging/.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-logging/.kokoro/requirements.in | 2 +- .../google-cloud-logging/.kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 5fc5daa31783..b8edda51cf46 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
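The `from __future__ import annotations` line that gapic-generator v1.8.5 inserts into each `types` module enables PEP 563 postponed evaluation: annotations are stored as plain strings and resolved only on demand, so generated classes can annotate against names defined later in the file without quoting them. A self-contained illustration, using invented class names rather than the generated ones:

```python
from __future__ import annotations


class Entry:
    # Operation is defined below; without the __future__ import this
    # unquoted forward reference would raise NameError when the method
    # signature is evaluated at class-creation time.
    def operation(self) -> Operation:
        return Operation()


class Operation:
    pass


# The annotation survives as a plain string until something resolves it.
print(Entry.operation.__annotations__)  # {'return': 'Operation'}
```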
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in index 882178ce6001..ec867d9fd65a 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.in +++ b/packages/google-cloud-logging/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index fa99c12908f0..66a2172a76a8 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 470eb4455f4447dcbddea5c4aef9f85121154a9b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:30:42 -0400 Subject: [PATCH 729/855] docs: Fix formatting of request arg in docstring (#734) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 62 ++++++++++--------- .../services/config_service_v2/client.py | 62 ++++++++++--------- .../logging_service_v2/async_client.py | 4 +- 
.../services/logging_service_v2/client.py | 4 +- 4 files changed, 70 insertions(+), 62 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index ebda77d561e0..7549eea4813d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -271,7 +271,7 @@ async def sample_list_buckets(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]]): - The request object. The parameters to `ListBuckets`. + The request object. The parameters to ``ListBuckets``. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -393,7 +393,7 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): - The request object. The parameters to `GetBucket`. + The request object. The parameters to ``GetBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -475,7 +475,7 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): - The request object. The parameters to `CreateBucket`. + The request object. The parameters to ``CreateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -565,7 +565,7 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): - The request object. The parameters to `UpdateBucket`. + The request object. The parameters to ``UpdateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -646,7 +646,7 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): - The request object. The parameters to `DeleteBucket`. + The request object. The parameters to ``DeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -715,7 +715,7 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): - The request object. The parameters to `UndeleteBucket`. + The request object. The parameters to ``UndeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -787,7 +787,7 @@ async def sample_list_views(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListViewsRequest, dict]]): - The request object. The parameters to `ListViews`. + The request object. The parameters to ``ListViews``. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -901,7 +901,7 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): - The request object. The parameters to `GetView`. + The request object. The parameters to ``GetView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -982,7 +982,7 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): - The request object. The parameters to `CreateView`. + The request object. The parameters to ``CreateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1065,7 +1065,7 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): - The request object. The parameters to `UpdateView`. + The request object. The parameters to ``UpdateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1144,7 +1144,7 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): - The request object. The parameters to `DeleteView`. + The request object. The parameters to ``DeleteView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1216,7 +1216,7 @@ async def sample_list_sinks(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListSinksRequest, dict]]): - The request object. The parameters to `ListSinks`. + The request object. The parameters to ``ListSinks``. parent (:class:`str`): Required. The parent resource whose sinks are to be listed: @@ -1346,7 +1346,7 @@ async def sample_get_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetSinkRequest, dict]]): - The request object. The parameters to `GetSink`. + The request object. The parameters to ``GetSink``. sink_name (:class:`str`): Required. The resource name of the sink: @@ -1487,7 +1487,7 @@ async def sample_create_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]]): - The request object. The parameters to `CreateSink`. + The request object. The parameters to ``CreateSink``. parent (:class:`str`): Required. The resource in which to create the sink: @@ -1626,7 +1626,7 @@ async def sample_update_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]]): - The request object. The parameters to `UpdateSink`. + The request object. The parameters to ``UpdateSink``. sink_name (:class:`str`): Required. The full resource name of the sink to update, including the parent resource and the sink identifier: @@ -1789,7 +1789,7 @@ async def sample_delete_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]]): - The request object. The parameters to `DeleteSink`. + The request object. The parameters to ``DeleteSink``. sink_name (:class:`str`): Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: @@ -1907,7 +1907,7 @@ async def sample_list_exclusions(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. The parameters to `ListExclusions`. + The request object. The parameters to ``ListExclusions``. parent (:class:`str`): Required. The parent resource whose exclusions are to be listed. @@ -2037,7 +2037,7 @@ async def sample_get_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): - The request object. The parameters to `GetExclusion`. + The request object. 
The parameters to ``GetExclusion``. name (:class:`str`): Required. The resource name of an existing exclusion: @@ -2172,7 +2172,7 @@ async def sample_create_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): - The request object. The parameters to `CreateExclusion`. + The request object. The parameters to ``CreateExclusion``. parent (:class:`str`): Required. The parent resource in which to create the exclusion: @@ -2308,7 +2308,7 @@ async def sample_update_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): - The request object. The parameters to `UpdateExclusion`. + The request object. The parameters to ``UpdateExclusion``. name (:class:`str`): Required. The resource name of the exclusion to update: @@ -2447,7 +2447,7 @@ async def sample_delete_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): - The request object. The parameters to `DeleteExclusion`. + The request object. The parameters to ``DeleteExclusion``. name (:class:`str`): Required. The resource name of an existing exclusion to delete: @@ -2571,8 +2571,9 @@ async def sample_get_cmek_settings(): request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2678,8 +2679,9 @@ async def sample_update_cmek_settings(): request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2782,8 +2784,9 @@ async def sample_get_settings(): request (Optional[Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]]): The request object. The parameters to [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. name (:class:`str`): Required. The resource for which to retrieve settings. @@ -2923,8 +2926,9 @@ async def sample_update_settings(): request (Optional[Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]]): The request object. The parameters to [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. settings (:class:`google.cloud.logging_v2.types.Settings`): Required. The settings to update.
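The substance of this docs patch is a reST detail: the generated docstrings are rendered by Sphinx, where a single-backtick span is interpreted text handed to the default role and a Markdown-style link stays literal text, while double backticks produce an inline code literal and a backtick-plus-angle-bracket span forms an anonymous hyperlink. A short sketch of the corrected conventions, with an illustrative signature rather than the generated one:

```python
def get_bucket(request=None):
    """Gets a log bucket (illustrative docstring only).

    Args:
        request: The request object. The parameters to ``GetBucket``.
            See `Enabling CMEK for Log
            Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
            for more information.
    """
```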
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 4723239b471f..c76b46fa9543 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -572,7 +572,7 @@ def sample_list_buckets(): Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): - The request object. The parameters to `ListBuckets`. + The request object. The parameters to ``ListBuckets``. parent (str): Required. The parent resource whose buckets are to be listed: @@ -694,7 +694,7 @@ def sample_get_bucket(): Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): - The request object. The parameters to `GetBucket`. + The request object. The parameters to ``GetBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -777,7 +777,7 @@ def sample_create_bucket(): Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): - The request object. The parameters to `CreateBucket`. + The request object. The parameters to ``CreateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -868,7 +868,7 @@ def sample_update_bucket(): Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): - The request object. The parameters to `UpdateBucket`. + The request object. The parameters to ``UpdateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -950,7 +950,7 @@ def sample_delete_bucket(): Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): - The request object. The parameters to `DeleteBucket`. + The request object. The parameters to ``DeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1020,7 +1020,7 @@ def sample_undelete_bucket(): Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): - The request object. The parameters to `UndeleteBucket`. + The request object. The parameters to ``UndeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1093,7 +1093,7 @@ def sample_list_views(): Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): - The request object. The parameters to `ListViews`. + The request object. The parameters to ``ListViews``. parent (str): Required. The bucket whose views are to be listed: @@ -1207,7 +1207,7 @@ def sample_get_view(): Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): - The request object. The parameters to `GetView`. + The request object. The parameters to ``GetView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1289,7 +1289,7 @@ def sample_create_view(): Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): - The request object. The parameters to `CreateView`. + The request object. The parameters to ``CreateView``. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1373,7 +1373,7 @@ def sample_update_view(): Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): - The request object. The parameters to `UpdateView`. + The request object. The parameters to ``UpdateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1453,7 +1453,7 @@ def sample_delete_view(): Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): - The request object. The parameters to `DeleteView`. + The request object. The parameters to ``DeleteView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1526,7 +1526,7 @@ def sample_list_sinks(): Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): - The request object. The parameters to `ListSinks`. + The request object. The parameters to ``ListSinks``. parent (str): Required. The parent resource whose sinks are to be listed: @@ -1645,7 +1645,7 @@ def sample_get_sink(): Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): - The request object. The parameters to `GetSink`. + The request object. The parameters to ``GetSink``. sink_name (str): Required. The resource name of the sink: @@ -1775,7 +1775,7 @@ def sample_create_sink(): Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): - The request object. The parameters to `CreateSink`. + The request object. The parameters to ``CreateSink``. parent (str): Required. The resource in which to create the sink: @@ -1914,7 +1914,7 @@ def sample_update_sink(): Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): - The request object. The parameters to `UpdateSink`. + The request object. The parameters to ``UpdateSink``. sink_name (str): Required. The full resource name of the sink to update, including the parent resource and the sink identifier: @@ -2066,7 +2066,7 @@ def sample_delete_sink(): Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): - The request object. The parameters to `DeleteSink`. + The request object. The parameters to ``DeleteSink``. sink_name (str): Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: @@ -2173,7 +2173,7 @@ def sample_list_exclusions(): Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to `ListExclusions`. + The request object. The parameters to ``ListExclusions``. parent (str): Required. The parent resource whose exclusions are to be listed. @@ -2292,7 +2292,7 @@ def sample_get_exclusion(): Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): - The request object. The parameters to `GetExclusion`. + The request object. The parameters to ``GetExclusion``. name (str): Required. The resource name of an existing exclusion: @@ -2416,7 +2416,7 @@ def sample_create_exclusion(): Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): - The request object. The parameters to `CreateExclusion`. + The request object. The parameters to ``CreateExclusion``. parent (str): Required. 
The parent resource in which to create the exclusion: @@ -2552,7 +2552,7 @@ def sample_update_exclusion(): Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): - The request object. The parameters to `UpdateExclusion`. + The request object. The parameters to ``UpdateExclusion``. name (str): Required. The resource name of the exclusion to update: @@ -2691,7 +2691,7 @@ def sample_delete_exclusion(): Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): - The request object. The parameters to `DeleteExclusion`. + The request object. The parameters to ``DeleteExclusion``. name (str): Required. The resource name of an existing exclusion to delete: @@ -2804,8 +2804,9 @@ def sample_get_cmek_settings(): request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2912,8 +2913,9 @@ def sample_update_cmek_settings(): request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -3017,8 +3019,9 @@ def sample_get_settings(): request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): The request object. The parameters to [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. name (str): Required. The resource for which to retrieve settings. @@ -3158,8 +3161,9 @@ def sample_update_settings(): request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): The request object. The parameters to [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. - See [Enabling CMEK for Log - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Log + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ for more information. settings (google.cloud.logging_v2.types.Settings): Required. The settings to update. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 909895ad6f21..bd8ba63f060b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -580,7 +580,7 @@ async def sample_list_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]]): - The request object. The parameters to `ListLogEntries`. + The request object. The parameters to ``ListLogEntries``. resource_names (:class:`MutableSequence[str]`): Required.
Names of one or more parent resources from which to retrieve log entries: @@ -985,7 +985,7 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): - The request object AsyncIterator. The parameters to `TailLogEntries`. + The request object AsyncIterator. The parameters to ``TailLogEntries``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 83c36302ae3d..7949a41a9fb4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -778,7 +778,7 @@ def sample_list_log_entries(): Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): - The request object. The parameters to `ListLogEntries`. + The request object. The parameters to ``ListLogEntries``. resource_names (MutableSequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -1153,7 +1153,7 @@ def request_generator(): Args: requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): - The request object iterator. The parameters to `TailLogEntries`. + The request object iterator. The parameters to ``TailLogEntries``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. From 9e84c486222faf7014ec253409e6ba09a39252a5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 6 Apr 2023 17:13:01 +0100 Subject: [PATCH 730/855] chore(deps): update all dependencies (#732) --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 9f013668bd64..8d6117f168d6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.2.1 +pytest==7.2.2 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 80177c3a302d..4fbb1feea4a0 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.6.0 -google-cloud-storage==2.7.0 -google-cloud-pubsub==2.15.0 +google-cloud-bigquery==3.9.0 +google-cloud-storage==2.8.0 +google-cloud-pubsub==2.15.2 From 5b1dd583d677dfb2649b40a4fc1c6c87a6cd0662 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 18 Apr 2023 20:34:38 +0200 Subject: [PATCH 731/855] chore(deps): update all dependencies (#738) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt 
b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 8d6117f168d6..96aa71dab7f6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.2.2 +pytest==7.3.1 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4fbb1feea4a0..f3a55761f07a 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 google-cloud-bigquery==3.9.0 google-cloud-storage==2.8.0 -google-cloud-pubsub==2.15.2 +google-cloud-pubsub==2.16.0 From f26682da00bfe3c4170288ff0873b6bf998b4add Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 19 Apr 2023 18:09:39 +0200 Subject: [PATCH 732/855] chore(deps): update dependency google-cloud-bigquery to v3.10.0 (#745) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index f3a55761f07a..0acd1af76fdd 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.9.0 +google-cloud-bigquery==3.10.0 google-cloud-storage==2.8.0 google-cloud-pubsub==2.16.0 From 6fb47b0d402120c1a4e6ee4cd5b5ec07ff9cf220 Mon Sep 17 00:00:00 2001 From: Yoshi Yamaguchi Date: Fri, 21 Apr 2023 09:22:17 +0900 Subject: [PATCH 733/855] feat: add logic to convert severity string to uppercase (#744) --- .../google/cloud/logging_v2/entries.py | 5 +- .../google/cloud/logging_v2/logger.py | 2 +- .../tests/unit/test_logger.py | 161 ++++++++++-------- 3 files changed, 97 insertions(+), 71 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py index cb485da61189..9db020f67ffc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -229,7 +229,10 @@ def to_api_repr(self): if self.insert_id is not None: info["insertId"] = self.insert_id if self.severity is not None: - info["severity"] = self.severity + if isinstance(self.severity, str): + info["severity"] = self.severity.upper() + else: + info["severity"] = self.severity if self.http_request is not None: info["httpRequest"] = self.http_request if self.timestamp is not None: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 85007b796335..88424b27cae7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -141,7 +141,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["resource"] = kw.pop("resource", self.default_resource) severity = kw.get("severity", None) - if isinstance(severity, str) and not severity.isupper(): + if isinstance(severity, str): # convert severity to upper case, as expected by enum definition kw["severity"] = severity.upper() diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py 
b/packages/google-cloud-logging/tests/unit/test_logger.py index 3091693e322a..16c89959bb7b 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -12,16 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from copy import deepcopy -from datetime import datetime -from datetime import timedelta -from datetime import timezone import sys - import unittest -import pytest +from copy import deepcopy +from datetime import datetime, timedelta, timezone import mock +import pytest def _make_credentials(): @@ -131,6 +128,7 @@ def test_log_empty_defaults_w_default_labels(self): def test_log_empty_w_explicit(self): import datetime + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -237,6 +235,7 @@ def test_log_text_w_unicode_and_default_labels(self): def test_log_text_explicit(self): import datetime + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -370,6 +369,7 @@ def test_log_struct_w_default_labels(self): def test_log_struct_w_explicit(self): import datetime + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -533,10 +533,11 @@ def test_log_lowercase_severity(self): ) def test_log_proto_defaults(self): + import json + from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) - import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -559,10 +560,11 @@ def test_log_proto_defaults(self): ) def test_log_proto_w_default_labels(self): + import json + from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) - import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -587,12 +589,12 @@ def test_log_proto_w_default_labels(self): ) def test_log_proto_w_explicit(self): - import json import datetime - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + import json + from google.cloud.logging import Resource + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={"foo": Value(bool_value=True)}) ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -720,11 +722,12 @@ def test_log_inference_struct(self): def test_log_inference_proto(self): import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={"foo": Value(bool_value=True)}) ENTRIES = [ @@ -809,8 +812,7 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging import Client + from google.cloud.logging import DESCENDING, Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -870,8 +872,7 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging import DESCENDING - from 
google.cloud.logging import Client + from google.cloud.logging import DESCENDING, Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -916,11 +917,13 @@ def test_list_entries_explicit_timestamp(self): ) def test_list_entries_limit(self): - from google.cloud.logging import DESCENDING - from google.cloud.logging import ProtobufEntry - from google.cloud.logging import StructEntry - from google.cloud.logging import Logger - from google.cloud.logging import Client + from google.cloud.logging import ( + DESCENDING, + Client, + Logger, + ProtobufEntry, + StructEntry, + ) PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -1010,8 +1013,7 @@ def test_list_entries_limit(self): ) def test_list_entries_folder(self): - from google.cloud.logging import TextEntry - from google.cloud.logging import Client + from google.cloud.logging import Client, TextEntry client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -1042,11 +1044,11 @@ def test_list_entries_folder(self): self.assertEqual(entry.log_name, LOG_NAME) def test_first_log_emits_instrumentation(self): + import google.cloud.logging_v2 + from google.cloud.logging_v2._instrumentation import _create_diagnostic_entry from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) - from google.cloud.logging_v2._instrumentation import _create_diagnostic_entry - import google.cloud.logging_v2 google.cloud.logging_v2._instrumentation_emitted = False DEFAULT_LABELS = {"foo": "spam"} @@ -1116,8 +1118,8 @@ def test_log_empty_defaults(self): def test_log_empty_explicit(self): import datetime - from google.cloud.logging import Resource - from google.cloud.logging import LogEntry + + from google.cloud.logging import LogEntry, Resource LABELS = {"foo": "bar", "baz": "qux"} IID = "IID" @@ -1161,8 +1163,8 @@ def test_log_empty_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_text_defaults(self): - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import TextEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) @@ -1174,8 +1176,8 @@ def test_log_text_defaults(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging import Resource - from google.cloud.logging import TextEntry + + from google.cloud.logging import Resource, TextEntry TEXT = "This is the entry text" LABELS = {"foo": "bar", "baz": "qux"} @@ -1222,8 +1224,8 @@ def test_log_text_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_struct_defaults(self): - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import StructEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) @@ -1235,8 +1237,8 @@ def test_log_struct_defaults(self): def test_log_struct_explicit(self): import datetime - from google.cloud.logging import Resource - from google.cloud.logging import StructEntry + + from google.cloud.logging import Resource, StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} LABELS = {"foo": "bar", "baz": "qux"} @@ -1283,10 +1285,9 @@ def test_log_struct_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_log_proto_defaults(self): - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import ProtobufEntry - 
from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={"foo": Value(bool_value=True)}) ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) @@ -1298,10 +1299,9 @@ def test_log_proto_defaults(self): def test_log_proto_explicit(self): import datetime - from google.cloud.logging import Resource - from google.cloud.logging import ProtobufEntry - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + + from google.cloud.logging import ProtobufEntry, Resource + from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={"foo": Value(bool_value=True)}) LABELS = {"foo": "bar", "baz": "qux"} @@ -1365,8 +1365,8 @@ def test_log_inference_text(self): When calling batch.log with text input, it should call batch.log_text """ - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import TextEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) @@ -1381,8 +1381,8 @@ def test_log_inference_struct(self): When calling batch.struct with text input, it should call batch.log_struct """ - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import StructEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) @@ -1397,10 +1397,9 @@ def test_log_inference_proto(self): When calling batch.log with proto input, it should call batch.log_proto """ - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import ProtobufEntry - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={"foo": Value(bool_value=True)}) ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) @@ -1416,8 +1415,8 @@ def test_log_inference_struct_explicit(self): call batch.log_struct, along with input arguments """ import datetime - from google.cloud.logging import Resource - from google.cloud.logging import StructEntry + + from google.cloud.logging import Resource, StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} LABELS = {"foo": "bar", "baz": "qux"} @@ -1464,15 +1463,15 @@ def test_log_inference_struct_explicit(self): self.assertEqual(batch.entries, [ENTRY]) def test_commit_w_unknown_entry_type(self): - from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import LogEntry + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) api = client.logging_api = _DummyLoggingAPI() batch = self._make_one(logger, client) batch.entries.append(LogEntry(severity="blah")) - ENTRY = {"severity": "blah", "resource": _GLOBAL_RESOURCE._to_dict()} + ENTRY = {"severity": "BLAH", "resource": _GLOBAL_RESOURCE._to_dict()} batch.commit() @@ -1482,9 +1481,35 @@ def test_commit_w_unknown_entry_type(self): ([ENTRY], logger.full_name, None, None, True), ) - def test_commit_w_resource_specified(self): + def 
test_commit_w_lowercase_severity_type(self): + from google.cloud.logging import LogEntry from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + + logger = _Logger() + client = _Client(project=self.PROJECT, connection=_make_credentials()) + api = client.logging_api = _DummyLoggingAPI() + batch = self._make_one(logger, client) + batch.entries.append(LogEntry(severity="info")) + batch.entries.append(LogEntry(severity="warn")) + batch.entries.append(LogEntry(severity="error")) + batch.entries.append(LogEntry(severity="fatal")) + ENTRIES = [ + {"severity": "INFO", "resource": _GLOBAL_RESOURCE._to_dict()}, + {"severity": "WARN", "resource": _GLOBAL_RESOURCE._to_dict()}, + {"severity": "ERROR", "resource": _GLOBAL_RESOURCE._to_dict()}, + {"severity": "FATAL", "resource": _GLOBAL_RESOURCE._to_dict()}, + ] + + batch.commit() + self.assertEqual(list(batch.entries), []) + self.assertEqual( + api._write_entries_called_with, + (ENTRIES, logger.full_name, None, None, True), + ) + + def test_commit_w_resource_specified(self): from google.cloud.logging import Resource + from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -1508,13 +1533,13 @@ def test_commit_w_resource_specified(self): ) def test_commit_w_bound_client(self): - import json import datetime - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + import json + from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1599,11 +1624,11 @@ def test_commit_w_bound_client(self): def test_commit_w_alternate_client(self): import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1651,11 +1676,11 @@ def test_commit_w_alternate_client(self): def test_context_mgr_success(self): import json - from google.protobuf.json_format import MessageToJson - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ -1702,11 +1727,9 @@ def test_context_mgr_success(self): def test_context_mgr_failure(self): import datetime - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - from google.cloud.logging import TextEntry - from google.cloud.logging import StructEntry - from google.cloud.logging import ProtobufEntry + + from google.cloud.logging import ProtobufEntry, StructEntry, TextEntry + from google.protobuf.struct_pb2 import Struct, Value TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} @@ 
-1752,8 +1775,8 @@ def test_append_context_to_error(self): exception should be unchanged """ from google.api_core.exceptions import InvalidArgument - from google.rpc.error_details_pb2 import DebugInfo from google.cloud.logging import TextEntry + from google.rpc.error_details_pb2 import DebugInfo logger = _Logger() client = _Client(project=self.PROJECT) @@ -1803,8 +1826,8 @@ def test_batch_error_gets_context(self): _append_context_to_error is thrown """ from google.api_core.exceptions import InvalidArgument - from google.rpc.error_details_pb2 import DebugInfo from google.cloud.logging import TextEntry + from google.rpc.error_details_pb2 import DebugInfo logger = _Logger() client = _Client(project=self.PROJECT) From 3435d5564f2946eef58e818e19a0fb7cb537f244 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 12:13:16 -0400 Subject: [PATCH 734/855] build(deps): bump requests from 2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#752) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index b8edda51cf46..32b3c486591a 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
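The "convert severity string to uppercase" commit above changes both Logger._do_log and LogEntry.to_api_repr so that string severities are upper-cased before they reach the API. A minimal sketch of the observable behavior, assuming google-cloud-logging 3.6.0 or later is installed; the "warn" value is illustrative:

from google.cloud.logging import LogEntry

entry = LogEntry(severity="warn")  # lowercase input is accepted
info = entry.to_api_repr()         # dict payload written to the API
assert info["severity"] == "WARN"  # normalized to the uppercase enum name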
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 66a2172a76a8..3b8d7ee81848 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From e66e1d3cf9d6cb642bab8b9f7a76dc11052ce879 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 13:27:53 +0200 Subject: [PATCH 735/855] chore(deps): update all dependencies (#747) Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0acd1af76fdd..b4081154543d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 google-cloud-bigquery==3.10.0 -google-cloud-storage==2.8.0 -google-cloud-pubsub==2.16.0 +google-cloud-storage==2.9.0 +google-cloud-pubsub==2.17.1 From d8bfbc524ac8b047170d6c94cbed03abcc6dd009 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 19:19:50 -0400 Subject: [PATCH 736/855] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#756) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 42 +++++++++---------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 32b3c486591a..02a4dedced74 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 3b8d7ee81848..c7929db6d152 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + 
--hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From ca09c879acddbc0c233aca319e8085176da46f59 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Jun 2023 16:50:23 +0200 Subject: [PATCH 737/855] chore(deps): update dependency google-cloud-bigquery to v3.11.0 (#754) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index b4081154543d..bb7cbf27dec6 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.10.0 +google-cloud-bigquery==3.11.0 google-cloud-storage==2.9.0 google-cloud-pubsub==2.17.1 From 1e88f28726d73c1c21b40bd57149e7c28c7d7546 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 10:12:00 -0400 Subject: [PATCH 738/855] chore: remove pinned Sphinx version [autoapprove] (#760) Source-Link: https://github.com/googleapis/synthtool/commit/909573ce9da2819eeb835909c795d29aea5c724e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/noxfile.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 02a4dedced74..1b3cb6c52663 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b +# created: 2023-06-27T13:04:21.96690344Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 554745bb464a..24a97044637f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -313,10 +313,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 1871d2e8f498c67d5c64dc747c8ad4b7cd50459c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:21:58 -0400 Subject: [PATCH 739/855] chore: store artifacts in placer (#761) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/release/common.cfg | 9 +++++++++ packages/google-cloud-logging/noxfile.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 1b3cb6c52663..98994f474104 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b -# created: 2023-06-27T13:04:21.96690344Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 53583ee71289..4dc3167a532f 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -39,6 +39,15 @@ env_vars: { value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. 
+action { + define_artifacts { + regex: "github/python-logging/**/*.tar.gz" + strip_prefix: "github/python-logging" + } +} + ############################################# # this section merged from .kokoro/common_env_vars.cfg using owlbot.py diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 24a97044637f..12440fa9bbb4 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -387,6 +387,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -399,7 +400,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) From b725402f6b91140424d44667ad25dca2556bb8c3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 21:39:47 +0200 Subject: [PATCH 740/855] chore(deps): update all dependencies (#759) Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 96aa71dab7f6..cbd0a47def45 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.3.1 +pytest==7.4.0 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index bb7cbf27dec6..c744c991e9f5 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.5.0 -google-cloud-bigquery==3.11.0 -google-cloud-storage==2.9.0 +google-cloud-bigquery==3.11.3 +google-cloud-storage==2.10.0 google-cloud-pubsub==2.17.1 From 81762ebb754d8ce1a40c37ea523b55ea2a14cf8a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 14 Jul 2023 10:17:36 -0700 Subject: [PATCH 741/855] chore(main): release 3.6.0 (#720) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 17 +++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 21 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 155f1bdd1db8..23efc1eaad38 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.5.0" + ".": "3.6.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 0dd576c00354..16e128b18c93 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 
[3.6.0](https://github.com/googleapis/python-logging/compare/v3.5.0...v3.6.0) (2023-07-05) + + +### Features + +* Add logic to convert severity string to uppercase ([#744](https://github.com/googleapis/python-logging/issues/744)) ([c1c8ce1](https://github.com/googleapis/python-logging/commit/c1c8ce158f566150319c2d4fb2f068b10668d507)) + + +### Bug Fixes + +* Properly handle None from metadata server ([#718](https://github.com/googleapis/python-logging/issues/718)) ([dedaff9](https://github.com/googleapis/python-logging/commit/dedaff95b2e2ed178a26aa9a04cfafb9b803ec60)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#734](https://github.com/googleapis/python-logging/issues/734)) ([dbb1794](https://github.com/googleapis/python-logging/commit/dbb179407f20beb0f8927570dbc1630c62b23268)) + ## [3.5.0](https://github.com/googleapis/python-logging/compare/v3.4.0...v3.5.0) (2023-01-24) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 4576813f0a11..d2952231400a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.5.0" # {x-release-please-version} +__version__ = "3.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 4576813f0a11..d2952231400a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.5.0" # {x-release-please-version} +__version__ = "3.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 6fc255d8251b..0cf8959def02 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.6.0" }, "snippets": [ { From ec854ee3caf65e7b98bda5089bf1e89f39d62ee5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 14 Jul 2023 21:51:48 +0200 Subject: [PATCH 742/855] chore(deps): update all dependencies (#763) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index c744c991e9f5..3f968a7b66fd 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.5.0 +google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.3 google-cloud-storage==2.10.0 -google-cloud-pubsub==2.17.1 +google-cloud-pubsub==2.18.0 From 4d6833fa372775fb2a9c0727b7812425b2e6ea99 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 13:01:10 -0400 Subject: [PATCH 743/855] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#765) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot --- packages/google-cloud-logging/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-logging/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-logging/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 44 ++++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- .../.kokoro/test-samples.sh | 2 +- .../.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/.trampolinerc | 4 +- packages/google-cloud-logging/MANIFEST.in | 2 +- packages/google-cloud-logging/docs/conf.py | 2 +- packages/google-cloud-logging/noxfile.py | 3 +- .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 18 ++++---- packages/google-cloud-logging/setup.cfg | 2 +- 21 files changed, 54 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 7f85294c9359..89954f8bd02e 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 98994f474104..ae4a522b9e5f 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index 4d739a338901..afa7a81aa8d6 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index f8137d0ae497..8e39a2cc438d 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/populate-secrets.sh b/packages/google-cloud-logging/.kokoro/populate-secrets.sh index f52514257ef0..6f3972140e80 100755 --- a/packages/google-cloud-logging/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-logging/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index 1c4d62370042..9eafe0be3bba 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 37524859b662..9bdfbceb56b0 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index c7929db6d152..67d70a110897 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + 
--hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh index ba3a707b040c..63ac41dfae1d 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index 2c6500cae0b9..5a0f5fab6a89 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index 11c042d342d7..50b35a48c190 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/trampoline.sh b/packages/google-cloud-logging/.kokoro/trampoline.sh index f39236e943a8..d85b1f267693 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..59a7cf3a9373 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 5405cc8ff1f3..9e3898fd1c12 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index a51abf0b5c2b..65248f703aa6 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. required_envvars+=() diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index 04f5d0ef5d00..fffea8f16413 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 12440fa9bbb4..acfd04aa8657 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -408,6 +408,7 @@ def prerelease_deps(session): "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" ) session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-logging/scripts/decrypt-secrets.sh b/packages/google-cloud-logging/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-logging/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-logging/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py index 91b59676bfc7..1acc119835b5 100644 --- a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,17 +33,17 @@ autoescape=True, ) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -51,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. @@ -61,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/packages/google-cloud-logging/setup.cfg b/packages/google-cloud-logging/setup.cfg index c3a2b39f6528..052350089505 100644 --- a/packages/google-cloud-logging/setup.cfg +++ b/packages/google-cloud-logging/setup.cfg @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 213a8d14443cf82332778d4a336381fa86b10a24 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 10:17:19 -0400 Subject: [PATCH 744/855] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#768) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index ae4a522b9e5f..17c21d96d654 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 67d70a110897..b563eb284459 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From 5d7c09919448fb25f0252492406c3260180f261d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 22 Jul 2023 13:11:36 +0200 Subject: [PATCH 745/855] chore(deps): update dependency google-cloud-bigquery to v3.11.4 (#766) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 3f968a7b66fd..34cc8a1e11e1 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 -google-cloud-bigquery==3.11.3 +google-cloud-bigquery==3.11.4 google-cloud-storage==2.10.0 google-cloud-pubsub==2.18.0 From 7225da85477a5c273a3deb80cff8c773008713cc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 27 Jul 2023 06:14:06 -0400 Subject: [PATCH 746/855] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#770) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 17c21d96d654..0ddd0e4d1873 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
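The commit above refreshes the pinned certifi bundle from 2022.12.7 to 2023.7.22 (the requirements diff continues below). certifi's whole job is to ship the CA file that TLS verification runs against, so the bump swaps in a refreshed root store; a minimal sketch, none of it from the patch:

import ssl
import certifi

# certifi.where() points at the bundled cacert.pem; a context built from it
# verifies server certificates against the updated roots.
print(certifi.where())
ctx = ssl.create_default_context(cafile=certifi.where())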
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index b563eb284459..76d9bba0f7d0 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From e5bc166e88bd65e89cf3b2daf1de2b69321de237 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 31 Jul 2023 16:03:51 +0200 Subject: [PATCH 747/855] chore(deps): update dependency google-cloud-pubsub to v2.18.1 (#772) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 34cc8a1e11e1..4e51431e6dcf 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.4 google-cloud-storage==2.10.0 -google-cloud-pubsub==2.18.0 +google-cloud-pubsub==2.18.1 From d47737ebbccdd5280a044acccfb7966debb65a69 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Aug 2023 10:51:14 -0400 Subject: [PATCH 748/855] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#775) * build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 Source-Link: https://github.com/googleapis/synthtool/commit/352b9d4c068ce7c05908172af128b294073bf53c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 * fix lint E721 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- .../.pre-commit-config.yaml | 2 +- .../google/cloud/logging_v2/_gapic.py | 6 +-- .../google/cloud/logging_v2/client.py | 2 +- packages/google-cloud-logging/noxfile.py | 3 +- 6 files changed, 33 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 0ddd0e4d1873..a3da1b0d4cd3 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
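Alongside the cryptography bump, this commit's "fix lint E721" change (its _gapic.py and client.py hunks appear below) replaces exact-type comparisons with isinstance, which flake8 6.1 enforces. A self-contained sketch of the difference the lint rule is about:

class Base:
    pass

class Sub(Base):
    pass

x = Sub()
print(type(x) == Base)      # False: exact-type comparison, flagged as E721
print(isinstance(x, Base))  # True: isinstance also matches subclasses

Note that in the hunks below this slightly widens the check: subclasses of ClientInfo now take the gapic-conversion branch as well.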
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 76d9bba0f7d0..029bd342de94 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + 
--hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 9e3898fd1c12..19409cbd37a4 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 + rev: 6.1.0 hooks: - id: flake8 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index b71d3d92c982..f6f6dca1f78d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -592,7 +592,7 @@ def make_logging_api(client): _LoggingAPI: A logging API instance with the proper credentials. """ info = client._client_info - if type(info) == client_info.ClientInfo: + if isinstance(info, client_info.ClientInfo): # convert into gapic-compatible subclass info = _client_info_to_gapic(info) @@ -615,7 +615,7 @@ def make_metrics_api(client): _MetricsAPI: A metrics API instance with the proper credentials. """ info = client._client_info - if type(info) == client_info.ClientInfo: + if isinstance(info, client_info.ClientInfo): # convert into gapic-compatible subclass info = _client_info_to_gapic(info) @@ -638,7 +638,7 @@ def make_sinks_api(client): _SinksAPI: A sinks API instance with the proper credentials.
""" info = client._client_info - if type(info) == client_info.ClientInfo: + if isinstance(info, client_info.ClientInfo): # convert into gapic-compatible subclass info = _client_info_to_gapic(info) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 218eee09530a..94c1e6ca7260 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -128,7 +128,7 @@ def __init__( kw_args = {"client_info": client_info} if client_options: - if type(client_options) == dict: + if isinstance(client_options, dict): client_options = google.api_core.client_options.from_dict( client_options ) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index acfd04aa8657..7ebe500a312b 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -25,6 +25,7 @@ import nox +FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -90,7 +91,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( "black", "--check", From ed79c3aad028956497ef09e6516fe8a9b50781c3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 8 Aug 2023 16:27:21 +0200 Subject: [PATCH 749/855] chore(deps): update dependency google-cloud-pubsub to v2.18.2 (#776) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4e51431e6dcf..d4cc2c363793 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.4 google-cloud-storage==2.10.0 -google-cloud-pubsub==2.18.1 +google-cloud-pubsub==2.18.2 From 139860655b036f471e0279c2517dc32c0b30116f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 29 Aug 2023 15:28:36 +0200 Subject: [PATCH 750/855] chore(deps): update dependency google-cloud-pubsub to v2.18.3 (#777) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index d4cc2c363793..65c59ffd6c4b 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.4 google-cloud-storage==2.10.0 -google-cloud-pubsub==2.18.2 +google-cloud-pubsub==2.18.3 From 15bcdd12ad6897923e8b402d86bc54ada759c0f4 Mon Sep 17 00:00:00 2001 From: minherz Date: Tue, 12 Sep 2023 16:12:42 +0000 Subject: [PATCH 751/855] fix: add severity to structured log write (#783) --- packages/google-cloud-logging/samples/snippets/snippets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py 
b/packages/google-cloud-logging/samples/snippets/snippets.py index 39399dcf793b..a409e996bb61 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -38,7 +38,7 @@ def write_entry(logger_name): logger.log_text("Hello, world!") # Simple text log with severity. - logger.log_text("Goodbye, world!", severity="ERROR") + logger.log_text("Goodbye, world!", severity="WARNING") # Struct log. The struct can be any JSON-serializable dictionary. logger.log_struct( @@ -46,7 +46,7 @@ def write_entry(logger_name): "name": "King Arthur", "quest": "Find the Holy Grail", "favorite_color": "Blue", - } + }, severity="INFO" ) print("Wrote logs to {}.".format(logger.name)) From 5926412e1ab83bdb5755e27064d949a921428616 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 19 Sep 2023 11:55:31 -0400 Subject: [PATCH 752/855] feat: Log Analytics features of the Cloud Logging API (#746) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 chore: Update gapic-generator-python to v1.11.2 PiperOrigin-RevId: 546510849 Source-Link: https://github.com/googleapis/googleapis/commit/736073ad9a9763a170eceaaa54519bcc0ea55a5e Source-Link: https://github.com/googleapis/googleapis-gen/commit/deb64e8ec19d141e31089fe932b3a997ad541c4d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGViNjRlOGVjMTlkMTQxZTMxMDg5ZmU5MzJiM2E5OTdhZDU0MWM0ZCJ9 fix: Add async context manager return types chore: Mock return_value should not populate oneof message fields chore: Support snippet generation for services that only support REST transport chore: Update gapic-generator-python to v1.11.0 PiperOrigin-RevId: 545430278 Source-Link: https://github.com/googleapis/googleapis/commit/601b5326107eeb74800b426d1f9933faa233258a Source-Link: https://github.com/googleapis/googleapis-gen/commit/b3f18d0f6560a855022fd058865e7620479d7af9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjNmMThkMGY2NTYwYTg1NTAyMmZkMDU4ODY1ZTc2MjA0NzlkN2FmOSJ9 feat: Log Analytics features of the Cloud Logging API feat: Add ConfigServiceV2.CreateBucketAsync method for creating Log Buckets asynchronously feat: Add ConfigServiceV2.UpdateBucketAsync method for creating Log Buckets asynchronously feat: Add ConfigServiceV2.CreateLink method for creating linked datasets for Log Analytics Buckets feat: Add ConfigServiceV2.DeleteLink method for deleting linked datasets feat: Add ConfigServiceV2.ListLinks method for listing linked datasets feat: Add ConfigServiceV2.GetLink methods for describing linked datasets feat: Add LogBucket.analytics_enabled field that specifies whether Log Bucket's Analytics features are enabled feat: Add LogBucket.index_configs field that contains a list of Log Bucket's indexed fields and related configuration data docs: Documentation for the Log Analytics features of the Cloud Logging API PiperOrigin-RevId: 529851525 Source-Link: https://github.com/googleapis/googleapis/commit/1c7ee99d19adf8e444e2d73c5dd52884eab9862d Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/4a2a3a05b91804333a1b39b635d8fe2243d4b4fd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGEyYTNhMDViOTE4MDQzMzNhMWIzOWI2MzVkOGZlMjI0M2Q0YjRmZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/gapic_metadata.json | 60 + .../cloud/logging_v2/services/__init__.py | 2 +- .../services/config_service_v2/__init__.py | 2 +- .../config_service_v2/async_client.py | 977 ++- .../services/config_service_v2/client.py | 973 ++- .../services/config_service_v2/pagers.py | 130 +- .../config_service_v2/transports/__init__.py | 2 +- .../config_service_v2/transports/base.py | 116 +- .../config_service_v2/transports/grpc.py | 231 +- .../transports/grpc_asyncio.py | 241 +- .../services/logging_service_v2/__init__.py | 2 +- .../logging_service_v2/async_client.py | 188 +- .../services/logging_service_v2/client.py | 186 +- .../services/logging_service_v2/pagers.py | 2 +- .../logging_service_v2/transports/__init__.py | 2 +- .../logging_service_v2/transports/base.py | 30 +- .../logging_service_v2/transports/grpc.py | 56 +- .../transports/grpc_asyncio.py | 56 +- .../services/metrics_service_v2/__init__.py | 2 +- .../metrics_service_v2/async_client.py | 171 +- .../services/metrics_service_v2/client.py | 169 +- .../services/metrics_service_v2/pagers.py | 2 +- .../metrics_service_v2/transports/__init__.py | 2 +- .../metrics_service_v2/transports/base.py | 30 +- .../metrics_service_v2/transports/grpc.py | 56 +- .../transports/grpc_asyncio.py | 56 +- .../google/cloud/logging_v2/types/__init__.py | 26 +- .../cloud/logging_v2/types/log_entry.py | 61 +- .../google/cloud/logging_v2/types/logging.py | 88 +- .../cloud/logging_v2/types/logging_config.py | 526 +- .../cloud/logging_v2/types/logging_metrics.py | 22 +- ...onfig_service_v2_copy_log_entries_async.py | 2 +- ...config_service_v2_copy_log_entries_sync.py | 2 +- ...d_config_service_v2_create_bucket_async.py | 2 +- ...ig_service_v2_create_bucket_async_async.py | 57 + ...fig_service_v2_create_bucket_async_sync.py | 57 + ...ed_config_service_v2_create_bucket_sync.py | 2 +- ...onfig_service_v2_create_exclusion_async.py | 2 +- ...config_service_v2_create_exclusion_sync.py | 2 +- ...ted_config_service_v2_create_link_async.py | 57 + ...ated_config_service_v2_create_link_sync.py | 57 + ...ted_config_service_v2_create_sink_async.py | 2 +- ...ated_config_service_v2_create_sink_sync.py | 2 +- ...ted_config_service_v2_create_view_async.py | 2 +- ...ated_config_service_v2_create_view_sync.py | 2 +- ...d_config_service_v2_delete_bucket_async.py | 2 +- ...ed_config_service_v2_delete_bucket_sync.py | 2 +- ...onfig_service_v2_delete_exclusion_async.py | 2 +- ...config_service_v2_delete_exclusion_sync.py | 2 +- ...ted_config_service_v2_delete_link_async.py | 56 + ...ated_config_service_v2_delete_link_sync.py | 56 + ...ted_config_service_v2_delete_sink_async.py | 2 +- ...ated_config_service_v2_delete_sink_sync.py | 2 +- ...ted_config_service_v2_delete_view_async.py | 2 +- ...ated_config_service_v2_delete_view_sync.py | 2 +- ...ated_config_service_v2_get_bucket_async.py | 2 +- ...rated_config_service_v2_get_bucket_sync.py | 2 +- ...nfig_service_v2_get_cmek_settings_async.py | 2 +- ...onfig_service_v2_get_cmek_settings_sync.py | 2 +- ...d_config_service_v2_get_exclusion_async.py | 2 +- ...ed_config_service_v2_get_exclusion_sync.py | 2 +- ...erated_config_service_v2_get_link_async.py | 52 + 
...nerated_config_service_v2_get_link_sync.py | 52 + ...ed_config_service_v2_get_settings_async.py | 2 +- ...ted_config_service_v2_get_settings_sync.py | 2 +- ...erated_config_service_v2_get_sink_async.py | 2 +- ...nerated_config_service_v2_get_sink_sync.py | 2 +- ...erated_config_service_v2_get_view_async.py | 2 +- ...nerated_config_service_v2_get_view_sync.py | 2 +- ...ed_config_service_v2_list_buckets_async.py | 2 +- ...ted_config_service_v2_list_buckets_sync.py | 2 +- ...config_service_v2_list_exclusions_async.py | 2 +- ..._config_service_v2_list_exclusions_sync.py | 2 +- ...ated_config_service_v2_list_links_async.py | 53 + ...rated_config_service_v2_list_links_sync.py | 53 + ...ated_config_service_v2_list_sinks_async.py | 2 +- ...rated_config_service_v2_list_sinks_sync.py | 2 +- ...ated_config_service_v2_list_views_async.py | 2 +- ...rated_config_service_v2_list_views_sync.py | 2 +- ...config_service_v2_undelete_bucket_async.py | 2 +- ..._config_service_v2_undelete_bucket_sync.py | 2 +- ...d_config_service_v2_update_bucket_async.py | 2 +- ...ig_service_v2_update_bucket_async_async.py | 56 + ...fig_service_v2_update_bucket_async_sync.py | 56 + ...ed_config_service_v2_update_bucket_sync.py | 2 +- ...g_service_v2_update_cmek_settings_async.py | 2 +- ...ig_service_v2_update_cmek_settings_sync.py | 2 +- ...onfig_service_v2_update_exclusion_async.py | 2 +- ...config_service_v2_update_exclusion_sync.py | 2 +- ...config_service_v2_update_settings_async.py | 2 +- ..._config_service_v2_update_settings_sync.py | 2 +- ...ted_config_service_v2_update_sink_async.py | 2 +- ...ated_config_service_v2_update_sink_sync.py | 2 +- ...ted_config_service_v2_update_view_async.py | 2 +- ...ated_config_service_v2_update_view_sync.py | 2 +- ...ted_logging_service_v2_delete_log_async.py | 2 +- ...ated_logging_service_v2_delete_log_sync.py | 2 +- ...gging_service_v2_list_log_entries_async.py | 2 +- ...ogging_service_v2_list_log_entries_sync.py | 2 +- ...ated_logging_service_v2_list_logs_async.py | 2 +- ...rated_logging_service_v2_list_logs_sync.py | 2 +- ...st_monitored_resource_descriptors_async.py | 2 +- ...ist_monitored_resource_descriptors_sync.py | 2 +- ...gging_service_v2_tail_log_entries_async.py | 2 +- ...ogging_service_v2_tail_log_entries_sync.py | 2 +- ...ging_service_v2_write_log_entries_async.py | 2 +- ...gging_service_v2_write_log_entries_sync.py | 2 +- ...rics_service_v2_create_log_metric_async.py | 2 +- ...trics_service_v2_create_log_metric_sync.py | 2 +- ...rics_service_v2_delete_log_metric_async.py | 2 +- ...trics_service_v2_delete_log_metric_sync.py | 2 +- ...metrics_service_v2_get_log_metric_async.py | 2 +- ..._metrics_service_v2_get_log_metric_sync.py | 2 +- ...trics_service_v2_list_log_metrics_async.py | 2 +- ...etrics_service_v2_list_log_metrics_sync.py | 2 +- ...rics_service_v2_update_log_metric_async.py | 2 +- ...trics_service_v2_update_log_metric_sync.py | 2 +- .../snippet_metadata_google.logging.v2.json | 1666 +++- .../samples/snippets/snippets.py | 3 +- .../google-cloud-logging/tests/__init__.py | 2 +- .../tests/unit/__init__.py | 2 +- .../tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/__init__.py | 2 +- .../logging_v2/test_config_service_v2.py | 6829 +++++++++++------ .../logging_v2/test_logging_service_v2.py | 453 +- .../logging_v2/test_metrics_service_v2.py | 453 +- 126 files changed, 11552 insertions(+), 3137 deletions(-) create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py 
create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py create mode 100644 packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json index a629e5a50d4c..8d2b1297a0d7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json @@ -20,11 +20,21 @@ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -45,6 +55,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -70,6 +85,11 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, "GetSettings": { "methods": [ "get_settings" @@ -95,6 +115,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -115,6 +140,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -155,11 +185,21 @@ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -180,6 +220,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -205,6 +250,11 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, "GetSettings": { "methods": [ "get_settings" @@ -230,6 +280,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -250,6 +305,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, 
"UpdateCmekSettings": { "methods": [ "update_cmek_settings" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 6eb3681ce414..bf30439496d6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 7549eea4813d..e066569f72e2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -46,6 +46,8 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -65,6 +67,8 @@ class ConfigServiceV2AsyncClient: parse_cmek_settings_path = staticmethod( ConfigServiceV2Client.parse_cmek_settings_path ) + link_path = staticmethod(ConfigServiceV2Client.link_path) + parse_link_path = staticmethod(ConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) @@ -299,6 +303,7 @@ async def sample_list_buckets(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. @@ -434,6 +439,204 @@ async def sample_get_bucket(): # Done; return the response. 
return response + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. 
+ + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + async def create_bucket( self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, @@ -524,12 +727,7 @@ async def update_bucket( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -807,6 +1005,7 @@ async def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. 
+ Iterating over this object will yield results and resolve additional pages automatically. @@ -1866,17 +2065,21 @@ async def sample_delete_sink(): metadata=metadata, ) - async def list_exclusions( + async def create_link( self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions on the \_Default sink in a parent - resource. + ) -> operation_async.AsyncOperation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. code-block:: python @@ -1889,39 +2092,56 @@ async def list_exclusions( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_list_exclusions(): + async def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. The parameters to ``ListExclusions``. + request (Optional[Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]]): + The request object. The parameters to CreateLink. parent (:class:`str`): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + link (:class:`google.cloud.logging_v2.types.Link`): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (:class:`str`): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
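Since CreateLink is a long-running operation, the easiest way to exercise the new surface end to end is the blocking client; a usage sketch with placeholder project, bucket, and link names (none taken from the patch):

from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
operation = client.create_link(
    parent="projects/my-project/locations/global/buckets/my-bucket",
    link=logging_v2.Link(description="linked BigQuery dataset"),
    link_id="my_link",
)
link = operation.result()  # blocks until the linked dataset exists
print(link.name)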
@@ -1929,46 +2149,40 @@ async def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: - Result returned from ListExclusions. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.ListExclusionsRequest(request) + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.create_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1986,28 +2200,28 @@ async def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListExclusionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - async def get_exclusion( + async def delete_link( self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion in the \_Default sink. + ) -> operation_async.AsyncOperation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. .. 
code-block:: python @@ -2020,17 +2234,500 @@ async def get_exclusion( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_get_exclusion(): + async def sample_delete_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.DeleteLinkRequest( name="name_value", ) # Make the request - response = await client.get_exclusion(request=request) + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]]): + The request object. The parameters to DeleteLink. + name (:class:`str`): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.DeleteLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_link, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + async def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLinksAsyncPager: + r"""Lists links. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLinksRequest, dict]]): + The request object. The parameters to ListLinks. + parent (:class:`str`): + Required. The parent resource whose links are to be + listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_links, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
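        # (Illustrative aside, not part of the generated file: the helper on
        # the next line folds the resource name into gRPC metadata, roughly
        #
        #   from google.api_core.gapic_v1 import routing_header
        #   routing_header.to_grpc_metadata((("parent", request.parent),))
        #   # -> one ("x-goog-request-params", "parent=...") pair, with the
        #   # value URL-encoded, which the frontend uses to route the RPC.)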
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Link: + r"""Gets a link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetLinkRequest, dict]]): + The request object. The parameters to GetLink. + name (:class:`str`): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.GetLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_link,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_exclusions(
+        self,
+        request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListExclusionsAsyncPager:
+        r"""Lists all the exclusions on the \_Default sink in a parent
+        resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import logging_v2
+
+            async def sample_list_exclusions():
+                # Create a client
+                client = logging_v2.ConfigServiceV2AsyncClient()
+
+                # Initialize request argument(s)
+                request = logging_v2.ListExclusionsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_exclusions(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]):
+                The request object. The parameters to ``ListExclusions``.
+            parent (:class:`str`):
+                Required. The parent resource whose exclusions are to be
+                listed.
+
+                ::
+
+                    "projects/[PROJECT_ID]"
+                    "organizations/[ORGANIZATION_ID]"
+                    "billingAccounts/[BILLING_ACCOUNT_ID]"
+                    "folders/[FOLDER_ID]"
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager:
+                Result returned from ListExclusions.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = logging_config.ListExclusionsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
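+        # The wrapped call below carries a default retry policy: transient
+        # DEADLINE_EXCEEDED, INTERNAL and UNAVAILABLE errors are retried with
+        # exponential backoff (0.1s initial delay, 1.3x multiplier, 60s cap)
+        # until the overall 60-second deadline expires.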
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_exclusions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_exclusion(request=request) # Handle the response print(response) @@ -3102,7 +3799,169 @@ async def sample_copy_log_entries(): # Done; return the response. return response - async def __aenter__(self): + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
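+        # For example, a hypothetical dict request such as
+        #     {"name": "projects/my-project", "page_size": 10}
+        # is expanded to ListOperationsRequest(name=..., page_size=...);
+        # an already-constructed protobuf message passes through unchanged.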
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.cancel_operation,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def __aenter__(self) -> "ConfigServiceV2AsyncClient":
         return self
 
     async def __aexit__(self, exc_type, exc, tb):
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
index c76b46fa9543..5208fe442d74 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -50,6 +50,8 @@
 from google.api_core import operation_async  # type: ignore
 from google.cloud.logging_v2.services.config_service_v2 import pagers
 from google.cloud.logging_v2.types import logging_config
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
 from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
@@ -193,6 +195,30 @@ def parse_cmek_settings_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/cmekSettings$", path)
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def link_path(
+        project: str,
+        location: str,
+        bucket: str,
+        link: str,
+    ) -> str:
+        """Returns a fully-qualified link string."""
+        return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(
+            project=project,
+            location=location,
+            bucket=bucket,
+            link=link,
+        )
+
+    @staticmethod
+    def parse_link_path(path: str) -> Dict[str, str]:
+        """Parses a link path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)/links/(?P<link>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def log_bucket_path(
         project: str,
@@ -600,6 +626,7 @@ def sample_list_buckets():
         Returns:
             google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager:
                 The response from ListBuckets.
+
                 Iterating over this object will yield results and
                 resolve additional pages automatically.
 
@@ -736,6 +763,206 @@ def sample_get_bucket():
         # Done; return the response.
         return response
 
+    def create_bucket_async(
+        self,
+        request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Creates a log bucket asynchronously that can be used
+        to store log entries.
+        After a bucket has been created, the bucket's location
+        cannot be changed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import logging_v2
+
+            def sample_create_bucket_async():
+                # Create a client
+                client = logging_v2.ConfigServiceV2Client()
+
+                # Initialize request argument(s)
+                request = logging_v2.CreateBucketRequest(
+                    parent="parent_value",
+                    bucket_id="bucket_id_value",
+                )
+
+                # Make the request
+                operation = client.create_bucket_async(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]):
+                The request object. The parameters to ``CreateBucket``.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.logging_v2.types.LogBucket`
+                Describes a repository in which log entries are stored.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_config.CreateBucketRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_config.CreateBucketRequest):
+            request = logging_config.CreateBucketRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_bucket_async]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            logging_config.LogBucket,
+            metadata_type=logging_config.BucketMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_bucket_async(
+        self,
+        request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Updates a log bucket asynchronously.
+
+        If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``,
+        then ``FAILED_PRECONDITION`` will be returned.
+
+        After a bucket has been created, the bucket's location cannot be
+        changed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + def create_bucket( self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, @@ -827,12 +1054,7 @@ def update_bucket( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -1113,6 +1335,7 @@ def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. 
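The create_bucket_async and update_bucket_async methods added above return google.api_core.operation.Operation futures rather than LogBucket objects. A minimal driving sketch, assuming hypothetical project/location/bucket IDs and that application-default credentials are configured:

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # Start the asynchronous creation; the RPC returns immediately with an
    # operation future rather than the finished bucket.
    op = client.create_bucket_async(
        request=logging_v2.CreateBucketRequest(
            parent="projects/my-project/locations/global",  # hypothetical IDs
            bucket_id="my-bucket",
        )
    )

    # result() blocks until the server finishes and returns the LogBucket.
    bucket = op.result()
    print(bucket.name)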
@@ -2132,17 +2355,21 @@ def sample_delete_sink(): metadata=metadata, ) - def list_exclusions( + def create_link( self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions on the \_Default sink in a parent - resource. + ) -> operation.Operation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. code-block:: python @@ -2155,39 +2382,56 @@ def list_exclusions( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_list_exclusions(): + def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to ``ListExclusions``. + request (Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]): + The request object. The parameters to CreateLink. parent (str): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + link (google.cloud.logging_v2.types.Link): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (str): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2195,17 +2439,18 @@ def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: - Result returned from ListExclusions. 
+ google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2213,19 +2458,23 @@ def sample_list_exclusions(): ) # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. + # in a logging_config.CreateLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.ListExclusionsRequest): - request = logging_config.ListExclusionsRequest(request) + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + rpc = self._transport._wrapped_methods[self._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2241,28 +2490,500 @@ def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListExclusionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - def get_exclusion( + def delete_link( self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion in the \_Default sink. + ) -> operation.Operation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]): + The request object. The parameters to DeleteLink. + name (str): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteLinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=logging_config.LinkMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_links(
+        self,
+        request: Optional[Union[logging_config.ListLinksRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListLinksPager:
+        r"""Lists links.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import logging_v2
+
+            def sample_list_links():
+                # Create a client
+                client = logging_v2.ConfigServiceV2Client()
+
+                # Initialize request argument(s)
+                request = logging_v2.ListLinksRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_links(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.ListLinksRequest, dict]):
+                The request object. The parameters to ListLinks.
+            parent (str):
+                Required. The parent resource whose links are to be
+                listed:
+
+                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/"
+                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager:
+                The response from ListLinks.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_config.ListLinksRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, logging_config.ListLinksRequest):
+            request = logging_config.ListLinksRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
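+        # The _wrapped_methods table is populated once in the transport's
+        # _prep_wrapped_messages (see the transports/base.py hunk below);
+        # indexing it by the bound transport method returns that RPC already
+        # wrapped with its default retry and timeout configuration.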
+        rpc = self._transport._wrapped_methods[self._transport.list_links]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListLinksPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_link(
+        self,
+        request: Optional[Union[logging_config.GetLinkRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> logging_config.Link:
+        r"""Gets a link.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import logging_v2
+
+            def sample_get_link():
+                # Create a client
+                client = logging_v2.ConfigServiceV2Client()
+
+                # Initialize request argument(s)
+                request = logging_v2.GetLinkRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_link(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.logging_v2.types.GetLinkRequest, dict]):
+                The request object. The parameters to GetLink.
+            name (str):
+                Required. The resource name of the link:
+
+                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.logging_v2.types.Link:
+                Describes a link connected to an
+                analytics enabled bucket.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a logging_config.GetLinkRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + The request object. The parameters to ``ListExclusions``. + parent (str): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListExclusionsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExclusionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -3351,6 +4072,168 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 3c5ce7754242..4af8eaf1c980 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -411,6 +411,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListLinksPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[logging_config.Link]: + for page in self.pages: + yield from page.links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLinksAsyncPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[logging_config.Link]: + async def async_generator(): + async for page in self.pages: + for response in page.links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListExclusionsPager: """A pager for iterating through ``list_exclusions`` requests. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 93a29df099b2..fd02975e4069 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 53046583b8b6..73db34bed102 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
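The base.py hunks below register the new link and async-bucket RPCs in _prep_wrapped_messages and declare transport properties for them. As a standalone sketch of what that wrapping does (toy callable, with retry values mirroring the ones used in this file; not code from the patch):

    from google.api_core import exceptions as core_exceptions
    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    def flaky_rpc(request, timeout=None, metadata=()):
        # Stand-in for a transport-bound gRPC callable.
        return request

    # Wrapping attaches a default retry/timeout so every call site gets the
    # same policy without repeating it.
    wrapped = gapic_v1.method.wrap_method(
        flaky_rpc,
        default_retry=retries.Retry(
            initial=0.1,
            maximum=60.0,
            multiplier=1.3,
            predicate=retries.if_exception_type(
                core_exceptions.ServiceUnavailable,
            ),
        ),
        default_timeout=60.0,
    )

    # Callers can still override the defaults on a per-call basis:
    wrapped("ping", timeout=5.0)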
@@ -141,6 +141,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_bucket_async: gapic_v1.method.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), self.create_bucket: gapic_v1.method.wrap_method( self.create_bucket, default_timeout=None, @@ -255,6 +265,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_link: gapic_v1.method.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), self.list_exclusions: gapic_v1.method.wrap_method( self.list_exclusions, default_retry=retries.Retry( @@ -375,6 +405,24 @@ def get_bucket( ]: raise NotImplementedError() + @property + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_bucket( self, @@ -507,6 +555,45 @@ def delete_sink( ]: raise NotImplementedError() + @property + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_link( + self, + ) -> Callable[ + [logging_config.GetLinkRequest], + Union[logging_config.Link, Awaitable[logging_config.Link]], + ]: + raise NotImplementedError() + @property def list_exclusions( self, @@ -600,6 +687,33 @@ def copy_log_entries( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 97c220686c9a..b82203cf6ab5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -301,6 +301,67 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_bucket_async"] + + @property + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_bucket_async"] + @property def create_bucket( self, @@ -335,12 +396,7 @@ def update_bucket( ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. 
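[editor's note] To illustrate the new CreateBucketAsync surface, a hedged sketch follows. It assumes the usual GAPIC convention that the client-level method wraps the returned operations_pb2.Operation in a google.api_core long-running operation; the project, location, and bucket id are hypothetical placeholders.

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    operation = client.create_bucket_async(
        request=logging_config.CreateBucketRequest(
            parent="projects/my-project/locations/global",  # hypothetical
            bucket_id="my-bucket",  # hypothetical
            bucket=logging_config.LogBucket(retention_days=30),
        )
    )
    bucket = operation.result()  # block until the LRO resolves to a LogBucket
    print(bucket.name)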
@@ -703,6 +759,114 @@ def delete_sink( ) return self._stubs["delete_sink"] + @property + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_link" not in self._stubs: + self._stubs["create_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_link"] + + @property + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_link"] + + @property + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + ~.ListLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_links" not in self._stubs: + self._stubs["list_links"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs["list_links"] + + @property + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + ~.Link]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_link" not in self._stubs: + self._stubs["get_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs["get_link"] + @property def list_exclusions( self, @@ -1025,6 +1189,59 @@ def copy_log_entries( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 0d0737576858..f37ba9cb18ba 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -309,6 +309,71 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_bucket_async"] + + @property + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_bucket_async"] + @property def create_bucket( self, @@ -347,12 +412,7 @@ def update_bucket( ]: r"""Return a callable for the update bucket method over gRPC. - Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -727,6 +787,120 @@ def delete_sink( ) return self._stubs["delete_sink"] + @property + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_link" not in self._stubs: + self._stubs["create_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_link"] + + @property + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_link"] + + @property + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse] + ]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + Awaitable[~.ListLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_links" not in self._stubs: + self._stubs["list_links"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs["list_links"] + + @property + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + Awaitable[~.Link]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_link" not in self._stubs: + self._stubs["get_link"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs["get_link"] + @property def list_exclusions( self, @@ -1065,5 +1239,58 @@ def copy_log_entries( def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 41b2a2d15530..134609c9349d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index bd8ba63f060b..dcf622ac2641 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
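[editor's note] Before moving to the LoggingServiceV2 client changes, a hedged async counterpart for the UpdateBucketAsync stub defined above. It assumes the async client returns an awaitable long-running operation, per the usual GAPIC asyncio convention; all names are hypothetical placeholders.

    import asyncio

    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import field_mask_pb2


    async def extend_retention() -> None:
        client = ConfigServiceV2AsyncClient()
        operation = await client.update_bucket_async(
            request=logging_config.UpdateBucketRequest(
                name="projects/my-project/locations/global/buckets/my-bucket",  # hypothetical
                bucket=logging_config.LogBucket(retention_days=90),
                update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
            )
        )
        bucket = await operation.result()  # await the LRO's LogBucket result
        print(bucket.retention_days)


    asyncio.run(extend_retention())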
@@ -49,6 +49,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -598,21 +599,19 @@ async def sample_list_log_entries(): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -848,7 +847,7 @@ async def sample_list_logs(): request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` @@ -867,6 +866,7 @@ async def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1027,7 +1027,169 @@ def request_generator(): # Done; return the response. return response - async def __aenter__(self): + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 7949a41a9fb4..ce60602c663b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -52,6 +52,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -796,21 +797,19 @@ def sample_list_log_entries(): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -1027,7 +1026,7 @@ def sample_list_logs(): request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): - Required. The resource name that owns the logs: + Required. 
The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` @@ -1046,6 +1045,7 @@ def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1193,6 +1193,168 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index e1e7188cd167..02dcf93b3a27 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 4e0163fe6542..d7dae810bf04 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
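[editor's note] A minimal sketch of the operations mixin just added to both LoggingServiceV2 clients. Because the request is a raw protobuf rather than a proto-plus type, a dict is accepted and coerced via keyword expansion, exactly as the methods above do; the resource name is a hypothetical placeholder.

    from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client

    client = LoggingServiceV2Client()
    response = client.list_operations(
        request={"name": "projects/my-project/locations/global"}  # hypothetical; coerced
    )
    for op in response.operations:
        print(op.name, op.done)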
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 22665b15e9fd..a256ca284d07 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -299,6 +300,33 @@ def tail_log_entries( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a6878b6fa52b..775fcbf98281 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -411,6 +412,59 @@ def tail_log_entries( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 2e8f76017345..5f1acd97452f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -422,5 +423,58 @@ def tail_log_entries( def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index fc0615f19919..3b688ccb4362 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index bcffd416c779..a120c352bdbe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -46,6 +46,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -277,6 +278,7 @@ async def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -406,6 +408,7 @@ async def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -549,6 +552,7 @@ async def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -680,6 +684,7 @@ async def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -851,7 +856,169 @@ async def sample_delete_log_metric(): metadata=metadata, ) - async def __aenter__(self): + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index a75c1e9358cc..098014bcd17c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -50,6 +50,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport @@ -499,6 +500,7 @@ def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -617,6 +619,7 @@ def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -749,6 +752,7 @@ def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -880,6 +884,7 @@ def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -1042,6 +1047,168 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 2c647cda1810..dd23001cc906 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index e28f020df0c7..57d82514d79f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 6e0f1698e105..f8c4b954fbd7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
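# Editor's sketch (not part of this patch): how the operations mixins added to the
# metrics clients above might be used. Project, location, and operation names are
# illustrative placeholders; credentials are assumed to be configured.
from google.cloud import logging_v2
from google.longrunning import operations_pb2

client = logging_v2.MetricsServiceV2Client()

# List long-running operations under a parent resource.
response = client.list_operations(
    request=operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/global"
    )
)
for operation in response.operations:
    print(operation.name, operation.done)

# Poll a single operation; dict requests are coerced via keyword expansion.
operation = client.get_operation(
    request={"name": "projects/my-project/locations/global/operations/my-operation"}
)

# Ask the server to cancel it; cancellation is best effort and not guaranteed.
if not operation.done:
    client.cancel_operation(request={"name": operation.name})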
@@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -258,6 +259,33 @@ def delete_log_metric( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 7eac78848617..9426a670c598 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -365,6 +366,59 @@ def delete_log_metric( def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
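+ # The callable is memoized in ``self._stubs``, so repeated property access
+ # reuses a single stub per channel rather than re-creating it.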
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 99764a592906..1756f9a1d19f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -375,5 +376,58 @@ def delete_log_metric( def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 43b5674dd412..38dec7cdf17a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,33 +34,44 @@ WriteLogEntriesResponse, ) from .logging_config import ( + BigQueryDataset, BigQueryOptions, + BucketMetadata, CmekSettings, CopyLogEntriesMetadata, CopyLogEntriesRequest, CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, + CreateLinkRequest, CreateSinkRequest, CreateViewRequest, DeleteBucketRequest, DeleteExclusionRequest, + DeleteLinkRequest, DeleteSinkRequest, DeleteViewRequest, GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetLinkRequest, GetSettingsRequest, GetSinkRequest, GetViewRequest, + IndexConfig, + Link, + LinkMetadata, ListBucketsRequest, ListBucketsResponse, ListExclusionsRequest, ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, ListSinksRequest, ListSinksResponse, ListViewsRequest, ListViewsResponse, + LocationMetadata, LogBucket, LogExclusion, LogSink, @@ -73,6 +84,7 @@ UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, + IndexType, LifecycleState, OperationState, ) @@ -103,33 +115,44 @@ "WriteLogEntriesPartialErrors", "WriteLogEntriesRequest", "WriteLogEntriesResponse", + "BigQueryDataset", "BigQueryOptions", + "BucketMetadata", "CmekSettings", "CopyLogEntriesMetadata", "CopyLogEntriesRequest", "CopyLogEntriesResponse", "CreateBucketRequest", "CreateExclusionRequest", + "CreateLinkRequest", "CreateSinkRequest", "CreateViewRequest", "DeleteBucketRequest", "DeleteExclusionRequest", + "DeleteLinkRequest", "DeleteSinkRequest", "DeleteViewRequest", "GetBucketRequest", "GetCmekSettingsRequest", "GetExclusionRequest", + "GetLinkRequest", "GetSettingsRequest", "GetSinkRequest", "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", "ListBucketsRequest", "ListBucketsResponse", "ListExclusionsRequest", "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", 
"ListSinksRequest", "ListSinksResponse", "ListViewsRequest", "ListViewsResponse", + "LocationMetadata", "LogBucket", "LogExclusion", "LogSink", @@ -142,6 +165,7 @@ "UpdateSettingsRequest", "UpdateSinkRequest", "UpdateViewRequest", + "IndexType", "LifecycleState", "OperationState", "CreateLogMetricRequest", diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 0536e4db555b..98f768fb27c5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -92,6 +92,7 @@ class LogEntry(proto.Message): protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. + The following protocol buffer types are supported; user-defined types are not supported: @@ -169,18 +170,54 @@ class LogEntry(proto.Message): Optional. Information about an operation associated with the log entry, if applicable. trace (str): - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: - ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + Optional. The REST resource name of the trace being written + to `Cloud Trace `__ in + association with this log entry. For example, if your trace + data is stored in the Cloud project "my-trace-project" and + if the service that is creating the log entry receives a + trace header that includes the trace ID "12345", then the + service should use + "projects/my-tracing-project/traces/12345". + + The ``trace`` field provides the link between logs and + traces. By using this field, you can navigate from a log + entry to a trace. span_id (str): - Optional. The span ID within the trace associated with the - log entry. - - For Trace spans, this is the same format that the Trace API - v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as ``000000000000004a``. + Optional. The ID of the `Cloud + Trace `__ span associated + with the current operation in which the log is being + written. For example, if a span has the REST resource name + of + "projects/some-project/traces/some-trace/spans/some-span-id", + then the ``span_id`` field is "some-span-id". + + A + `Span `__ + represents a single operation within a trace. Whereas a + trace may involve multiple different microservices running + on multiple different machines, a span generally corresponds + to a single logical operation being performed in a single + instance of a microservice on one specific machine. Spans + are the nodes within the tree that is a trace. + + Applications that are `instrumented for + tracing `__ will + generally assign a new, unique span ID on each incoming + request. It is also common to create and record additional + spans corresponding to internal processing elements as well + as issuing requests to dependencies. + + The span ID is expected to be a 16-character, hexadecimal + encoding of an 8-byte array and should not be zero. It + should be unique within the trace and should, ideally, be + generated in a manner that is uniformly random. 
+ + Example values: + + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 4d27176d1cc6..02a17fc7b505 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -144,13 +144,15 @@ class WriteLogEntriesRequest(proto.Message): entries in this list, rather than calling this method for each individual log entry. partial_success (bool): - Optional. Whether valid entries should be written even if - some other entries fail due to INVALID_ARGUMENT or - PERMISSION_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. + Optional. Whether a batch's valid entries should be written + even if some other entry failed due to a permanent error + such as INVALID_ARGUMENT or PERMISSION_DENIED. If any entry + failed, then the response status is the response status of + one of the failed entries. The response will include error + details in ``WriteLogEntriesPartialErrors.log_entry_errors`` + keyed by the entries' zero-based index in the ``entries``. + Failed requests for which no entries are written will not + include per-entry errors. dry_run (bool): Optional. If true, the request should expect normal response, but the entries won't be @@ -235,17 +237,15 @@ class ListLogEntriesRequest(proto.Message): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to - this list. + this list. A maximum of 100 resources may be specified in a + single request. filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not listed in ``resource_names`` will cause the filter to - return no results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. order_by (str): Optional. How the results should be sorted. Presently, the only permitted values are ``"timestamp asc"`` (default) and @@ -393,25 +393,14 @@ class ListLogsRequest(proto.Message): Attributes: parent (str): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - ``folders/[FOLDER_ID]`` - page_size (int): - Optional. 
The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. resource_names (MutableSequence[str]): - Optional. The resource name that owns the logs: + Optional. List of resource names to list logs for: - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` @@ -424,12 +413,30 @@ class ListLogsRequest(proto.Message): - ``organizations/[ORGANIZATION_ID]`` - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - ``folders/[FOLDER_ID]`` + + The resource name in the ``parent`` field is added to this + list. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. """ parent: str = proto.Field( proto.STRING, number=1, ) + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) page_size: int = proto.Field( proto.INT32, number=2, @@ -438,10 +445,6 @@ class ListLogsRequest(proto.Message): proto.STRING, number=3, ) - resource_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) class ListLogsResponse(proto.Message): @@ -493,15 +496,12 @@ class TailLogEntriesRequest(proto.Message): - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Filters `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not in ``resource_names`` will cause the filter to return no - results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. buffer_window (google.protobuf.duration_pb2.Duration): Optional. 
The amount of time to buffer log entries at the server before being returned to
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py
index 9ed3a767c731..7826bd0264d9 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -26,11 +26,15 @@
__protobuf__ = proto.module( package="google.logging.v2", manifest={
- "LifecycleState",
"OperationState",
+ "LifecycleState",
+ "IndexType",
+ "IndexConfig",
"LogBucket", "LogView", "LogSink",
+ "BigQueryDataset",
+ "Link",
"BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse",
@@ -51,6 +55,11 @@
"CreateSinkRequest", "UpdateSinkRequest", "DeleteSinkRequest",
+ "CreateLinkRequest",
+ "DeleteLinkRequest",
+ "ListLinksRequest",
+ "ListLinksResponse",
+ "GetLinkRequest",
"LogExclusion", "ListExclusionsRequest", "ListExclusionsResponse",
@@ -67,29 +76,13 @@
"CopyLogEntriesRequest", "CopyLogEntriesMetadata", "CopyLogEntriesResponse",
+ "BucketMetadata",
+ "LinkMetadata",
+ "LocationMetadata",
}, )
-class LifecycleState(proto.Enum):
- r"""LogBucket lifecycle states.
-
- Values:
- LIFECYCLE_STATE_UNSPECIFIED (0):
- Unspecified state. This is only used/useful
- for distinguishing unset values.
- ACTIVE (1):
- The normal and active state.
- DELETE_REQUESTED (2):
- The resource has been marked for deletion by
- the user. For some resources (e.g. buckets),
- this can be reversed by an un-delete operation.
- """
- LIFECYCLE_STATE_UNSPECIFIED = 0
- ACTIVE = 1
- DELETE_REQUESTED = 2
-
-
class OperationState(proto.Enum): r"""List of different operation states. High level state of the operation. This is used to report the
@@ -123,6 +116,93 @@ class OperationState(proto.Enum):
OPERATION_STATE_CANCELLED = 6
+
+class LifecycleState(proto.Enum):
+ r"""LogBucket lifecycle states.
+
+ Values:
+ LIFECYCLE_STATE_UNSPECIFIED (0):
+ Unspecified state. This is only used/useful
+ for distinguishing unset values.
+ ACTIVE (1):
+ The normal and active state.
+ DELETE_REQUESTED (2):
+ The resource has been marked for deletion by
+ the user. For some resources (e.g. buckets),
+ this can be reversed by an un-delete operation.
+ UPDATING (3):
+ The resource has been marked for an update by
+ the user. It will remain in this state until the
+ update is complete.
+ CREATING (4):
+ The resource has been marked for creation by
+ the user. It will remain in this state until the
+ creation is complete.
+ FAILED (5):
+ The resource is in an INTERNAL error state.
+ """
+ LIFECYCLE_STATE_UNSPECIFIED = 0
+ ACTIVE = 1
+ DELETE_REQUESTED = 2
+ UPDATING = 3
+ CREATING = 4
+ FAILED = 5
+
+
+class IndexType(proto.Enum):
+ r"""IndexType is used for custom indexing. It describes the type
+ of an indexed field.
+
+ Values:
+ INDEX_TYPE_UNSPECIFIED (0):
+ The index's type is unspecified.
+ INDEX_TYPE_STRING (1):
+ The index is a string-type index.
+ INDEX_TYPE_INTEGER (2):
+ The index is an integer-type index.
+ """
+ INDEX_TYPE_UNSPECIFIED = 0
+ INDEX_TYPE_STRING = 1
+ INDEX_TYPE_INTEGER = 2
+
+
+class IndexConfig(proto.Message):
+ r"""Configuration for an indexed field.
+
+ Attributes:
+ field_path (str):
+ Required.
The LogEntry field path to index. + + Note that some paths are automatically indexed, and other + paths are not eligible for indexing. See `indexing + documentation `__ + for details. + + For example: ``jsonPayload.request.status`` + type_ (google.cloud.logging_v2.types.IndexType): + Required. The type of data in this index. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the index was + last modified. + This is used to return the timestamp, and will + be ignored if supplied during update. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "IndexType" = proto.Field( + proto.ENUM, + number=2, + enum="IndexType", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class LogBucket(proto.Message): r"""Describes a repository in which log entries are stored. @@ -160,11 +240,17 @@ class LogBucket(proto.Message): days will be used. locked (bool): Whether the bucket is locked. + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + analytics_enabled (bool): + Whether log analytics is enabled for this + bucket. + Once enabled, log analytics features cannot be + disabled. restricted_fields (MutableSequence[str]): Log entry field paths that are denied access in this bucket. @@ -175,6 +261,9 @@ class LogBucket(proto.Message): Restricting a repeated field will restrict all values. Adding a parent will block all child fields. (e.g. ``foo.bar`` will block ``foo.bar.baz``) + index_configs (MutableSequence[google.cloud.logging_v2.types.IndexConfig]): + A list of indexed fields and related + configuration data. cmek_settings (google.cloud.logging_v2.types.CmekSettings): The CMEK settings of the log bucket. If present, new log entries written to this log @@ -216,10 +305,19 @@ class LogBucket(proto.Message): number=12, enum="LifecycleState", ) + analytics_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) restricted_fields: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=15, ) + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="IndexConfig", + ) cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, @@ -332,6 +430,7 @@ class LogSink(proto.Message): ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. + The maximum length of the description is 8000 characters. disabled (bool): @@ -348,7 +447,9 @@ class LogSink(proto.Message): writer_identity (str): Output only. An IAM identity—a service account or group—under which Cloud Logging writes the exported log - entries to the sink's destination. This field is set by + entries to the sink's destination. This field is either set + by specifying ``custom_writer_identity`` or set + automatically by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -363,7 +464,7 @@ class LogSink(proto.Message): the appropriate IAM roles to assign to the identity. Sinks that have a destination that is a log bucket in the - same project as the sink do not have a writer_identity and + same project as the sink cannot have a writer_identity and no additional permissions are required. include_children (bool): Optional. 
This field applies only to sinks owned by
@@ -472,6 +573,90 @@ class VersionFormat(proto.Enum):
)
+
+class BigQueryDataset(proto.Message):
+ r"""Describes a BigQuery dataset that was created by a link.
+
+ Attributes:
+ dataset_id (str):
+ Output only. The full resource name of the BigQuery dataset.
+ The DATASET_ID will match the ID of the link, so the link
+ must match the naming restrictions of BigQuery datasets
+ (alphanumeric characters and underscores only).
+
+ The dataset will have a resource path of
+ "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET_ID]".
+ """
+
+ dataset_id: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class Link(proto.Message):
+ r"""Describes a link connected to an analytics enabled bucket.
+
+ Attributes:
+ name (str):
+ The resource name of the link. The name can have up to 100
+ characters. A valid link id (at the end of the link name)
+ must only have alphanumeric characters and underscores
+ within it.
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+
+ For example:
+
+ ``projects/my-project/locations/global/buckets/my-bucket/links/my_link``
+ description (str):
+ Describes this link.
+
+ The maximum length of the description is 8000
+ characters.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The creation timestamp of the
+ link.
+ lifecycle_state (google.cloud.logging_v2.types.LifecycleState):
+ Output only. The resource lifecycle state.
+ bigquery_dataset (google.cloud.logging_v2.types.BigQueryDataset):
+ The information of a BigQuery Dataset. When a
+ link is created, a BigQuery dataset is created
+ along with it, in the same project as the
+ LogBucket it's linked to. This dataset will also
+ have BigQuery Views corresponding to the
+ LogViews in the bucket.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=timestamp_pb2.Timestamp,
+ )
+ lifecycle_state: "LifecycleState" = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum="LifecycleState",
+ )
+ bigquery_dataset: "BigQueryDataset" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message="BigQueryDataset",
+ )
+
+
class BigQueryOptions(proto.Message): r"""Options that change functionality of a sink exporting data to BigQuery.
@@ -827,7 +1012,10 @@ class CreateViewRequest(proto.Message):
``"projects/my-project/locations/global/buckets/my-bucket"``
view_id (str):
- Required. The id to use for this view.
+ Required. A client-assigned identifier such as
+ ``"my-view"``. Identifiers are limited to 100 characters and
+ can include only letters, digits, underscores, hyphens, and
+ periods.
view (google.cloud.logging_v2.types.LogView):
Required. The new view.
"""
@@ -1186,6 +1374,144 @@ class DeleteSinkRequest(proto.Message):
)
+
+class CreateLinkRequest(proto.Message):
+ r"""The parameters to CreateLink.
+
+ Attributes:
+ parent (str):
+ Required. The full resource name of the bucket to create a
+ link for.
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
+ "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
+ "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
+ "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]".
+ link (google.cloud.logging_v2.types.Link):
+ Required. The new link.
+ link_id (str):
+ Required. The ID to use for the link. The link_id can have
+ up to 100 characters. A valid link_id must only have
+ alphanumeric characters and underscores within it.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ link: "Link" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="Link",
+ )
+ link_id: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class DeleteLinkRequest(proto.Message):
+ r"""The parameters to DeleteLink.
+
+ Attributes:
+ name (str):
+ Required. The full resource name of the link to delete.
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]".
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class ListLinksRequest(proto.Message):
+ r"""The parameters to ListLinks.
+
+ Attributes:
+ parent (str):
+ Required. The parent resource whose links are to be listed:
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/"
+ "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+ "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+ "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+ page_token (str):
+ Optional. If present, then retrieve the next batch of
+ results from the preceding call to this method.
+ ``pageToken`` must be the value of ``nextPageToken`` from
+ the previous response.
+ page_size (int):
+ Optional. The maximum number of results to
+ return from this request.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ page_token: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ page_size: int = proto.Field(
+ proto.INT32,
+ number=3,
+ )
+
+
+class ListLinksResponse(proto.Message):
+ r"""The response from ListLinks.
+
+ Attributes:
+ links (MutableSequence[google.cloud.logging_v2.types.Link]):
+ A list of links.
+ next_page_token (str):
+ If there might be more results than those appearing in this
+ response, then ``nextPageToken`` is included. To get the
+ next set of results, call the same method again using the
+ value of ``nextPageToken`` as ``pageToken``.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ links: MutableSequence["Link"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message="Link",
+ )
+ next_page_token: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class GetLinkRequest(proto.Message):
+ r"""The parameters to GetLink.
+
+ Attributes:
+ name (str):
+ Required.
The resource name of the link:
+
+ "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
class LogExclusion(proto.Message): r"""Specifies a set of log entries that are filtered out by a sink. If your Google Cloud resource receives a large volume of log entries,
@@ -1611,6 +1937,27 @@ class CmekSettings(proto.Message):
See `Enabling CMEK for Log Router `__ for more information.
+ kms_key_version_name (str):
+ The CryptoKeyVersion resource name for the configured Cloud
+ KMS key.
+
+ KMS key name format:
+
+ ::
+
+ "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]/cryptoKeyVersions/[VERSION]"
+
+ For example:
+
+ ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key/cryptoKeyVersions/1"``
+
+ This is a read-only field used to convey the specific
+ configured CryptoKeyVersion of ``kms_key``. It will be
+ populated in cases where the CMEK settings are bound to a
+ single key version.
+
+ If this field is populated, the ``kms_key`` is tied to a
+ specific CryptoKeyVersion.
service_account_id (str): Output only. The service account that will be used by the Log Router to access your Cloud KMS key.
@@ -1635,6 +1982,10 @@
proto.STRING, number=2, )
+ kms_key_version_name: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
service_account_id: str = proto.Field( proto.STRING, number=3,
@@ -1937,4 +2288,131 @@ class CopyLogEntriesResponse(proto.Message):
)
+
+class BucketMetadata(proto.Message):
+ r"""Metadata for LongRunningUpdateBucket Operations.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
+ The create time of an operation.
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
+ The end time of an operation.
+ state (google.cloud.logging_v2.types.OperationState):
+ State of an operation.
+ create_bucket_request (google.cloud.logging_v2.types.CreateBucketRequest):
+ LongRunningCreateBucket RPC request.
+
+ This field is a member of `oneof`_ ``request``.
+ update_bucket_request (google.cloud.logging_v2.types.UpdateBucketRequest):
+ LongRunningUpdateBucket RPC request.
+
+ This field is a member of `oneof`_ ``request``.
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: "OperationState" = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + create_bucket_request: "CreateBucketRequest" = proto.Field( + proto.MESSAGE, + number=4, + oneof="request", + message="CreateBucketRequest", + ) + update_bucket_request: "UpdateBucketRequest" = proto.Field( + proto.MESSAGE, + number=5, + oneof="request", + message="UpdateBucketRequest", + ) + + +class LinkMetadata(proto.Message): + r"""Metadata for long running Link operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_link_request (google.cloud.logging_v2.types.CreateLinkRequest): + CreateLink RPC request. + + This field is a member of `oneof`_ ``request``. + delete_link_request (google.cloud.logging_v2.types.DeleteLinkRequest): + DeleteLink RPC request. + + This field is a member of `oneof`_ ``request``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: "OperationState" = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + create_link_request: "CreateLinkRequest" = proto.Field( + proto.MESSAGE, + number=4, + oneof="request", + message="CreateLinkRequest", + ) + delete_link_request: "DeleteLinkRequest" = proto.Field( + proto.MESSAGE, + number=5, + oneof="request", + message="DeleteLinkRequest", + ) + + +class LocationMetadata(proto.Message): + r"""Cloud Logging specific location metadata. + + Attributes: + log_analytics_enabled (bool): + Indicates whether or not Log Analytics + features are supported in the given location. + """ + + log_analytics_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 0d31860a01c3..74d167d5beea 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -42,6 +42,7 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. 
The distribution records the statistics of the extracted values along with an
@@ -79,6 +80,17 @@ class LogMetric(proto.Message):
"resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters.
+ bucket_name (str):
+ Optional. The resource name of the Log Bucket that owns the
+ Log Metric. Only Log Buckets in projects are supported. The
+ bucket has to be in the same project as the metric.
+
+ For example:
+
+ ``projects/my-project/locations/global/buckets/my-bucket``
+
+ If empty, then the Log Metric is considered a non-Bucket Log
+ Metric.
disabled (bool): Optional. If set to True, then this metric is disabled and it does not generate any points.
@@ -113,7 +125,7 @@ class LogMetric(proto.Message):
distribution logs-based metric to extract the values to record from a log entry. Two functions are supported for value extraction: ``EXTRACT(field)`` or
- ``REGEXP_EXTRACT(field, regex)``. The argument are:
+ ``REGEXP_EXTRACT(field, regex)``. The arguments are:
1. field: The name of the log entry field from which the value is to be extracted.
@@ -142,7 +154,7 @@ class LogMetric(proto.Message):
``value_extractor`` field. The extracted value is converted to the type defined in the
- label descriptor. If the either the extraction or the type
+ label descriptor. If either the extraction or the type
conversion fails, the label will have a default value. The default value for a string label is an empty string, for an integer label is 0, and for a boolean label is ``false``.
@@ -193,6 +205,10 @@
proto.STRING, number=3, )
+ bucket_name: str = proto.Field(
+ proto.STRING,
+ number=13,
+ )
disabled: bool = proto.Field( proto.BOOL, number=12,
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
index dead26544520..806e937ddae0 100644
--- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
index 949dde286ac7..ca0209f00fcb 100644
--- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
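# Editor's sketch (not part of this patch): a metric exercising the documentation
# changes above -- the new ``bucket_name`` field and a value extractor. The metric
# name, filter, and resource names are illustrative placeholders; a real
# distribution metric would also configure ``metric_descriptor`` accordingly.
from google.cloud import logging_v2

client = logging_v2.MetricsServiceV2Client()

metric = logging_v2.LogMetric(
    name="request_latency",
    filter='resource.type="gae_app" AND jsonPayload.latency_ms>0',
    # EXTRACT(field) and REGEXP_EXTRACT(field, regex) are the two supported
    # value-extraction functions.
    value_extractor="EXTRACT(jsonPayload.latency_ms)",
    # New in this change: scope the metric to a log bucket. Leaving this empty
    # keeps it a regular (non-bucket) logs-based metric.
    bucket_name="projects/my-project/locations/global/buckets/my-bucket",
)

created = client.create_log_metric(parent="projects/my-project", metric=metric)
print(created.name)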
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index f399b226f588..c1f028fb0464 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py new file mode 100644 index 000000000000..8fe42df3c81a --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py new file mode 100644 index 000000000000..1ce698784552 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 374173f52b55..dc73253f4897 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 8f14d777f8cb..17490c61ef37 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 11bd92ad58fa..75ec32f48eeb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
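# Editor's sketch (not part of this patch): combining the CreateBucketAsync sample
# above with the bucket fields introduced earlier in this diff (analytics_enabled,
# index_configs). Resource names and values are illustrative placeholders.
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()

bucket = logging_v2.LogBucket(
    retention_days=30,
    # Per the field docs, Log Analytics cannot be disabled once enabled.
    analytics_enabled=True,
    index_configs=[
        logging_v2.IndexConfig(
            field_path="jsonPayload.request.status",
            type_=logging_v2.IndexType.INDEX_TYPE_INTEGER,
        )
    ],
)

operation = client.create_bucket_async(
    request=logging_v2.CreateBucketRequest(
        parent="projects/my-project/locations/global",
        bucket_id="my-bucket",
        bucket=bucket,
    )
)
print(operation.result())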
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py new file mode 100644 index 000000000000..8ceb5298553a --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py new file mode 100644 index 000000000000..604ff66269c8 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 0fc007986813..277e83055225 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index d6d2a0d7db7a..a4df0299426a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
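CreateLink differs from the other create RPCs in this patch in that its parent is a log-bucket resource name rather than a bare project, since a link exposes an analytics-enabled bucket as a BigQuery linked dataset. A sync sketch of that shape; every resource segment below is a placeholder:

from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
request = logging_v2.CreateLinkRequest(
    # The parent is a full log-bucket name, not a project.
    parent="projects/[PROJECT]/locations/global/buckets/my-bucket",
    link_id="my_link",
)
operation = client.create_link(request=request)
link = operation.result()  # blocks until the long-running operation finishes
print(link.name)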
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 016f3e70b081..5cd201276977 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 3ee9567f1a8c..cd3ca94e19c6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index 6633aa3c4344..fcffb6db861d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index ec39ca1a09b7..a8f902116832 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 9d6146c57c8b..b426d4703e1c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index bc051e4e025e..5d98f782bde2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py new file mode 100644 index 000000000000..8c7a934a735d --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py new file mode 100644 index 000000000000..dfa59b30742b --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 62a27ea1d1a8..11d91947e3bc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index fa7d6f6e7b91..bf9875b0aa94 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 2fed68bbcbe9..fe9c7031ef46 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
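DeleteLink is likewise long-running and takes the full link resource name. A sketch with every name segment a placeholder; waiting on result() surfaces any server-side failure even though a successful delete carries no payload:

from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
# Hand-assembled resource name; all segments are placeholders.
name = "projects/[PROJECT]/locations/global/buckets/my-bucket/links/my_link"
operation = client.delete_link(request=logging_v2.DeleteLinkRequest(name=name))
operation.result()  # raises on failure; success returns no payload
print("link deleted")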
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 53bda04c937e..b5539c04f291 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 1a91ecdd63d9..c29c35a4e213 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 337050c45a7e..818ab646fdf3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 6998c4b4af97..3ae13401d749 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index a91ec6042d7a..499d68bdbf44 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 83a5bcda4016..03b7dc7d8f71 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index 913ec9968d8c..a445ed396c8f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py new file mode 100644 index 000000000000..ddc3d131f4c7 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py new file mode 100644 index 000000000000..3a7643b3a273 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index c095649bc27a..4ee968e8155d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index 2b5350a5a5c7..a3e018440c2a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 0739e175be61..b6fe5b11e08f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
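GetLink is a plain unary call, so the new samples need no operation handling. When adapting them, the usual extra step is handling NotFound for a name that does not resolve; a sketch with a placeholder name:

from google.api_core import exceptions
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
try:
    link = client.get_link(
        request=logging_v2.GetLinkRequest(
            name="projects/[PROJECT]/locations/global/buckets/my-bucket/links/my_link",
        )
    )
    print(link)
except exceptions.NotFound:
    # Raised when the link id or any parent segment does not exist.
    print("no such link")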
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ff34156f3826..ecebaf1194ff 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5de975ecc9cd..5992e53ee4aa 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index a9818b572059..14d8679bce7f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 614e9ec66b6c..dfbc3b411480 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 0c7912f7f038..8626f7cae9fd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index a1aa5ed6f4dc..ad42edff534e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 8cdac9d12438..727723a7a559 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py new file mode 100644 index 000000000000..7eccffaa6bae --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py new file mode 100644 index 000000000000..a2f98d69d320 --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index ea81f5c8b29c..b642d38eec23 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 05a35323d6bb..b4fc92452254 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index c39fb2d9e0a1..1542a5a387f2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
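The ListLinks pagers hide page tokens: iterating the returned pager fetches subsequent pages on demand, which is why the async sample uses async for. A sync sketch with a placeholder parent; page_size is optional and caps each page, not the total:

from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
pager = client.list_links(
    request=logging_v2.ListLinksRequest(
        parent="projects/[PROJECT]/locations/global/buckets/my-bucket",
        page_size=50,  # optional; limits each page, not the overall result
    )
)
for link in pager:  # page boundaries are crossed transparently
    print(link.name)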
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 270e5c14578f..b273c465d3ec 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index eeca015d1b41..d2695708ddd3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 6355e2a62226..8d25c7d33f73 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 450f5662a3fc..e1c741b67075 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py new file mode 100644 index 000000000000..7dde59dcdd4f --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py new file mode 100644 index 000000000000..2ecaf8df26dd --- /dev/null +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 81aa0fb920f4..7b4a3c597f2b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index e1a2421fec56..96fc8ff9788c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
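As their NOTE warns, the UpdateBucketAsync samples send a request carrying only a name; a real call would pair the changed fields with an update_mask naming them. A sketch of that shape, where the bucket name and retention value are placeholders:

from google.cloud import logging_v2
from google.protobuf import field_mask_pb2

client = logging_v2.ConfigServiceV2Client()
request = logging_v2.UpdateBucketRequest(
    name="projects/[PROJECT]/locations/global/buckets/my-bucket",  # placeholder
    bucket=logging_v2.LogBucket(retention_days=60),
    # Only fields listed in the mask are written; others are left untouched.
    update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
)
operation = client.update_bucket_async(request=request)
bucket = operation.result()  # blocks until the operation completes
print(bucket.retention_days)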
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 1828ce84c3a7..9bbc7dcb1c5d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 873fecb737b1..d8b8d7f7bca5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 958572b9301a..36d5776e36dc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index 531b431f1b52..b51dd81cc946 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 3381a4aadc71..1e7aefce8f6a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index 400d57897c58..aef8473798e3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index cc3a1be435c6..e84230202450 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 8ccc9f3c3319..f143a772c3d3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 33014bf23658..3867d3702391 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 209dd510d93c..c6469053baa4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 86f6c9ccb3aa..1e4e28abc08c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d968835f75a2..d5cfe190c8dd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
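By contrast, the delete_log samples above are the simplest of the set: the RPC takes only the log name and returns nothing. A sketch with a placeholder project:

    from google.cloud import logging_v2

    def sample_delete_log():
        client = logging_v2.LoggingServiceV2Client()

        request = logging_v2.DeleteLogRequest(
            log_name="projects/my-project/logs/my-log",
        )

        # delete_log returns no payload; success is the absence of an error.
        client.delete_log(request=request)
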
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d200793fa90c..d24923cb1e75 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index eebad0bfd01e..71859024dbb7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 8d132377e740..5a5ff140c42a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 4b99bc6f038d..519a2498ac3a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
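The list_logs samples above demonstrate the paginated pattern used by all the List* methods in this batch: the client method returns a pager that transparently fetches further pages as you iterate. A sketch with a placeholder parent:

    from google.cloud import logging_v2

    def sample_list_logs():
        client = logging_v2.LoggingServiceV2Client()

        request = logging_v2.ListLogsRequest(parent="projects/my-project")

        # The pager yields log names (strings), issuing additional
        # ListLogs requests behind the scenes as needed.
        page_result = client.list_logs(request=request)
        for log_name in page_result:
            print(log_name)
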
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index c54a2201fc14..ca97be4b3d86 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index f1afa6fae199..24e9e200951f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 29461c6f8613..dc9a545e7c4b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index bd7954828811..1a0d48664303 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
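The tail_log_entries samples above are the one streaming RPC in the batch: the method consumes an iterator of requests and returns a stream of responses. The generated sync sample is essentially this (placeholder resource names):

    from google.cloud import logging_v2

    def sample_tail_log_entries():
        client = logging_v2.LoggingServiceV2Client()

        request = logging_v2.TailLogEntriesRequest(
            resource_names=["projects/my-project"],
        )

        # The method expects an iterator of TailLogEntriesRequest objects;
        # here a generator yields a single request.
        def request_generator():
            yield request

        stream = client.tail_log_entries(requests=request_generator())
        for response in stream:
            print(response)
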
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index d28fbe7a037c..de4bfe6c822b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index d351980e9679..557d3229302a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index bb9a56579e6f..b9e9cade9e94 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index 54a73b14122e..fea40e7a4957 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
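The write_log_entries samples whose headers are bumped above look like this in full (a sketch: the log name and text payload are placeholders, and each entry needs some payload to be accepted):

    from google.cloud import logging_v2

    def sample_write_log_entries():
        client = logging_v2.LoggingServiceV2Client()

        entry = logging_v2.LogEntry()
        entry.log_name = "projects/my-project/logs/my-log"
        entry.text_payload = "hello from the generated sample"

        request = logging_v2.WriteLogEntriesRequest(entries=[entry])

        response = client.write_log_entries(request=request)
        print(response)
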
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index 73276ef182fb..ccf2983be6d8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index d6ef03c486b6..4b6984171895 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 6ab2bb57fd83..abb071c655da 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 92c92395a253..f280ec9dea62 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
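The metrics-service samples in this stretch cover the CRUD surface of LogMetric; create_log_metric is representative. A sketch with placeholder names, where `name` and `filter` are the two required fields of a metric:

    from google.cloud import logging_v2

    def sample_create_log_metric():
        client = logging_v2.MetricsServiceV2Client()

        metric = logging_v2.LogMetric()
        metric.name = "my-metric"
        metric.filter = "severity>=ERROR"

        request = logging_v2.CreateLogMetricRequest(
            parent="projects/my-project",
            metric=metric,
        )

        response = client.create_log_metric(request=request)
        print(response)
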
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 5a3e646926f7..bffbe10a8eac 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 9a794a4ccb6d..59bfeeaaaa3f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 39a6e72e3072..ed4dd0126e75 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
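The rest of the patch updates snippet_metadata_google.logging.v2.json, the machine-readable index of these samples: for each client method it records the request type and flattened parameters, the result type, the region tag, and "segments" giving the line ranges of each phase of the sample (CLIENT_INITIALIZATION, REQUEST_INITIALIZATION, REQUEST_EXECUTION, RESPONSE_HANDLING). To make those segment offsets concrete, the CreateSink entry below describes a sample of this shape (sketch with placeholder values):

    from google.cloud import logging_v2

    def sample_create_sink():
        # CLIENT_INITIALIZATION
        client = logging_v2.ConfigServiceV2Client()

        # REQUEST_INITIALIZATION
        sink = logging_v2.LogSink()
        sink.name = "my-sink"
        sink.destination = "storage.googleapis.com/my-bucket"
        request = logging_v2.CreateSinkRequest(
            parent="projects/my-project",
            sink=sink,
        )

        # REQUEST_EXECUTION
        response = client.create_sink(request=request)

        # RESPONSE_HANDLING
        print(response)
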
diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 0cf8959def02..b62675ba6439 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.6.0" + "version": "0.1.0" }, "snippets": [ { @@ -164,6 +164,159 @@ ], "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" + }, { "canonical": true, "clientMethod": { @@ -494,27 +647,31 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "CreateLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" + "type": "google.cloud.logging_v2.types.CreateLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" }, { "name": "retry", @@ -529,14 +686,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_link" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async", "segments": [ { "end": 56, @@ -554,22 +711,657 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + 
"file": "logging_v2_generated_config_service_v2_create_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + 
{ + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + 
"fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" }, { "canonical": true, @@ -578,28 +1370,24 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "DeleteExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -613,22 +1401,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "shortName": "delete_exclusion" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -638,22 +1425,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" }, { "canonical": true, @@ -663,19 +1448,23 @@ "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -690,22 +1479,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_link" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -715,22 +1504,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_async.py" + "title": "logging_v2_generated_config_service_v2_delete_link_async.py" }, { "canonical": true, @@ -739,19 +1528,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -766,22 +1559,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_link" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_sync", "segments": [ { - "end": 52, + "end": 55, "start": 
27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -791,22 +1584,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_link_sync.py" }, { "canonical": true, @@ -816,19 +1609,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -843,13 +1640,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_bucket" + "shortName": "delete_sink" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { "end": 49, @@ -880,7 +1677,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" }, { "canonical": true, @@ -889,19 +1686,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -916,13 +1717,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_bucket" + "shortName": "delete_sink" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", 
"segments": [ { "end": 49, @@ -953,7 +1754,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" }, { "canonical": true, @@ -963,23 +1764,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -994,13 +1791,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "shortName": "delete_view" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { "end": 49, @@ -1031,7 +1828,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { "canonical": true, @@ -1040,23 +1837,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -1071,13 +1864,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "shortName": "delete_view" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { "end": 49, @@ -1108,7 +1901,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" }, { "canonical": true, @@ -1118,23 +1911,19 @@ 
"fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -1149,21 +1938,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1178,15 +1968,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { "canonical": true, @@ -1195,23 +1987,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -1226,21 +2014,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1255,15 +2044,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, 
+ "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { "canonical": true, @@ -1273,19 +2064,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -1300,21 +2091,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1329,15 +2121,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_async.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { "canonical": true, @@ -1346,19 +2140,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -1373,21 +2167,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - 
"end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1402,15 +2197,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { "canonical": true, @@ -1420,19 +2217,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1447,14 +2248,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { "end": 51, @@ -1487,7 +2288,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { "canonical": true, @@ -1496,19 +2297,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1523,14 +2328,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { "end": 51, @@ -1563,7 +2368,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { "canonical": true, @@ -1573,19 +2378,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1600,14 +2409,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async", "segments": [ { "end": 51, @@ -1640,7 +2449,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_link_async.py" }, { "canonical": true, @@ -1649,19 +2458,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1676,14 +2489,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetLink_sync", "segments": [ { "end": 51, @@ -1716,7 +2529,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_link_sync.py" }, { "canonical": true, @@ -1726,19 +2539,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" }, { "name": "name", @@ -1757,14 +2570,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { "end": 51, @@ -1797,7 +2610,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { "canonical": true, @@ -1806,19 +2619,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" }, { "name": "name", @@ -1837,14 +2650,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { "end": 51, @@ -1877,7 +2690,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_get_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { "canonical": true, @@ -1887,22 +2700,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" }, { - "name": "name", + "name": "sink_name", "type": "str" }, { @@ -1918,14 +2731,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetSettings", - "file": "logging_v2_generated_config_service_v2_get_settings_async.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { "end": 51, @@ -1958,7 +2771,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { "canonical": true, @@ -1967,22 +2780,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" }, { - "name": "name", + "name": "sink_name", "type": "str" }, { @@ -1998,14 +2811,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetSettings", - "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { "end": 51, @@ -2038,7 +2851,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { "canonical": true, @@ 
-2048,23 +2861,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2079,14 +2888,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { "end": 51, @@ -2119,7 +2928,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { "canonical": true, @@ -2128,23 +2937,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2159,14 +2964,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { "end": 51, @@ -2199,7 +3004,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { "canonical": true, @@ -2209,19 +3014,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2236,22 +3045,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2271,12 +3080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_async.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { "canonical": true, @@ -2285,19 +3094,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2312,22 +3125,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2347,12 +3160,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_sync.py" + "title": 
"logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { "canonical": true, @@ -2362,19 +3175,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { "name": "parent", @@ -2393,14 +3206,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { "end": 52, @@ -2433,7 +3246,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { "canonical": true, @@ -2442,19 +3255,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { "name": "parent", @@ -2473,14 +3286,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { "end": 52, @@ -2513,7 +3326,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_list_buckets_sync.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { "canonical": true, @@ -2523,19 +3336,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2554,14 +3367,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", + "shortName": "list_links" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async", "segments": [ { "end": 52, @@ -2594,7 +3407,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" + "title": "logging_v2_generated_config_service_v2_list_links_async.py" }, { "canonical": true, @@ -2603,19 +3416,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2634,14 +3447,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", + "shortName": "list_links" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_sync", "segments": [ { "end": 52, @@ -2674,7 +3487,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_list_exclusions_sync.py" + "title": "logging_v2_generated_config_service_v2_list_links_sync.py" }, { "canonical": true, @@ -3145,6 +3958,159 @@ ], "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" + 
}, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-logging/samples/snippets/snippets.py b/packages/google-cloud-logging/samples/snippets/snippets.py index a409e996bb61..736311e0f44c 100644 --- a/packages/google-cloud-logging/samples/snippets/snippets.py +++ b/packages/google-cloud-logging/samples/snippets/snippets.py @@ -46,7 +46,8 @@ def write_entry(logger_name): "name": "King Arthur", "quest": "Find the Holy Grail", "favorite_color": "Blue", - }, severity="INFO" + }, + severity="INFO", ) print("Wrote logs to {}.".format(logger.name)) diff --git a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-logging/tests/__init__.py +++ b/packages/google-cloud-logging/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-logging/tests/unit/__init__.py +++ b/packages/google-cloud-logging/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index be77714c0196..1af57347050f 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -48,8 +48,9 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth @@ -1120,9 +1121,11 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_buckets(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1154,6 +1157,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1170,6 +1174,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True assert response.restricted_fields == ["restricted_fields_value"] @@ -1212,6 +1217,7 @@ async def test_get_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, restricted_fields=["restricted_fields_value"], ) ) @@ -1229,6 +1235,7 @@ async def test_get_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True assert response.restricted_fields == ["restricted_fields_value"] @@ -1305,7 +1312,7 @@ async def test_get_bucket_field_headers_async(): dict, ], ) -def test_create_bucket(request_type, transport: str = "grpc"): +def test_create_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1316,17 +1323,12 @@ def test_create_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) - response = client.create_bucket(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1334,16 +1336,10 @@ def test_create_bucket(request_type, transport: str = "grpc"): assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) -def test_create_bucket_empty_call(): +def test_create_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -1352,15 +1348,17 @@ def test_create_bucket_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: - client.create_bucket() + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + client.create_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() @pytest.mark.asyncio -async def test_create_bucket_async( +async def test_create_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( @@ -1373,19 +1371,14 @@ async def test_create_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.create_bucket(request) + response = await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1393,21 +1386,15 @@ async def test_create_bucket_async( assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_create_bucket_async_from_dict(): - await test_create_bucket_async(request_type=dict) +async def test_create_bucket_async_async_from_dict(): + await test_create_bucket_async_async(request_type=dict) -def test_create_bucket_field_headers(): +def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1419,9 +1406,11 @@ def test_create_bucket_field_headers(): request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() - client.create_bucket(request) + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1437,7 +1426,7 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio -async def test_create_bucket_field_headers_async(): +async def test_create_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1449,11 +1438,13 @@ async def test_create_bucket_field_headers_async(): request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + operations_pb2.Operation(name="operations/op") ) - await client.create_bucket(request) + await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1475,7 +1466,7 @@ async def test_create_bucket_field_headers_async(): dict, ], ) -def test_update_bucket(request_type, transport: str = "grpc"): +def test_update_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1486,17 +1477,12 @@ def test_update_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) - response = client.update_bucket(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1504,16 +1490,10 @@ def test_update_bucket(request_type, transport: str = "grpc"): assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) -def test_update_bucket_empty_call(): +def test_update_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -1522,15 +1502,17 @@ def test_update_bucket_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - client.update_bucket() + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + client.update_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() @pytest.mark.asyncio -async def test_update_bucket_async( +async def test_update_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( @@ -1543,19 +1525,14 @@ async def test_update_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_bucket(request) + response = await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1563,21 +1540,15 @@ async def test_update_bucket_async( assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_bucket_async_from_dict(): - await test_update_bucket_async(request_type=dict) +async def test_update_bucket_async_async_from_dict(): + await test_update_bucket_async_async(request_type=dict) -def test_update_bucket_field_headers(): +def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1589,9 +1560,11 @@ def test_update_bucket_field_headers(): request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() - client.update_bucket(request) + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1607,7 +1580,7 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio -async def test_update_bucket_field_headers_async(): +async def test_update_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1619,11 +1592,13 @@ async def test_update_bucket_field_headers_async(): request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + operations_pb2.Operation(name="operations/op") ) - await client.update_bucket(request) + await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1641,11 +1616,11 @@ async def test_update_bucket_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteBucketRequest, + logging_config.CreateBucketRequest, dict, ], ) -def test_delete_bucket(request_type, transport: str = "grpc"): +def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1656,21 +1631,36 @@ def test_delete_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_bucket(request) + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + response = client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] -def test_delete_bucket_empty_call(): +def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -1679,16 +1669,16 @@ def test_delete_bucket_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - client.delete_bucket() + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + assert args[0] == logging_config.CreateBucketRequest() @pytest.mark.asyncio -async def test_delete_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1700,40 +1690,57 @@ async def test_delete_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio -async def test_delete_bucket_async_from_dict(): - await test_delete_bucket_async(request_type=dict) +async def test_create_bucket_async_from_dict(): + await test_create_bucket_async(request_type=dict) -def test_delete_bucket_field_headers(): +def test_create_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - call.return_value = None - client.delete_bucket(request) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1744,26 +1751,28 @@ def test_delete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_bucket_field_headers_async(): +async def test_create_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket(request) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. 
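The analytics_enabled assertions added throughout these hunks exercise a new boolean field on LogBucket; constructing the message directly shows the shape (the name is a placeholder):

    from google.cloud.logging_v2.types import logging_config

    bucket = logging_config.LogBucket(
        name="projects/[PROJECT]/locations/global/buckets/[BUCKET]",
        analytics_enabled=True,  # new field asserted in the updated tests
    )
    assert bucket.analytics_enabled is True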
assert len(call.mock_calls) @@ -1774,18 +1783,18 @@ async def test_delete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.UndeleteBucketRequest, + logging_config.UpdateBucketRequest, dict, ], ) -def test_undelete_bucket(request_type, transport: str = "grpc"): +def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1796,21 +1805,36 @@ def test_undelete_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.undelete_bucket(request) + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + response = client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] -def test_undelete_bucket_empty_call(): +def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -1819,16 +1843,16 @@ def test_undelete_bucket_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - client.undelete_bucket() - call.assert_called() + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + client.update_bucket() + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + assert args[0] == logging_config.UpdateBucketRequest() @pytest.mark.asyncio -async def test_undelete_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1840,40 +1864,57 @@ async def test_undelete_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
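# The *_empty_call tests above pin down one convenience guarantee: a client
# method invoked with neither a request object nor flattened fields must
# still synthesize a default request for the stub. Standalone sketch
# (verify_empty_call is an illustrative name):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def verify_empty_call():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
        call.return_value = logging_config.LogBucket()
        client.update_bucket()  # no request, no flattened fields
    _, args, _ = call.mock_calls[0]
    # The client built a default UpdateBucketRequest on the caller's behalf.
    assert args[0] == logging_config.UpdateBucketRequest()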
- with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio -async def test_undelete_bucket_async_from_dict(): - await test_undelete_bucket_async(request_type=dict) +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) -def test_undelete_bucket_field_headers(): +def test_update_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - call.return_value = None - client.undelete_bucket(request) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1889,21 +1930,23 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio -async def test_undelete_bucket_field_headers_async(): +async def test_update_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket(request) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1921,11 +1964,11 @@ async def test_undelete_bucket_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.ListViewsRequest, + logging_config.DeleteBucketRequest, dict, ], ) -def test_list_views(request_type, transport: str = "grpc"): +def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1936,24 +1979,21 @@ def test_list_views(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListViewsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_views(request) + call.return_value = None + response = client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_views_empty_call(): +def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -1962,16 +2002,16 @@ def test_list_views_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - client.list_views() + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + assert args[0] == logging_config.DeleteBucketRequest() @pytest.mark.asyncio -async def test_list_views_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1983,45 +2023,40 @@ async def test_list_views_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
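# The field-header tests repeated above pin down GAPIC request routing: any
# request field that appears in the HTTP/1.1 URI (name, parent, ...) must be
# mirrored into the x-goog-request-params gRPC metadata entry so the backend
# can route the call. Condensed sketch (routing_header_sent is an
# illustrative name):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def routing_header_sent():
    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())
    request = logging_config.UpdateBucketRequest(name="name_value")
    with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
        call.return_value = logging_config.LogBucket()
        client.update_bucket(request)
    # The kwargs of the recorded stub call carry the outgoing metadata tuples.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name_value") in kw["metadata"]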
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_views(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_views_async_from_dict(): - await test_list_views_async(request_type=dict) +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) -def test_list_views_field_headers(): +def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = logging_config.ListViewsResponse() - client.list_views(request) + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2032,28 +2067,26 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_field_headers_async(): +async def test_delete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse() - ) - await client.list_views(request) + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2064,288 +2097,158 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_views_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListViewsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_views( - parent="parent_value", - ) + call.return_value = None + response = client.undelete_bucket(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert args[0] == logging_config.UndeleteBucketRequest() + # Establish that the response is the type that we expect. + assert response is None -def test_list_views_flattened_error(): + +def test_undelete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_views( - logging_config.ListViewsRequest(), - parent="parent_value", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() @pytest.mark.asyncio -async def test_list_views_flattened_async(): +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
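# On the async side a bare return value will not do, because the wrapped stub
# is awaited; hence the tests wrap each value (or None, for Empty-returning
# RPCs like DeleteBucket and UndeleteBucket) in
# grpc_helpers_async.FakeUnaryUnaryCall, the awaitable test fake from
# google-api-core that this test module already imports. A sketch:

import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import (
    ConfigServiceV2AsyncClient,
)
from google.cloud.logging_v2.types import logging_config


async def check_async_delete():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
        # FakeUnaryUnaryCall(None) resolves to None when awaited.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_bucket(logging_config.DeleteBucketRequest())
    assert response is None


if __name__ == "__main__":
    asyncio.run(check_async_delete())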
- call.return_value = logging_config.ListViewsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_views( - parent="parent_value", - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert args[0] == logging_config.UndeleteBucketRequest() + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_views_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_views( - logging_config.ListViewsRequest(), - parent="parent_value", - ) +@pytest.mark.asyncio +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) -def test_list_views_pager(transport_name: str = "grpc"): +def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_views(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) for i in results) - + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + client.undelete_bucket(request) -def test_list_views_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_async_pager(): +async def test_undelete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_views( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogView) for i in responses) + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request) -@pytest.mark.asyncio -async def test_list_views_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_views(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.GetViewRequest, + logging_config.ListViewsRequest, dict, ], ) -def test_get_view(request_type, transport: str = "grpc"): +def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2356,28 +2259,24 @@ def test_get_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + call.return_value = logging_config.ListViewsResponse( + next_page_token="next_page_token_value", ) - response = client.get_view(request) + response = client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_view_empty_call(): +def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -2386,16 +2285,16 @@ def test_get_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_view), "__call__") as call: - client.get_view() + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + assert args[0] == logging_config.ListViewsRequest() @pytest.mark.asyncio -async def test_get_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest +async def test_list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2407,50 +2306,46 @@ async def test_get_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_view(request) + response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert isinstance(response, pagers.ListViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_view_async_from_dict(): - await test_get_view_async(request_type=dict) +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) -def test_get_view_field_headers(): +def test_list_views_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetViewRequest() + request = logging_config.ListViewsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = logging_config.LogView() - client.get_view(request) - + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request) + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -2460,28 +2355,28 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_view_field_headers_async(): +async def test_list_views_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetViewRequest() + request = logging_config.ListViewsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView() + logging_config.ListViewsResponse() ) - await client.get_view(request) + await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2492,226 +2387,340 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - logging_config.CreateViewRequest, - dict, - ], -) -def test_create_view(request_type, transport: str = "grpc"): +def test_list_views_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + call.return_value = logging_config.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_views( + parent="parent_value", ) - response = client.create_view(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_create_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+def test_list_views_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - client.create_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + logging_config.ListViewsRequest(), + parent="parent_value", + ) @pytest.mark.asyncio -async def test_create_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest -): +async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", - ) + logging_config.ListViewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_views( + parent="parent_value", ) - response = await client.create_view(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - - -@pytest.mark.asyncio -async def test_create_view_async_from_dict(): - await test_create_view_async(request_type=dict) - - -def test_create_view_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateViewRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = logging_config.LogView() - client.create_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_create_view_field_headers_async(): +async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateViewRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_views( + logging_config.ListViewsRequest(), + parent="parent_value", ) - await client.create_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateViewRequest, - dict, - ], -) -def test_update_view(request_type, transport: str = "grpc"): +def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, ) - response = client.update_view(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}) - # Establish that the response is the type that we expect. 
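# test_list_views_flattened* above cover the convenience surface: a caller
# may pass a request object or flattened keyword fields, but mixing the two
# raises ValueError before any RPC is attempted. Condensed sketch
# (check_flattened_rules is an illustrative name):

from unittest import mock

import pytest

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def check_flattened_rules():
    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
        call.return_value = logging_config.ListViewsResponse()
        client.list_views(parent="parent_value")  # flattened field: fine
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        with pytest.raises(ValueError):  # request object AND flattened: rejected
            client.list_views(
                logging_config.ListViewsRequest(), parent="parent_value"
            )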
- assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) for i in results) -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogView) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetViewRequest, + dict, + ], +) +def test_get_view(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + + +def test_get_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - client.update_view() + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + assert args[0] == logging_config.GetViewRequest() @pytest.mark.asyncio -async def test_update_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2723,7 +2732,7 @@ async def test_update_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
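# The pager tests re-added above drive pagination entirely through the mock:
# call.side_effect yields one ListViewsResponse per page, and the trailing
# RuntimeError is a sentinel that fires only if the pager requests a page
# beyond the one whose next_page_token is empty. A reduced sketch of the same
# mechanics (two pages instead of four; the helper name is illustrative):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config


def check_pager_stops_on_empty_token():
    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
        call.side_effect = (
            logging_config.ListViewsResponse(
                views=[logging_config.LogView()], next_page_token="abc"
            ),
            logging_config.ListViewsResponse(views=[logging_config.LogView()]),
            RuntimeError,  # reached only if the pager over-fetches
        )
        results = list(client.list_views(request={}))
    assert len(results) == 2  # one LogView per page, across both pages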
- with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView( @@ -2732,12 +2741,12 @@ async def test_update_view_async( filter="filter_value", ) ) - response = await client.update_view(request) + response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -2747,25 +2756,25 @@ async def test_update_view_async( @pytest.mark.asyncio -async def test_update_view_async_from_dict(): - await test_update_view_async(request_type=dict) +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) -def test_update_view_field_headers(): +def test_get_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateViewRequest() + request = logging_config.GetViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() - client.update_view(request) + client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2781,23 +2790,23 @@ def test_update_view_field_headers(): @pytest.mark.asyncio -async def test_update_view_field_headers_async(): +async def test_get_view_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateViewRequest() + request = logging_config.GetViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.update_view(request) + await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2815,11 +2824,11 @@ async def test_update_view_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteViewRequest, + logging_config.CreateViewRequest, dict, ], ) -def test_delete_view(request_type, transport: str = "grpc"): +def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2830,21 +2839,28 @@ def test_delete_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_view(request) + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" -def test_delete_view_empty_call(): +def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -2853,16 +2869,16 @@ def test_delete_view_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - client.delete_view() + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + assert args[0] == logging_config.CreateViewRequest() @pytest.mark.asyncio -async def test_delete_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +async def test_create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2874,40 +2890,49 @@ async def test_delete_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + response = await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_delete_view_async_from_dict(): - await test_delete_view_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_delete_view_field_headers(): +def test_create_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteViewRequest() + request = logging_config.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = None - client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2918,26 +2943,28 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_view_field_headers_async(): +async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteViewRequest() + request = logging_config.CreateViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2948,18 +2975,18 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.ListSinksRequest, + logging_config.UpdateViewRequest, dict, ], ) -def test_list_sinks(request_type, transport: str = "grpc"): +def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2970,24 +2997,28 @@ def test_list_sinks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListSinksResponse( - next_page_token="next_page_token_value", + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", ) - response = client.list_sinks(request) + response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" -def test_list_sinks_empty_call(): +def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -2996,16 +3027,16 @@ def test_list_sinks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - client.list_sinks() + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + assert args[0] == logging_config.UpdateViewRequest() @pytest.mark.asyncio -async def test_list_sinks_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3017,45 +3048,49 @@ async def test_list_sinks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse( - next_page_token="next_page_token_value", + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", ) ) - response = await client.list_sinks(request) + response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_list_sinks_async_from_dict(): - await test_list_sinks_async(request_type=dict) +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) -def test_list_sinks_field_headers(): +def test_update_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListSinksRequest() + request = logging_config.UpdateViewRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - call.return_value = logging_config.ListSinksResponse() - client.list_sinks(request) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3066,28 +3101,28 @@ def test_list_sinks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_sinks_field_headers_async(): +async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListSinksRequest() + request = logging_config.UpdateViewRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse() + logging_config.LogView() ) - await client.list_sinks(request) + await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3098,288 +3133,158 @@ async def test_list_sinks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_sinks_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteViewRequest, + dict, + ], +) +def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListSinksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_sinks( - parent="parent_value", - ) + call.return_value = None + response = client.delete_view(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None -def test_list_sinks_flattened_error(): +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sinks( - logging_config.ListSinksRequest(), - parent="parent_value", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() @pytest.mark.asyncio -async def test_list_sinks_flattened_async(): +async def test_delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListSinksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_sinks( - parent="parent_value", - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert args[0] == logging_config.DeleteViewRequest() + # Establish that the response is the type that we expect. 
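+    # FakeUnaryUnaryCall wraps the value in an awaitable, standing in for the
+    # grpc.aio call object; DeleteView itself returns google.protobuf.Empty,
+    # which the generated client surfaces to callers as None.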
+ assert response is None -@pytest.mark.asyncio -async def test_list_sinks_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_sinks( - logging_config.ListSinksRequest(), - parent="parent_value", - ) +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) -def test_list_sinks_pager(transport_name: str = "grpc"): +def test_delete_view_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_sinks(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogSink) for i in results) - + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request) -def test_list_sinks_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sinks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that the field header was sent. 
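+    # GAPIC clients propagate URI-bound request fields via the
+    # x-goog-request-params gRPC metadata entry; the assertion below inspects
+    # that encoded routing header.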
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_sinks_async_pager(): +async def test_delete_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sinks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogSink) for i in responses) + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) -@pytest.mark.asyncio -async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_sinks(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.GetSinkRequest, + logging_config.ListSinksRequest, dict, ], ) -def test_get_sink(request_type, transport: str = "grpc"): +def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3390,41 +3295,24 @@ def test_get_sink(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True - ), + call.return_value = logging_config.ListSinksResponse( + next_page_token="next_page_token_value", ) - response = client.get_sink(request) + response = client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert isinstance(response, pagers.ListSinksPager) + assert response.next_page_token == "next_page_token_value" -def test_get_sink_empty_call(): +def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -3433,16 +3321,16 @@ def test_get_sink_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - client.get_sink() + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + assert args[0] == logging_config.ListSinksRequest() @pytest.mark.asyncio -async def test_get_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest +async def test_list_sinks_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3454,59 +3342,45 @@ async def test_get_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_sink(request) + response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert isinstance(response, pagers.ListSinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_sink_async_from_dict(): - await test_get_sink_async(request_type=dict) +async def test_list_sinks_async_from_dict(): + await test_list_sinks_async(request_type=dict) -def test_get_sink_field_headers(): +def test_list_sinks_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetSinkRequest() + request = logging_config.ListSinksRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - call.return_value = logging_config.LogSink() - client.get_sink(request) + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3517,28 +3391,28 @@ def test_get_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_sink_field_headers_async(): +async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetSinkRequest() + request = logging_config.ListSinksRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + logging_config.ListSinksResponse() ) - await client.get_sink(request) + await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3549,35 +3423,35 @@ async def test_get_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_sink_flattened(): +def test_list_sinks_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() + call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_sink( - sink_name="sink_name_value", + client.list_sinks( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_sink_flattened_error(): +def test_list_sinks_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3585,43 +3459,43 @@ def test_get_sink_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_sink( - logging_config.GetSinkRequest(), - sink_name="sink_name_value", + client.list_sinks( + logging_config.ListSinksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_sink_flattened_async(): +async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() + call.return_value = logging_config.ListSinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + logging_config.ListSinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_sink( - sink_name="sink_name_value", + response = await client.list_sinks( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
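    # The flattened "parent" keyword is coalesced into a ListSinksRequest,
    # so the expected value is read back off the request object in args[0].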
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_sink_flattened_error_async(): +async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3629,293 +3503,210 @@ async def test_get_sink_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_sink( - logging_config.GetSinkRequest(), - sink_name="sink_name_value", + await client.list_sinks( + logging_config.ListSinksRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - logging_config.CreateSinkRequest, - dict, - ], -) -def test_create_sink(request_type, transport: str = "grpc"): +def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), + RuntimeError, ) - response = client.create_sink(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sinks(request={}) - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogSink) for i in results) -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sinks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_create_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest -): +async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - ) - ) - response = await client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True - - -@pytest.mark.asyncio -async def test_create_sink_async_from_dict(): - await test_create_sink_async(request_type=dict) - - -def test_create_sink_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateSinkRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - call.return_value = logging_config.LogSink() - client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateSinkRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + with mock.patch.object( + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, ) - await client.create_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_sink( - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), + async_pager = await client.list_sinks( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - - -def test_create_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_sink( - logging_config.CreateSinkRequest(), - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), - ) + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogSink) for i in responses) @pytest.mark.asyncio -async def test_create_sink_flattened_async(): +async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_sink( - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_sink( - logging_config.CreateSinkRequest(), - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), + with mock.patch.object( + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
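+        # mock consumes side_effect sequentially, returning one response per
+        # RPC invocation; the trailing RuntimeError fails fast if the pager
+        # requests a page beyond the last prepared response.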
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sinks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - logging_config.UpdateSinkRequest, + logging_config.GetSinkRequest, dict, ], ) -def test_update_sink(request_type, transport: str = "grpc"): +def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3926,7 +3717,7 @@ def test_update_sink(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( name="name_value", @@ -3937,16 +3728,13 @@ def test_update_sink(request_type, transport: str = "grpc"): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity="writer_identity_value", include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True - ), ) - response = client.update_sink(request) + response = client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -3960,7 +3748,7 @@ def test_update_sink(request_type, transport: str = "grpc"): assert response.include_children is True -def test_update_sink_empty_call(): +def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -3969,16 +3757,16 @@ def test_update_sink_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - client.update_sink() + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + assert args[0] == logging_config.GetSinkRequest() @pytest.mark.asyncio -async def test_update_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +async def test_get_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3990,7 +3778,7 @@ async def test_update_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink( @@ -4004,12 +3792,12 @@ async def test_update_sink_async( include_children=True, ) ) - response = await client.update_sink(request) + response = await client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4024,25 +3812,25 @@ async def test_update_sink_async( @pytest.mark.asyncio -async def test_update_sink_async_from_dict(): - await test_update_sink_async(request_type=dict) +async def test_get_sink_async_from_dict(): + await test_get_sink_async(request_type=dict) -def test_update_sink_field_headers(): +def test_get_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() + request = logging_config.GetSinkRequest() request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.update_sink(request) + client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4058,23 +3846,23 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio -async def test_update_sink_field_headers_async(): +async def test_get_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() + request = logging_config.GetSinkRequest() request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.update_sink(request) + await client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4089,21 +3877,19 @@ async def test_update_sink_field_headers_async(): ) in kw["metadata"] -def test_update_sink_flattened(): +def test_get_sink_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_sink( + client.get_sink( sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -4113,15 +3899,9 @@ def test_update_sink_flattened(): arg = args[0].sink_name mock_val = "sink_name_value" assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val -def test_update_sink_flattened_error(): +def test_get_sink_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4129,22 +3909,20 @@ def test_update_sink_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_sink( - logging_config.UpdateSinkRequest(), + client.get_sink( + logging_config.GetSinkRequest(), sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_sink_flattened_async(): +async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -4153,10 +3931,8 @@ async def test_update_sink_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_sink( + response = await client.get_sink( sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -4166,16 +3942,10 @@ async def test_update_sink_flattened_async(): arg = args[0].sink_name mock_val = "sink_name_value" assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val @pytest.mark.asyncio -async def test_update_sink_flattened_error_async(): +async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4183,22 +3953,20 @@ async def test_update_sink_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_sink( - logging_config.UpdateSinkRequest(), + await client.get_sink( + logging_config.GetSinkRequest(), sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteSinkRequest, + logging_config.CreateSinkRequest, dict, ], ) -def test_delete_sink(request_type, transport: str = "grpc"): +def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4209,21 +3977,38 @@ def test_delete_sink(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_sink(request) + call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + response = client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True -def test_delete_sink_empty_call(): +def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( @@ -4232,16 +4017,16 @@ def test_delete_sink_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - client.delete_sink() + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + assert args[0] == logging_config.CreateSinkRequest() @pytest.mark.asyncio -async def test_delete_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +async def test_create_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4253,40 +4038,59 @@ async def test_delete_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True @pytest.mark.asyncio -async def test_delete_sink_async_from_dict(): - await test_delete_sink_async(request_type=dict) +async def test_create_sink_async_from_dict(): + await test_create_sink_async(request_type=dict) -def test_delete_sink_field_headers(): +def test_create_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() + request = logging_config.CreateSinkRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - call.return_value = None - client.delete_sink(request) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4297,26 +4101,28 @@ def test_delete_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_sink_field_headers_async(): +async def test_create_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() + request = logging_config.CreateSinkRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + await client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4327,35 +4133,39 @@ async def test_delete_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_sink_flattened(): +def test_create_sink_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_sink( - sink_name="sink_name_value", + client.create_sink( + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val -def test_delete_sink_flattened_error(): +def test_create_sink_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4363,41 +4173,48 @@ def test_delete_sink_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name="sink_name_value", + client.create_sink( + logging_config.CreateSinkRequest(), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) @pytest.mark.asyncio -async def test_delete_sink_flattened_async(): +async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_sink( - sink_name="sink_name_value", + response = await client.create_sink( + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_delete_sink_flattened_error_async(): +async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4405,20 +4222,21 @@ async def test_delete_sink_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name="sink_name_value", + await client.create_sink( + logging_config.CreateSinkRequest(), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - logging_config.ListExclusionsRequest, + logging_config.UpdateSinkRequest, dict, ], ) -def test_list_exclusions(request_type, transport: str = "grpc"): +def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4429,24 +4247,38 @@ def test_list_exclusions(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", + call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, ) - response = client.list_exclusions(request) + response = client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True -def test_list_exclusions_empty_call(): +def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4455,16 +4287,16 @@ def test_list_exclusions_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - client.list_exclusions() + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.UpdateSinkRequest() @pytest.mark.asyncio -async def test_list_exclusions_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +async def test_update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4476,45 +4308,59 @@ async def test_list_exclusions_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, ) ) - response = await client.list_exclusions(request) + response = await client.update_sink(request) # Establish that the underlying gRPC stub method was called. 
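    # (The async variant only checks that at least one stub call was recorded;
    # the request is compared against UpdateSinkRequest() just below.)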
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True @pytest.mark.asyncio -async def test_list_exclusions_async_from_dict(): - await test_list_exclusions_async(request_type=dict) +async def test_update_sink_async_from_dict(): + await test_update_sink_async(request_type=dict) -def test_list_exclusions_field_headers(): +def test_update_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.UpdateSinkRequest() - request.parent = "parent_value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4525,28 +4371,28 @@ def test_list_exclusions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "sink_name=sink_name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_exclusions_field_headers_async(): +async def test_update_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.UpdateSinkRequest() - request.parent = "parent_value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse() + logging_config.LogSink() ) - await client.list_exclusions(request) + await client.update_sink(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -4557,35 +4403,43 @@ async def test_list_exclusions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "sink_name=sink_name_value", ) in kw["metadata"] -def test_list_exclusions_flattened(): +def test_update_sink_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() + call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_exclusions( - parent="parent_value", + client.update_sink( + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_exclusions_flattened_error(): +def test_update_sink_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4593,43 +4447,53 @@ def test_list_exclusions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent="parent_value", + client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_exclusions_flattened_async(): +async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() + call.return_value = logging_config.LogSink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse() + logging_config.LogSink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_exclusions( - parent="parent_value", + response = await client.update_sink( + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
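    # (Each flattened argument (sink_name, sink, update_mask) must be copied
    # onto the corresponding field of the single request sent to the stub.)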
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_exclusions_flattened_error_async(): +async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4637,316 +4501,110 @@ async def test_list_exclusions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent="parent_value", + await client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_exclusions_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) +def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_exclusions(request={}) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_sink(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) for i in results) + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_exclusions_pages(transport_name: str = "grpc"): +def test_delete_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() @pytest.mark.asyncio -async def test_list_exclusions_async_pager(): +async def test_delete_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_exclusions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) for i in responses) - - -@pytest.mark.asyncio -async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_exclusions(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetExclusionRequest, - dict, - ], -) -def test_get_exclusion(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - response = client.get_exclusion(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True - - -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - -@pytest.mark.asyncio -async def test_get_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest -): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - ) - response = await client.get_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert response is None @pytest.mark.asyncio -async def test_get_exclusion_async_from_dict(): - await test_get_exclusion_async(request_type=dict) +async def test_delete_sink_async_from_dict(): + await test_delete_sink_async(request_type=dict) -def test_get_exclusion_field_headers(): +def test_delete_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.DeleteSinkRequest() - request.name = "name_value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value = None + client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4957,28 +4615,26 @@ def test_get_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "sink_name=sink_name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_exclusion_field_headers_async(): +async def test_delete_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.DeleteSinkRequest() - request.name = "name_value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) - await client.get_exclusion(request) + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. 
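    # (Even for a void-returning RPC such as DeleteSink the routing header is
    # required; with a real sink the param would resemble
    # "sink_name=projects/my-proj/sinks/my-sink" (a hypothetical resource name,
    # shown only for illustration; the test uses the placeholder
    # "sink_name_value").)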
assert len(call.mock_calls) @@ -4989,35 +4645,35 @@ async def test_get_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "sink_name=sink_name_value", ) in kw["metadata"] -def test_get_exclusion_flattened(): +def test_delete_sink_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_exclusion( - name="name_value", + client.delete_sink( + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" assert arg == mock_val -def test_get_exclusion_flattened_error(): +def test_delete_sink_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5025,43 +4681,41 @@ def test_get_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), - name="name_value", + client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) @pytest.mark.asyncio -async def test_get_exclusion_flattened_async(): +async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion( - name="name_value", + response = await client.delete_sink( + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_exclusion_flattened_error_async(): +async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5069,20 +4723,20 @@ async def test_get_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_exclusion( - logging_config.GetExclusionRequest(), - name="name_value", + await client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) @pytest.mark.parametrize( "request_type", [ - logging_config.CreateExclusionRequest, + logging_config.CreateLinkRequest, dict, ], ) -def test_create_exclusion(request_type, transport: str = "grpc"): +def test_create_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5093,30 +4747,21 @@ def test_create_exclusion(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - response = client.create_exclusion(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) -def test_create_exclusion_empty_call(): +def test_create_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5125,16 +4770,16 @@ def test_create_exclusion_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - client.create_exclusion() + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.CreateLinkRequest() @pytest.mark.asyncio -async def test_create_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +async def test_create_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5146,51 +4791,42 @@ async def test_create_exclusion_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. 
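        # (CreateLink is a long-running operation: the stub returns an
        # operations_pb2.Operation and the client exposes it as a future.)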
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.create_exclusion(request) + response = await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_create_exclusion_async_from_dict(): - await test_create_exclusion_async(request_type=dict) +async def test_create_link_async_from_dict(): + await test_create_link_async(request_type=dict) -def test_create_exclusion_field_headers(): +def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.CreateLinkRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5206,23 +4842,23 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio -async def test_create_exclusion_field_headers_async(): +async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.CreateLinkRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + operations_pb2.Operation(name="operations/op") ) - await client.create_exclusion(request) + await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5237,20 +4873,21 @@ async def test_create_exclusion_field_headers_async(): ) in kw["metadata"] -def test_create_exclusion_flattened(): +def test_create_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
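    # (The flattened CreateLink call takes three keywords: parent, link, and
    # link_id; each is asserted against the request fields below.)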
- with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_exclusion( + client.create_link( parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -5260,12 +4897,15 @@ def test_create_exclusion_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") + arg = args[0].link + mock_val = logging_config.Link(name="name_value") + assert arg == mock_val + arg = args[0].link_id + mock_val = "link_id_value" assert arg == mock_val -def test_create_exclusion_flattened_error(): +def test_create_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5273,32 +4913,34 @@ def test_create_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), + client.create_link( + logging_config.CreateLinkRequest(), parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) @pytest.mark.asyncio -async def test_create_exclusion_flattened_async(): +async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_exclusion( + response = await client.create_link( parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -5308,13 +4950,16 @@ async def test_create_exclusion_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") + arg = args[0].link + mock_val = logging_config.Link(name="name_value") + assert arg == mock_val + arg = args[0].link_id + mock_val = "link_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_exclusion_flattened_error_async(): +async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5322,21 +4967,22 @@ async def test_create_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_exclusion( - logging_config.CreateExclusionRequest(), + await client.create_link( + logging_config.CreateLinkRequest(), parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) @pytest.mark.parametrize( "request_type", [ - logging_config.UpdateExclusionRequest, + logging_config.DeleteLinkRequest, dict, ], ) -def test_update_exclusion(request_type, transport: str = "grpc"): +def test_delete_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5347,30 +4993,21 @@ def test_update_exclusion(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - response = client.update_exclusion(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) -def test_update_exclusion_empty_call(): +def test_delete_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5379,16 +5016,16 @@ def test_update_exclusion_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
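    # (The empty-call failsafe invokes delete_link() with no arguments and
    # verifies that a default DeleteLinkRequest still reaches the stub.)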
- with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - client.update_exclusion() + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() @pytest.mark.asyncio -async def test_update_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +async def test_delete_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5400,51 +5037,42 @@ async def test_update_exclusion_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_exclusion(request) + response = await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_exclusion_async_from_dict(): - await test_update_exclusion_async(request_type=dict) +async def test_delete_link_async_from_dict(): + await test_delete_link_async(request_type=dict) -def test_update_exclusion_field_headers(): +def test_delete_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5460,23 +5088,23 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio -async def test_update_exclusion_field_headers_async(): +async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + operations_pb2.Operation(name="operations/op") ) - await client.update_exclusion(request) + await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5491,21 +5119,19 @@ async def test_update_exclusion_field_headers_async(): ) in kw["metadata"] -def test_update_exclusion_flattened(): +def test_delete_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_exclusion( + client.delete_link( name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -5515,15 +5141,9 @@ def test_update_exclusion_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val -def test_update_exclusion_flattened_error(): +def test_delete_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5531,34 +5151,30 @@ def test_update_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), + client.delete_link( + logging_config.DeleteLinkRequest(), name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_exclusion_flattened_async(): +async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. 
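        # (For the async client the Operation is wrapped in FakeUnaryUnaryCall
        # so that awaiting the patched stub behaves like a real gRPC call.)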
- call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_exclusion( + response = await client.delete_link( name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -5568,16 +5184,10 @@ async def test_update_exclusion_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val @pytest.mark.asyncio -async def test_update_exclusion_flattened_error_async(): +async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5585,22 +5195,20 @@ async def test_update_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_exclusion( - logging_config.UpdateExclusionRequest(), + await client.delete_link( + logging_config.DeleteLinkRequest(), name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteExclusionRequest, + logging_config.ListLinksRequest, dict, ], ) -def test_delete_exclusion(request_type, transport: str = "grpc"): +def test_list_links(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5611,21 +5219,24 @@ def test_delete_exclusion(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_exclusion(request) + call.return_value = logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_exclusion_empty_call(): +def test_list_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5634,16 +5245,16 @@ def test_delete_exclusion_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - client.delete_exclusion() + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() @pytest.mark.asyncio -async def test_delete_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest +async def test_list_links_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5655,40 +5266,45 @@ async def test_delete_exclusion_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_delete_exclusion_async_from_dict(): - await test_delete_exclusion_async(request_type=dict) +async def test_list_links_async_from_dict(): + await test_list_links_async(request_type=dict) -def test_delete_exclusion_field_headers(): +def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() + request = logging_config.ListLinksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - call.return_value = None - client.delete_exclusion(request) + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5699,26 +5315,28 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_exclusion_field_headers_async(): +async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
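    # (ListLinks routes on `parent`, so the header assertion expects
    # "parent=parent_value" rather than a resource-name field.)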
- request = logging_config.DeleteExclusionRequest() + request = logging_config.ListLinksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) + await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5729,35 +5347,35 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_exclusion_flattened(): +def test_list_links_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_exclusion( - name="name_value", + client.list_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_exclusion_flattened_error(): +def test_list_links_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5765,41 +5383,43 @@ def test_delete_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name="name_value", + client.list_links( + logging_config.ListLinksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_exclusion_flattened_async(): +async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
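        # (ListLinks flattens only `parent`; the assertion below confirms the
        # value is copied onto the request object.)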
- response = await client.delete_exclusion( - name="name_value", + response = await client.list_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_exclusion_flattened_error_async(): +async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5807,20 +5427,210 @@ async def test_delete_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name="name_value", + await client.list_links( + logging_config.ListLinksRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetCmekSettingsRequest, - dict, - ], +def test_list_links_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_links(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.Link) for i in results) + + +def test_list_links_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = list(client.list_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_links_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
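+    # (__call__ is awaited by the async client, so it is patched with an
+    # AsyncMock; a plain MagicMock would raise TypeError when awaited.)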
+ with mock.patch.object( + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.Link) for i in responses) + + +@pytest.mark.asyncio +async def test_list_links_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetLinkRequest, + dict, + ], ) -def test_get_cmek_settings(request_type, transport: str = "grpc"): +def test_get_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5831,30 +5641,28 @@ def test_get_cmek_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( + call.return_value = logging_config.Link( name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.get_cmek_settings(request) + response = client.get_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.Link) assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert response.description == "description_value" + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_cmek_settings_empty_call(): +def test_get_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5863,18 +5671,16 @@ def test_get_cmek_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - client.get_cmek_settings() + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetLinkRequest() @pytest.mark.asyncio -async def test_get_cmek_settings_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest +async def test_get_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5886,53 +5692,49 @@ async def test_get_cmek_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( + logging_config.Link( name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.get_cmek_settings(request) + response = await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.Link) assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert response.description == "description_value" + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @pytest.mark.asyncio -async def test_get_cmek_settings_async_from_dict(): - await test_get_cmek_settings_async(request_type=dict) +async def test_get_link_async_from_dict(): + await test_get_link_async(request_type=dict) -def test_get_cmek_settings_field_headers(): +def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() + request = logging_config.GetLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = logging_config.Link() + client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5948,25 +5750,21 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio -async def test_get_cmek_settings_field_headers_async(): +async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() + request = logging_config.GetLinkRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings() - ) - await client.get_cmek_settings(request) + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5981,183 +5779,94 @@ async def test_get_cmek_settings_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateCmekSettingsRequest, - dict, - ], -) -def test_update_cmek_settings(request_type, transport: str = "grpc"): +def test_get_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.CmekSettings( + call.return_value = logging_config.Link() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_link( name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", ) - response = client.update_cmek_settings(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_update_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_get_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_link( + logging_config.GetLinkRequest(), + name="name_value", + ) @pytest.mark.asyncio -async def test_update_cmek_settings_async( - transport: str = "grpc_asyncio", - request_type=logging_config.UpdateCmekSettingsRequest, -): +async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", - ) + call.return_value = logging_config.Link() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_link( + name="name_value", ) - response = await client.update_cmek_settings(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. 
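+        # (Each entry of call.mock_calls unpacks to (name, args, kwargs);
+        # args[0] is the request assembled from the flattened keyword arguments.)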
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" - - -@pytest.mark.asyncio -async def test_update_cmek_settings_async_from_dict(): - await test_update_cmek_settings_async(request_type=dict) - - -def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_update_cmek_settings_field_headers_async(): +async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_link( + logging_config.GetLinkRequest(), + name="name_value", ) - await client.update_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.GetSettingsRequest, + logging_config.ListExclusionsRequest, dict, ], ) -def test_get_settings(request_type, transport: str = "grpc"): +def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6168,32 +5877,24 @@ def test_get_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, + call.return_value = logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", ) - response = client.get_settings(request) + response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_settings_empty_call(): +def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -6202,16 +5903,16 @@ def test_get_settings_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - client.get_settings() + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + assert args[0] == logging_config.ListExclusionsRequest() @pytest.mark.asyncio -async def test_get_settings_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +async def test_list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6223,53 +5924,45 @@ async def test_get_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_settings(request) + response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. 
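+        # (Awaiting the FakeUnaryUnaryCall above resolves to the designated
+        # ListExclusionsResponse.)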
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert isinstance(response, pagers.ListExclusionsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_settings_async_from_dict(): - await test_get_settings_async(request_type=dict) +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) -def test_get_settings_field_headers(): +def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetSettingsRequest() + request = logging_config.ListExclusionsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = logging_config.Settings() - client.get_settings(request) + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6280,28 +5973,28 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_settings_field_headers_async(): +async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetSettingsRequest() + request = logging_config.ListExclusionsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() + logging_config.ListExclusionsResponse() ) - await client.get_settings(request) + await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6312,35 +6005,35 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_settings_flattened(): +def test_list_exclusions_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings() + call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings( - name="name_value", + client.list_exclusions( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_settings_flattened_error(): +def test_list_exclusions_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6348,43 +6041,43 @@ def test_get_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_settings( - logging_config.GetSettingsRequest(), - name="name_value", + client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_settings_flattened_async(): +async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings() + call.return_value = logging_config.ListExclusionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() + logging_config.ListExclusionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings( - name="name_value", + response = await client.list_exclusions( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_settings_flattened_error_async(): +async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6392,55 +6085,1820 @@ async def test_get_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_settings( - logging_config.GetSettingsRequest(), - name="name_value", + await client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateSettingsRequest, - dict, - ], -) -def test_update_settings(request_type, transport: str = "grpc"): +def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, - ) - response = client.update_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True - - + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_exclusions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) for i in results) + + +def test_list_exclusions_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Set the response to a series of pages. 
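+        # (One response per page; iteration ends when next_page_token is empty,
+        # and the trailing RuntimeError would surface any unexpected extra request.)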
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_exclusions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_exclusions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) for i in responses) + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
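+        # (side_effect yields one ListExclusionsResponse per await of the mocked stub.)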
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_exclusions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) +def test_get_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_get_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + client.get_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + +@pytest.mark.asyncio +async def test_get_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_get_exclusion_async_from_dict(): + await test_get_exclusion_async(request_type=dict) + + +def test_get_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
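+        # (The name kwarg should have been copied onto the GetExclusionRequest
+        # the client built, which the unpacked args below confirm.)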
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_exclusion( + logging_config.GetExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_exclusion( + logging_config.GetExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) +def test_create_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_create_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
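+    # (Even with no arguments the client sends a default CreateExclusionRequest,
+    # as the final assertion of this test verifies.)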
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + client.create_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + +@pytest.mark.asyncio +async def test_create_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) + + +def test_create_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
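+    # (The async stub must return an awaitable, hence the FakeUnaryUnaryCall
+    # wrapper below.)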
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + + +def test_create_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_exclusion( + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
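+    # (The client refuses to merge flattened kwargs into an explicit request
+    # and raises ValueError rather than guessing which value should win.)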
+ with pytest.raises(ValueError): + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) +def test_update_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_update_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + +@pytest.mark.asyncio +async def test_update_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. 
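+    # (logging_config messages are proto-plus types, so fields read as plain
+    # attributes in the assertions below.)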
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) + + +def test_update_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_exclusion( + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
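+        # (Each flattened kwarg (name, exclusion, update_mask) should land on
+        # its matching request field, checked field by field below.)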
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_exclusion( + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) +def test_delete_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + +@pytest.mark.asyncio +async def test_delete_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) + + +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = None + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
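+    # grpc_helpers_async.FakeUnaryUnaryCall wraps a canned value in an awaitable, standing in for a real grpc.aio unary-unary call.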
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) +def test_get_cmek_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
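+    # Patching __call__ on the type of the transport's multicallable intercepts the RPC without opening a real channel.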
+ with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + response = client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +def test_get_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) + + +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
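+    # The generated client copies request.name into the x-goog-request-params metadata asserted below.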
+ request = logging_config.GetCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], +) +def test_update_cmek_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + response = client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +def test_update_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
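+    # With no request and no flattened fields, the client should fall back to an empty UpdateCmekSettingsRequest.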
+ with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async( + transport: str = "grpc_asyncio", + request_type=logging_config.UpdateCmekSettingsRequest, +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) + + +def test_update_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
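+    # Async variant of the routing-header check: only the awaitable fake call differs.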
+ with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) +def test_get_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
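+        # The canned Settings payload exercises every scalar field, including the disable_default_sink bool.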
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) + + +def test_get_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_settings_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
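+    # The flattened name keyword should have been packed into a GetSettingsRequest by the client.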
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_settings_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_settings( + logging_config.GetSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -6878,6 +8336,8 @@ def test_config_service_v2_base_transport(): methods = ( "list_buckets", "get_bucket", + "create_bucket_async", + "update_bucket_async", "create_bucket", "update_bucket", "delete_bucket", @@ -6892,6 +8352,10 @@ def test_config_service_v2_base_transport(): "create_sink", "update_sink", "delete_sink", + "create_link", + "delete_link", + "list_links", + "get_link", "list_exclusions", "get_exclusion", "create_exclusion", @@ -6902,6 +8366,9 @@ def test_config_service_v2_base_transport(): "get_settings", "update_settings", "copy_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7302,291 +8769,751 @@ def test_config_service_v2_grpc_lro_async_client(): operations_v1.OperationsAsyncClient, ) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cmek_settings_path(): + project = "squid" + expected = "projects/{project}/cmekSettings".format( + project=project, + ) + actual = ConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + + +def test_link_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + link = "nudibranch" + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) + ) + actual = ConfigServiceV2Client.link_path(project, location, bucket, link) + assert expected == actual + + +def test_parse_link_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "bucket": "winkle", + "link": "nautilus", + } + path = ConfigServiceV2Client.link_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_link_path(path) + assert expected == actual + + +def test_log_bucket_path(): + project = "scallop" + location = "abalone" + bucket = "squid" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "clam", + "location": "whelk", + "bucket": "octopus", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. 
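+    # parse_* helpers regex-match the formatted name and return the captured segments as a dict.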
+ actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + + +def test_log_exclusion_path(): + project = "oyster" + exclusion = "nudibranch" + expected = "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "cuttlefish", + "exclusion": "mussel", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + + +def test_log_sink_path(): + project = "winkle" + sink = "nautilus" + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "scallop", + "sink": "abalone", + } + path = ConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + + +def test_log_view_path(): + project = "squid" + location = "clam" + bucket = "whelk" + view = "octopus" + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) + ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "bucket": "cuttlefish", + "view": "mussel", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + + +def test_settings_path(): + project = "winkle" + expected = "projects/{project}/settings".format( + project=project, + ) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "nautilus", + } + path = ConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual -def test_cmek_settings_path(): - project = "squid" - expected = "projects/{project}/cmekSettings".format( - project=project, +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, ) - actual = ConfigServiceV2Client.cmek_settings_path(project) + actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual -def test_parse_cmek_settings_path(): +def test_parse_common_folder_path(): expected = { - "project": "clam", + "folder": "clam", } - path = ConfigServiceV2Client.cmek_settings_path(**expected) + path = ConfigServiceV2Client.common_folder_path(**expected) # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + actual = ConfigServiceV2Client.parse_common_folder_path(path) assert expected == actual -def test_log_bucket_path(): - project = "whelk" - location = "octopus" - bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, - location=location, - bucket=bucket, +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, ) - actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual -def test_parse_log_bucket_path(): +def test_parse_common_organization_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "bucket": "mussel", + "organization": "octopus", } - path = ConfigServiceV2Client.log_bucket_path(**expected) + path = ConfigServiceV2Client.common_organization_path(**expected) # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_bucket_path(path) + actual = ConfigServiceV2Client.parse_common_organization_path(path) assert expected == actual -def test_log_exclusion_path(): - project = "winkle" - exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format( +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( project=project, - exclusion=exclusion, ) - actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual -def test_parse_log_exclusion_path(): +def test_parse_common_project_path(): expected = { - "project": "scallop", - "exclusion": "abalone", + "project": "nudibranch", } - path = ConfigServiceV2Client.log_exclusion_path(**expected) + path = ConfigServiceV2Client.common_project_path(**expected) # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + actual = ConfigServiceV2Client.parse_common_project_path(path) assert expected == actual -def test_log_sink_path(): - project = "squid" - sink = "clam" - expected = "projects/{project}/sinks/{sink}".format( +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( project=project, - sink=sink, + location=location, ) - actual = ConfigServiceV2Client.log_sink_path(project, sink) + actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual -def test_parse_log_sink_path(): +def test_parse_common_location_path(): expected = { - "project": "whelk", - "sink": "octopus", + "project": "winkle", + "location": "nautilus", } - path = ConfigServiceV2Client.log_sink_path(**expected) + path = ConfigServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
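+        # Passing a plain dict is allowed; the client builds a CancelOperationRequest from it.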
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_sink_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -def test_log_view_path(): - project = "oyster" - location = "nudibranch" - bucket = "cuttlefish" - view = "mussel" - expected = ( - "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, - location=location, - bucket=bucket, - view=view, - ) +def test_get_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_log_view_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "bucket": "scallop", - "view": "abalone", - } - path = ConfigServiceV2Client.log_view_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_log_view_path(path) - assert expected == actual + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_settings_path(): - project = "squid" - expected = "projects/{project}/settings".format( - project=project, + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.settings_path(project) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_settings_path(): - expected = { - "project": "clam", - } - path = ConfigServiceV2Client.settings_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_settings_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, +def test_get_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ConfigServiceV2Client.common_billing_account_path(**expected) - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format( - folder=folder, +def test_list_operations(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = ConfigServiceV2Client.common_folder_path(folder) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ConfigServiceV2Client.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_folder_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format( - organization=organization, +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = ConfigServiceV2Client.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ConfigServiceV2Client.common_organization_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_organization_path(path) - assert expected == actual + # Establish that the response is the type that we expect. 
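+    # The operations mixin returns the raw ListOperationsResponse rather than a pager.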
+ assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format( - project=project, +def test_list_operations_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.common_project_path(project) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ConfigServiceV2Client.common_project_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_project_path(path) - assert expected == actual + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ConfigServiceV2Client.common_location_path(project, location) - assert expected == actual +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ConfigServiceV2Client.common_location_path(**expected) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_location_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. 
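+    # mock_calls[0] unpacks as (name, args, kwargs); the routing header travels in the metadata kwarg.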
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object( - transports.ConfigServiceV2Transport, "_prep_wrapped_messages" - ) as prep: - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +def test_list_operations_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() - with mock.patch.object( - transports.ConfigServiceV2Transport, "_prep_wrapped_messages" - ) as prep: - transport_class = ConfigServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + response = client.list_operations( + request={ + "name": "locations", + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() def test_transport_close(): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index d8bbd5b730c2..ba5e56f22ade 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
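
(For orientation: every operations-mixin test in these files follows one idiom. The test patches the __call__ of the bound gRPC stub method on the client's transport, designates a canned return value, invokes the client method, and asserts against call.mock_calls. A minimal standalone sketch of that idiom, mirroring the imports the generated tests use; the operation name "operations/sample" is an illustrative placeholder:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.longrunning import operations_pb2

    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())
    request = operations_pb2.GetOperationRequest(name="operations/sample")

    # Patch the bound stub method on the transport so no RPC is ever sent.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/sample")
        response = client.get_operation(request)

    _, args, _ = call.mock_calls[0]  # mock_calls entries are (name, args, kwargs)
    assert args[0] == request  # the stub was called with our request
    assert isinstance(response, operations_pb2.Operation)

The async variants differ only in wrapping the canned value in grpc_helpers_async.FakeUnaryUnaryCall, so that awaiting the client call behaves like a real asynchronous gRPC invocation.)
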
@@ -48,6 +48,7 @@ from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore from google.logging.type import log_severity_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -1504,9 +1505,11 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_log_entries(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1799,9 +1802,11 @@ async def test_list_monitored_resource_descriptors_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_monitored_resource_descriptors(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2223,9 +2228,11 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_logs(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2446,6 +2453,9 @@ def test_logging_service_v2_base_transport(): "list_monitored_resource_descriptors", "list_logs", "tail_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2978,6 +2988,435 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
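+ # CancelOperation returns google.protobuf.Empty on the wire, which the client surfaces as None.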
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
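+ # This from_dict variant passes a plain dict as the request to exercise proto coercion.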
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
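+ # kw["metadata"] carries the routing header the client derived from request.name.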
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
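+ # (Every proto3 field has a default, so an empty request message is still well-formed.)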
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
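+ # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking a real async gRPC call.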
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 39a005c97cbe..00e443415fc8 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -48,6 +48,7 @@ from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1136,9 +1137,11 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_log_metrics(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1168,6 +1171,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1184,6 +1188,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1226,6 +1231,7 @@ async def test_get_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1243,6 +1249,7 @@ async def test_get_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1422,6 +1429,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, 
value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1438,6 +1446,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1484,6 +1493,7 @@ async def test_create_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1501,6 +1511,7 @@ async def test_create_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1698,6 +1709,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1714,6 +1726,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1760,6 +1773,7 @@ async def test_update_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1777,6 +1791,7 @@ async def test_update_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2324,6 +2339,9 @@ def test_metrics_service_v2_base_transport(): "create_log_metric", "update_log_metric", "delete_log_metric", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2856,6 +2874,435 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
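+ # The Metrics service transport gains the same operations mixin, so the pattern repeats.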
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", From 6207cfcdc1745b414137127ee0b208288ddba0ff Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 19 Sep 2023 17:56:07 +0200 Subject: [PATCH 753/855] chore(deps): update all dependencies (#781) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: meredithslota Co-authored-by: Owl Bot --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index cbd0a47def45..1779d47d2c8f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.4.0 +pytest==7.4.2 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 65c59ffd6c4b..4c4ec825fb8f 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.4 google-cloud-storage==2.10.0 -google-cloud-pubsub==2.18.3 +google-cloud-pubsub==2.18.4 From 39bfd15f6e5998ba00951e86cd41158e90b47f99 Mon Sep 17 00:00:00 2001 From: David Buxton Date: Fri, 22 Sep 2023 01:05:37 +0100 Subject: [PATCH 754/855] fix: Handle exceptions raised when fetching Django request data (#758) --- .../google/cloud/logging_v2/handlers/_helpers.py | 9 ++++++++- .../tests/unit/handlers/test__helpers.py | 13 +++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 32e70dfdd5e6..43678ed0df52 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -104,10 +104,17 @@ def get_request_data_from_django(): if request is None: return None, None, None, False + # Django can raise django.core.exceptions.DisallowedHost here for a + # malformed HTTP_HOST header. But we don't want to import Django modules. 
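+ # Catching a broad Exception keeps this helper free of Django imports while
+ # still emitting the log entry (with requestUrl left as None) instead of raising.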
+ try: + request_url = request.build_absolute_uri() + except Exception: + request_url = None + # build http_request http_request = { "requestMethod": request.method, - "requestUrl": request.build_absolute_uri(), + "requestUrl": request_url, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 6a7ff245fdd9..5eeae4ba47e4 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -242,6 +242,19 @@ def test_http_request_sparse(self): self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["protocol"], "HTTP/1.1") + def test_invalid_host_header(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + invalid_http_host = "testserver%7d" + django_request = RequestFactory().put("/", HTTP_HOST=invalid_http_host) + middleware = request.RequestMiddleware(None) + middleware(django_request) + http_request, *_ = self._call_fut() + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertIsNone(http_request["requestUrl"]) + self.assertEqual(http_request["protocol"], "HTTP/1.1") + class Test_get_request_data(unittest.TestCase): @staticmethod From c6204bd5da28e3081a93a61eadc402836e59f681 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 22 Sep 2023 20:41:50 +0200 Subject: [PATCH 755/855] chore(deps): update dependency google-cloud-storage to v2.11.0 (#785) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 4c4ec825fb8f..0da7130555f9 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.6.0 google-cloud-bigquery==3.11.4 -google-cloud-storage==2.10.0 +google-cloud-storage==2.11.0 google-cloud-pubsub==2.18.4 From 83f67bd5892cbdf59a9717cb269c04df5b68b793 Mon Sep 17 00:00:00 2001 From: Shoji KUMAGAI Date: Tue, 26 Sep 2023 04:38:06 +0900 Subject: [PATCH 756/855] fix: unintended exception omittion (#736) --- .../logging_v2/handlers/structured_log.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index fac9b26b3b7d..e6094091eda2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -79,8 +79,18 @@ def __init__( log_filter = CloudLoggingFilter(project=project_id, default_labels=labels) self.addFilter(log_filter) + class _Formatter(logging.Formatter): + """Formatter to format log message without traceback""" + + def format(self, record): + """Ignore exception info to avoid duplicating it + https://github.com/googleapis/python-logging/issues/382 + """ + record.message = record.getMessage() + return self.formatMessage(record) + # make logs appear in GCP structured logging format - self._gcp_formatter = 
logging.Formatter(GCP_FORMAT) + self._gcp_formatter = _Formatter(GCP_FORMAT) self._json_encoder_cls = json_encoder_cls or json.JSONEncoder @@ -115,11 +125,7 @@ def format(self, record): payload = '"message": {},'.format(encoded_message) record._payload_str = payload or "" - # remove exception info to avoid duplicating it - # https://github.com/googleapis/python-logging/issues/382 - record.exc_info = None - record.exc_text = None - # convert to GCP structred logging format + # convert to GCP structured logging format gcp_payload = self._gcp_formatter.format(record) return gcp_payload From e06664d1b97bab5e162209349e98a7155e36506b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 15:39:15 -0700 Subject: [PATCH 757/855] chore(main): release 3.7.0 (#784) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 29 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 33 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 23efc1eaad38..2e30867768ef 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.6.0" + ".": "3.7.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 16e128b18c93..219c978d73b5 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.7.0](https://github.com/googleapis/python-logging/compare/v3.6.0...v3.7.0) (2023-09-25) + + +### Features + +* Add ConfigServiceV2.CreateBucketAsync method for creating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.CreateLink method for creating linked datasets for Log Analytics Buckets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.DeleteLink method for deleting linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.GetLink methods for describing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.ListLinks method for listing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.UpdateBucketAsync method for creating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add LogBucket.analytics_enabled field that specifies whether Log Bucket's Analytics features are enabled ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add LogBucket.index_configs field that contains a list of Log Bucket's indexed fields and related configuration data 
([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) + + +### Bug Fixes + +* Add async context manager return types ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add severity to structured log write ([#783](https://github.com/googleapis/python-logging/issues/783)) ([31a7f69](https://github.com/googleapis/python-logging/commit/31a7f69ed94719546136a3bf1b3ecdb28e369414)) +* Handle exceptions raised when fetching Django request data ([#758](https://github.com/googleapis/python-logging/issues/758)) ([5ecf886](https://github.com/googleapis/python-logging/commit/5ecf88606b4f29b00ff8b18ae71c151d203d5c3b)) +* Unintended exception omittion ([#736](https://github.com/googleapis/python-logging/issues/736)) ([022dc54](https://github.com/googleapis/python-logging/commit/022dc545f781648043296b3ca04d835fcb6f1d7e)) + + +### Documentation + +* Documentation for the Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Minor formatting ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) + ## [3.6.0](https://github.com/googleapis/python-logging/compare/v3.5.0...v3.6.0) (2023-07-05) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index d2952231400a..a845974e4da2 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.6.0" # {x-release-please-version} +__version__ = "3.7.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index d2952231400a..a845974e4da2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.6.0" # {x-release-please-version} +__version__ = "3.7.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..b44c07e7798b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.7.0" }, "snippets": [ { From af14071b032b1194c44e47e99c5781fa450a8c94 Mon Sep 17 00:00:00 2001 From: Aaron Lerner Date: Fri, 29 Sep 2023 12:52:39 -0700 Subject: [PATCH 758/855] feat: add cloud_run_job monitored resource type. 
(#788) --- .../handlers/_monitored_resources.py | 41 ++++++++++++++++--- .../handlers/test__monitored_resources.py | 40 +++++++++++++++--- 2 files changed, 70 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index a5b8dfee3269..0d94450ceb3c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -26,11 +26,21 @@ _CLOUD_RUN_SERVICE_ID = "K_SERVICE" _CLOUD_RUN_REVISION_ID = "K_REVISION" _CLOUD_RUN_CONFIGURATION_ID = "K_CONFIGURATION" -_CLOUD_RUN_ENV_VARS = [ +_CLOUD_RUN_SERVICE_ENV_VARS = [ _CLOUD_RUN_SERVICE_ID, _CLOUD_RUN_REVISION_ID, _CLOUD_RUN_CONFIGURATION_ID, ] +_CLOUD_RUN_JOB_ID = "CLOUD_RUN_JOB" +_CLOUD_RUN_EXECUTION_ID = "CLOUD_RUN_EXECUTION" +_CLOUD_RUN_TASK_INDEX = "CLOUD_RUN_TASK_INDEX" +_CLOUD_RUN_TASK_ATTEMPT = "CLOUD_RUN_TASK_ATTEMPT" +_CLOUD_RUN_JOB_ENV_VARS = [ + _CLOUD_RUN_JOB_ID, + _CLOUD_RUN_EXECUTION_ID, + _CLOUD_RUN_TASK_INDEX, + _CLOUD_RUN_TASK_ATTEMPT, +] """Environment variables set in Cloud Run environment.""" _FUNCTION_TARGET = "FUNCTION_TARGET" @@ -118,8 +128,8 @@ def _create_compute_resource(): return resource -def _create_cloud_run_resource(): - """Create a standardized Cloud Run resource. +def _create_cloud_run_service_resource(): + """Create a standardized Cloud Run service resource. Returns: google.cloud.logging.Resource """ @@ -138,6 +148,24 @@ def _create_cloud_run_resource(): return resource +def _create_cloud_run_job_resource(): + """Create a standardized Cloud Run job resource. + Returns: + google.cloud.logging.Resource + """ + region = retrieve_metadata_server(_REGION_ID) + project = retrieve_metadata_server(_PROJECT_NAME) + resource = Resource( + type="cloud_run_job", + labels={ + "project_id": project if project else "", + "job_name": os.environ.get(_CLOUD_RUN_JOB_ID, ""), + "location": region.split("/")[-1] if region else "", + }, + ) + return resource + + def _create_app_engine_resource(): """Create a standardized App Engine resource. 
Returns: @@ -190,9 +218,12 @@ def detect_resource(project=""): ): # Cloud Functions return _create_functions_resource() - elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]): + elif all([env in os.environ for env in _CLOUD_RUN_SERVICE_ENV_VARS]): + # Cloud Run + return _create_cloud_run_service_resource() + elif all([env in os.environ for env in _CLOUD_RUN_JOB_ENV_VARS]): # Cloud Run - return _create_cloud_run_resource() + return _create_cloud_run_job_resource() elif gce_instance_name is not None: # Compute Engine return _create_compute_resource() diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index 3c62cba88958..16378fd50995 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -28,7 +28,10 @@ _create_kubernetes_resource, ) from google.cloud.logging_v2.handlers._monitored_resources import ( - _create_cloud_run_resource, + _create_cloud_run_service_resource, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_cloud_run_job_resource, ) from google.cloud.logging_v2.handlers._monitored_resources import ( _create_compute_resource, @@ -160,7 +163,7 @@ def test_compute_resource(self): self.assertEqual(resource.labels["instance_id"], self.NAME) self.assertEqual(resource.labels["zone"], self.LOCATION) - def test_cloud_run_resource(self): + def test_cloud_run_service_resource(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", wraps=self._mock_metadata, @@ -169,7 +172,7 @@ def test_cloud_run_resource(self): os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG with patch: - resource = _create_cloud_run_resource() + resource = _create_cloud_run_service_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_run_revision") self.assertEqual(resource.labels["project_id"], self.PROJECT) @@ -178,6 +181,23 @@ def test_cloud_run_resource(self): self.assertEqual(resource.labels["configuration_name"], self.CONFIG) self.assertEqual(resource.labels["location"], self.LOCATION) + def test_cloud_run_job_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + os.environ[_monitored_resources._CLOUD_RUN_JOB_ID] = self.NAME + os.environ[_monitored_resources._CLOUD_RUN_EXECUTION_ID] = self.VERSION + os.environ[_monitored_resources._CLOUD_RUN_TASK_INDEX] = self.CONFIG + os.environ[_monitored_resources._CLOUD_RUN_TASK_ATTEMPT] = self.CLUSTER + with patch: + resource = _create_cloud_run_job_resource() + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_run_job") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["job_name"], self.NAME) + self.assertEqual(resource.labels["location"], self.LOCATION) + def test_app_engine_resource(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", @@ -214,7 +234,8 @@ def test_with_no_project_from_server(self): resource_fns = [ _global_resource_patched, _create_app_engine_resource, - _create_cloud_run_resource, + _create_cloud_run_service_resource, + _create_cloud_run_job_resource, 
_create_compute_resource, _create_kubernetes_resource, _create_functions_resource, @@ -285,13 +306,20 @@ def test_detect_legacy_functions(self): self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_function") - def test_detect_cloud_run(self): - for env in _monitored_resources._CLOUD_RUN_ENV_VARS: + def test_detect_cloud_run_service(self): + for env in _monitored_resources._CLOUD_RUN_SERVICE_ENV_VARS: os.environ[env] = "TRUE" resource = detect_resource(self.PROJECT) self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_run_revision") + def test_detect_cloud_run_job(self): + for env in _monitored_resources._CLOUD_RUN_JOB_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_run_job") + def test_detect_compute_engine(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", From db770062982414b963ac97d0b7dc16292ea37d39 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 12:10:44 -0700 Subject: [PATCH 759/855] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#790) --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-logging/.gitignore | 1 + .../.kokoro/requirements.txt | 49 ++++++++++--------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index a3da1b0d4cd3..a9bdb1b7ac0f 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
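
[Editor's note on the #788 change above] The new detection logic keys entirely off environment variables that Cloud Run injects: K_SERVICE/K_REVISION/K_CONFIGURATION for services, and the four CLOUD_RUN_* variables for jobs. A minimal sketch of that branching, using only the standard library (the real detect_resource() additionally queries the metadata server for project and region):

    import os

    _SERVICE_VARS = ("K_SERVICE", "K_REVISION", "K_CONFIGURATION")
    _JOB_VARS = ("CLOUD_RUN_JOB", "CLOUD_RUN_EXECUTION",
                 "CLOUD_RUN_TASK_INDEX", "CLOUD_RUN_TASK_ATTEMPT")

    def cloud_run_resource_type():
        # Services are checked before jobs, mirroring the order in detect_resource().
        if all(var in os.environ for var in _SERVICE_VARS):
            return "cloud_run_revision"
        if all(var in os.environ for var in _JOB_VARS):
            return "cloud_run_job"
        return None  # not running on Cloud Run
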
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/packages/google-cloud-logging/.gitignore b/packages/google-cloud-logging/.gitignore index b4243ced74e4..d083ea1ddc3e 100644 --- a/packages/google-cloud-logging/.gitignore +++ b/packages/google-cloud-logging/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 029bd342de94..96d593c8c82a 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + 
--hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 838336fcd4f3003293ffaed2c7e8a94dffdda7c8 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Oct 2023 21:43:03 +0200 Subject: [PATCH 760/855] chore(deps): update dependency google-cloud-logging to v3.7.0 (#787) Co-authored-by: Daniel Sanche --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0da7130555f9..9af0e5d4c8e2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.6.0 +google-cloud-logging==3.7.0 google-cloud-bigquery==3.11.4 google-cloud-storage==2.11.0 google-cloud-pubsub==2.18.4 From 21a5a7c1d79719c62b9e19856b563ad6a9a1006c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Oct 2023 23:42:57 +0200 Subject: [PATCH 761/855] chore(deps): update dependency google-cloud-bigquery to v3.12.0 (#792) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 9af0e5d4c8e2..618f938574d7 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.7.0 -google-cloud-bigquery==3.11.4 +google-cloud-bigquery==3.12.0 google-cloud-storage==2.11.0 google-cloud-pubsub==2.18.4 From 
5da51456e0fe8380e572521d197e5418f8e0989b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Oct 2023 14:56:13 -0700 Subject: [PATCH 762/855] chore(main): release 3.8.0 (#789) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Daniel Sanche --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 2e30867768ef..fa4291eb23d2 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.7.0" + ".": "3.8.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 219c978d73b5..d8cfadcaa434 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.8.0](https://github.com/googleapis/python-logging/compare/v3.7.0...v3.8.0) (2023-10-03) + + +### Features + +* Add cloud_run_job monitored resource type. ([#788](https://github.com/googleapis/python-logging/issues/788)) ([3b310d6](https://github.com/googleapis/python-logging/commit/3b310d68b68df5bb31e21ac30b23207ef50c3f6f)) + ## [3.7.0](https://github.com/googleapis/python-logging/compare/v3.6.0...v3.7.0) (2023-09-25) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index a845974e4da2..4052fbb0c7fc 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.7.0" # {x-release-please-version} +__version__ = "3.8.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index a845974e4da2..4052fbb0c7fc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.7.0" # {x-release-please-version} +__version__ = "3.8.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b44c07e7798b..6c11ae7e588c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.7.0" + "version": "3.8.0" }, "snippets": [ { From 9e2cb6f10e5fd9761138451cda0146972c0a6e6d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:52:05 -0400 Subject: [PATCH 763/855] chore: [autoapprove] Update `black` and `isort` to latest versions (#795) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- .../.kokoro/requirements.txt | 6 ++-- .../.pre-commit-config.yaml | 2 +- .../google/cloud/logging_v2/_http.py | 1 - .../cloud/logging_v2/handlers/handlers.py | 2 +- packages/google-cloud-logging/noxfile.py | 34 ++++++++++--------- .../tests/system/test_system.py | 1 - .../handlers/test__monitored_resources.py | 3 -- .../tests/unit/handlers/test_handlers.py | 4 +-- .../unit/handlers/test_structured_log.py | 14 ++++---- .../unit/handlers/transports/test_base.py | 1 - .../unit/handlers/transports/test_sync.py | 1 - .../tests/unit/test__http.py | 5 --- .../tests/unit/test__instrumentation.py | 1 - .../tests/unit/test_client.py | 2 -- .../tests/unit/test_entries.py | 4 --- .../tests/unit/test_logger.py | 5 --- .../tests/unit/test_metric.py | 1 - .../tests/unit/test_sink.py | 1 - 19 files changed, 33 insertions(+), 59 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index a9bdb1b7ac0f..dd98abbdeebe 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 96d593c8c82a..0332d3267e15 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 19409cbd37a4..6a8e16950664 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py index 581dce35edd7..b90789353dc9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -26,7 +26,6 @@ class Connection(_http.JSONConnection): - DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" def __init__(self, client, *, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 28960ae71ca4..ce5822fcd17a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -70,7 +70,7 @@ def _infer_source_location(record): ("function", "funcName"), ] output = {} - for (gcp_name, std_lib_name) in name_map: + for gcp_name, std_lib_name in name_map: value = getattr(record, std_lib_name, None) if value is not None: output[gcp_name] = value diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 7ebe500a312b..565df040b9bd 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -17,22 +17,24 @@ # Generated by synthtool. DO NOT EDIT! 
from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,32 +42,32 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "flask", "webob", "django", ] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "google-cloud-bigquery", "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", ] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -78,6 +80,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -196,7 +199,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
# See https://github.com/grpc/grpc/issues/32163 diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 8d39408d3895..ba7fd6c2addb 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -118,7 +118,6 @@ def setUpModule(): class TestLogging(unittest.TestCase): - JSON_PAYLOAD = { "message": "System test: test_log_struct", "weather": { diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index 16378fd50995..838543253a4d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -45,7 +45,6 @@ class Test_Create_Resources(unittest.TestCase): - PROJECT = "test-project" LOCATION = "test-location" NAME = "test-name" @@ -135,7 +134,6 @@ def test_functions_resource_no_name(self): self.assertEqual(func_resource.labels["function_name"], "") def test_create_kubernetes_resource(self): - patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", wraps=self._mock_metadata, @@ -246,7 +244,6 @@ def test_with_no_project_from_server(self): class Test_Resource_Detection(unittest.TestCase): - PROJECT = "test-project" def _mock_k8s_metadata(self, endpoint): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 1e431f1aab30..1f86a8e37564 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -25,7 +25,6 @@ class TestCloudLoggingFilter(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -291,7 +290,6 @@ def test_user_overrides(self): class TestCloudLoggingHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -859,7 +857,7 @@ def test_json_fields_input_unmodified(self): _format_and_parse_message(record, handler) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], f"expected_payload[{key}] != result[{key}]" ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 353530ed1a7e..fc6b7c598406 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -86,7 +86,7 @@ def test_format(self): } handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) self.assertEqual( len(expected_payload.keys()), @@ -121,7 +121,7 @@ def test_format_minimal(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -304,7 +304,7 @@ def 
test_format_with_reserved_json_field(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -417,7 +417,7 @@ def test_format_with_request(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_traceparent(self): @@ -452,7 +452,7 @@ def test_format_with_traceparent(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_overrides(self): @@ -509,7 +509,7 @@ def test_format_overrides(self): ) handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_json_fields(self): @@ -590,7 +590,7 @@ def test_json_fields_input_unmodified(self): handler.format(record) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], f"expected_payload[{key}] != result[{key}]" ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index 71ef1366a9fd..a0013cadf14b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -16,7 +16,6 @@ class TestBaseHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py index 752a96d9fa8f..01a949d246b2 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_sync.py @@ -17,7 +17,6 @@ class TestSyncHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index f9b60cfa6040..0e83bd82cca4 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -24,7 +24,6 @@ def _make_credentials(): class TestConnection(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" @@ -96,7 +95,6 @@ def test_extra_headers(self): class Test_LoggingAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" LIST_ENTRIES_PATH = "entries:list" @@ -354,7 +352,6 @@ def test_logger_delete(self): class Test_SinksAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" FILTER = "logName:syslog AND severity>=ERROR" @@ -636,7 +633,6 @@ def test_sink_delete_hit(self): class Test_MetricsAPI(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" LIST_METRICS_PATH = 
"projects/%s/metrics" % (PROJECT,) @@ -865,7 +861,6 @@ def test_metric_delete_hit(self): class _Connection(object): - _called_with = None _raise_conflict = False diff --git a/packages/google-cloud-logging/tests/unit/test__instrumentation.py b/packages/google-cloud-logging/tests/unit/test__instrumentation.py index dc330b0ca21a..a98aae34c3d9 100644 --- a/packages/google-cloud-logging/tests/unit/test__instrumentation.py +++ b/packages/google-cloud-logging/tests/unit/test__instrumentation.py @@ -17,7 +17,6 @@ class TestInstrumentation(unittest.TestCase): - TEST_NAME = "python" # LONG_NAME > 14 characters LONG_NAME = TEST_NAME + "789ABCDEF" diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 1c47a343b148..ec3130ac5aca 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -34,7 +34,6 @@ def _make_credentials(): class TestClient(unittest.TestCase): - PROJECT = "PROJECT" PROJECT_PATH = f"projects/{PROJECT}" LOGGER_NAME = "LOGGER_NAME" @@ -903,7 +902,6 @@ def test_setup_logging_w_extra_kwargs(self): class _Connection(object): - _called_with = None def __init__(self, *responses): diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 6f3af684fe6e..4742f55742d6 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -79,7 +79,6 @@ def test_w_str(self): class TestLogEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -469,7 +468,6 @@ def test_to_api_repr_explicit(self): class TestTextEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -557,7 +555,6 @@ def test_to_api_repr_explicit(self): class TestStructEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -659,7 +656,6 @@ def test_to_api_repr_explicit(self): class TestProtobufEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" diff --git a/packages/google-cloud-logging/tests/unit/test_logger.py b/packages/google-cloud-logging/tests/unit/test_logger.py index 16c89959bb7b..cdb56747ddbf 100644 --- a/packages/google-cloud-logging/tests/unit/test_logger.py +++ b/packages/google-cloud-logging/tests/unit/test_logger.py @@ -28,7 +28,6 @@ def _make_credentials(): class TestLogger(unittest.TestCase): - PROJECT = "test-project" LOGGER_NAME = "logger-name" TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' @@ -1086,7 +1085,6 @@ def test_first_log_emits_instrumentation(self): class TestBatch(unittest.TestCase): - PROJECT = "test-project" @staticmethod @@ -1847,7 +1845,6 @@ def test_batch_error_gets_context(self): class _Logger(object): - labels = None def __init__(self, name="NAME", project="PROJECT"): @@ -1855,7 +1852,6 @@ def __init__(self, name="NAME", project="PROJECT"): class _DummyLoggingAPI(object): - _write_entries_called_with = None def write_entries( @@ -1909,7 +1905,6 @@ class _Bugout(Exception): class _Connection(object): - _called_with = None def __init__(self, *responses): diff --git a/packages/google-cloud-logging/tests/unit/test_metric.py b/packages/google-cloud-logging/tests/unit/test_metric.py index 83b49d02dfa1..f36ae3b2a32a 100644 --- a/packages/google-cloud-logging/tests/unit/test_metric.py +++ b/packages/google-cloud-logging/tests/unit/test_metric.py @@ -16,7 +16,6 @@ class TestMetric(unittest.TestCase): - PROJECT = "test-project" 
METRIC_NAME = "metric-name" FULL_METRIC_NAME = f"projects/{PROJECT}/metrics/{METRIC_NAME}" diff --git a/packages/google-cloud-logging/tests/unit/test_sink.py b/packages/google-cloud-logging/tests/unit/test_sink.py index 1e4852ab523b..b5005b057ab7 100644 --- a/packages/google-cloud-logging/tests/unit/test_sink.py +++ b/packages/google-cloud-logging/tests/unit/test_sink.py @@ -16,7 +16,6 @@ class TestSink(unittest.TestCase): - PROJECT = "test-project" PROJECT_PATH = f"projects/{PROJECT}" SINK_NAME = "sink-name" From c14ce694e61629812b6a13acf92b388fa8775f79 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 11 Oct 2023 00:16:07 +0200 Subject: [PATCH 764/855] chore(deps): update dependency google-cloud-logging to v3.8.0 (#793) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 618f938574d7..a8c7b99314b4 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.7.0 +google-cloud-logging==3.8.0 google-cloud-bigquery==3.12.0 google-cloud-storage==2.11.0 google-cloud-pubsub==2.18.4 From f3aff8c5099a4b422883fd9448dce3be88f1b3d7 Mon Sep 17 00:00:00 2001 From: gkevinzheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:02:09 -0400 Subject: [PATCH 765/855] fix: Updated protobuf JSON formatting to support nested protobufs (#797) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Updated protobuf JSON formatting to support nested protobufs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Cleaner way to differentiate between proto objects and dict objects in to_api_repr * Fixed unused import. * Fixed failing unit test. 
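
[Editor's note on #797] After this fix, to_api_repr serializes any protobuf Message payload with MessageToDict, which recurses into nested messages such as the requestMetadata block exercised by the new system test. A small illustration, assuming only the protobuf well-known types:

    from google.protobuf.json_format import MessageToDict
    from google.protobuf.struct_pb2 import Struct, Value

    inner = Struct(fields={"callerIp": Value(string_value="127.0.0.1")})
    payload = Struct(fields={"requestMetadata": Value(struct_value=inner)})

    # MessageToDict recurses into the nested Struct, matching the JSON
    # shape the Logging API expects for protoPayload:
    print(MessageToDict(payload))
    # -> {'requestMetadata': {'callerIp': '127.0.0.1'}}
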
--------- Co-authored-by: Owl Bot --- .../google/cloud/logging_v2/entries.py | 12 +++--- .../tests/system/test_system.py | 7 ++++ .../tests/unit/test_entries.py | 39 +++++++++++++++++++ 3 files changed, 52 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py index 9db020f67ffc..d8a877738307 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/entries.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/entries.py @@ -18,9 +18,9 @@ import json import re -from google.protobuf.any_pb2 import Any from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse +from google.protobuf.message import Message from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _name_from_project_path @@ -325,7 +325,7 @@ def _extract_payload(cls, resource): @property def payload_pb(self): - if isinstance(self.payload, Any): + if isinstance(self.payload, Message): return self.payload @property @@ -337,10 +337,10 @@ def to_api_repr(self): """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() proto_payload = None - if self.payload_json: - proto_payload = dict(self.payload_json) - elif self.payload_pb: - proto_payload = MessageToDict(self.payload_pb) + if self.payload_pb: + proto_payload = MessageToDict(self.payload) + elif self.payload_json: + proto_payload = dict(self.payload) info["protoPayload"] = proto_payload return info diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index ba7fd6c2addb..0d39aa0a95ce 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -192,6 +192,7 @@ def test_list_entry_with_auditlog(self): "methodName": "test", "resourceName": "test", "serviceName": "test", + "requestMetadata": {"callerIp": "127.0.0.1"}, } audit_struct = self._dict_to_struct(audit_dict) @@ -223,6 +224,12 @@ def test_list_entry_with_auditlog(self): protobuf_entry.to_api_repr()["protoPayload"]["methodName"], audit_dict["methodName"], ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["requestMetadata"][ + "callerIp" + ], + audit_dict["requestMetadata"]["callerIp"], + ) def test_list_entry_with_requestlog(self): """ diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index 4742f55742d6..e7bf30d87fd7 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -739,6 +739,45 @@ def test_to_api_repr_proto_defaults(self): } self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_inner_struct_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + inner_struct = Struct(fields={"foo": Value(string_value="bar")}) + message = Struct(fields={"inner": Value(struct_value=inner_struct)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def 
test_to_api_repr_proto_inner_list_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import ListValue + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + lines = ListValue( + values=[Value(string_value="line1"), Value(string_value="line2")] + ) + message = Struct(fields={"lines": Value(list_value=lines)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_explicit(self): import datetime from google.protobuf.json_format import MessageToDict From e2a77c585e738987f00ca521117470d3fad8a543 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 14:10:34 -0700 Subject: [PATCH 766/855] chore: rename rst files to avoid conflict with service names (#800) Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index dd98abbdeebe..7f291dbd5f9b 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 0332d3267e15..16170d0ca7b8 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From 80f87075f50bfb8d114340f589363114da8a946e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Nov 2023 17:17:40 +0100 Subject: [PATCH 767/855] chore(deps): update all dependencies (#796) --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 1779d47d2c8f..908e344b5dde 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.4.2 +pytest==7.4.3 diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index a8c7b99314b4..0b0ff4e678df 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.8.0 -google-cloud-bigquery==3.12.0 -google-cloud-storage==2.11.0 +google-cloud-bigquery==3.13.0 +google-cloud-storage==2.13.0 google-cloud-pubsub==2.18.4 From 1ca6ee45e240c6b19cd68a9e8a4c5fb27679f6cc Mon Sep 17 00:00:00 2001 From: gkevinzheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:07:12 -0400 Subject: [PATCH 768/855] fix: Fixed object paths in autogenerated code in owlbot.py (#804) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Fixed object paths in autogenerated code in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-logging/owlbot.py | 105 ++++++++++++++++++ ...onfig_service_v2_copy_log_entries_async.py | 4 +- ...config_service_v2_copy_log_entries_sync.py | 4 +- ...d_config_service_v2_create_bucket_async.py | 4 +- ...ig_service_v2_create_bucket_async_async.py | 4 +- ...fig_service_v2_create_bucket_async_sync.py | 4 +- ...ed_config_service_v2_create_bucket_sync.py | 4 +- ...onfig_service_v2_create_exclusion_async.py | 6 +- 
...config_service_v2_create_exclusion_sync.py | 6 +- ...ted_config_service_v2_create_link_async.py | 4 +- ...ated_config_service_v2_create_link_sync.py | 4 +- ...ted_config_service_v2_create_sink_async.py | 6 +- ...ated_config_service_v2_create_sink_sync.py | 6 +- ...ted_config_service_v2_create_view_async.py | 4 +- ...ated_config_service_v2_create_view_sync.py | 4 +- ...d_config_service_v2_delete_bucket_async.py | 4 +- ...ed_config_service_v2_delete_bucket_sync.py | 4 +- ...onfig_service_v2_delete_exclusion_async.py | 4 +- ...config_service_v2_delete_exclusion_sync.py | 4 +- ...ted_config_service_v2_delete_link_async.py | 4 +- ...ated_config_service_v2_delete_link_sync.py | 4 +- ...ted_config_service_v2_delete_sink_async.py | 4 +- ...ated_config_service_v2_delete_sink_sync.py | 4 +- ...ted_config_service_v2_delete_view_async.py | 4 +- ...ated_config_service_v2_delete_view_sync.py | 4 +- ...ated_config_service_v2_get_bucket_async.py | 4 +- ...rated_config_service_v2_get_bucket_sync.py | 4 +- ...nfig_service_v2_get_cmek_settings_async.py | 4 +- ...onfig_service_v2_get_cmek_settings_sync.py | 4 +- ...d_config_service_v2_get_exclusion_async.py | 4 +- ...ed_config_service_v2_get_exclusion_sync.py | 4 +- ...erated_config_service_v2_get_link_async.py | 4 +- ...nerated_config_service_v2_get_link_sync.py | 4 +- ...ed_config_service_v2_get_settings_async.py | 4 +- ...ted_config_service_v2_get_settings_sync.py | 4 +- ...erated_config_service_v2_get_sink_async.py | 4 +- ...nerated_config_service_v2_get_sink_sync.py | 4 +- ...erated_config_service_v2_get_view_async.py | 4 +- ...nerated_config_service_v2_get_view_sync.py | 4 +- ...ed_config_service_v2_list_buckets_async.py | 4 +- ...ted_config_service_v2_list_buckets_sync.py | 4 +- ...config_service_v2_list_exclusions_async.py | 4 +- ..._config_service_v2_list_exclusions_sync.py | 4 +- ...ated_config_service_v2_list_links_async.py | 4 +- ...rated_config_service_v2_list_links_sync.py | 4 +- ...ated_config_service_v2_list_sinks_async.py | 4 +- ...rated_config_service_v2_list_sinks_sync.py | 4 +- ...ated_config_service_v2_list_views_async.py | 4 +- ...rated_config_service_v2_list_views_sync.py | 4 +- ...config_service_v2_undelete_bucket_async.py | 4 +- ..._config_service_v2_undelete_bucket_sync.py | 4 +- ...d_config_service_v2_update_bucket_async.py | 4 +- ...ig_service_v2_update_bucket_async_async.py | 4 +- ...fig_service_v2_update_bucket_async_sync.py | 4 +- ...ed_config_service_v2_update_bucket_sync.py | 4 +- ...g_service_v2_update_cmek_settings_async.py | 4 +- ...ig_service_v2_update_cmek_settings_sync.py | 4 +- ...onfig_service_v2_update_exclusion_async.py | 6 +- ...config_service_v2_update_exclusion_sync.py | 6 +- ...config_service_v2_update_settings_async.py | 4 +- ..._config_service_v2_update_settings_sync.py | 4 +- ...ted_config_service_v2_update_sink_async.py | 6 +- ...ated_config_service_v2_update_sink_sync.py | 6 +- ...ted_config_service_v2_update_view_async.py | 4 +- ...ated_config_service_v2_update_view_sync.py | 4 +- ...ted_logging_service_v2_delete_log_async.py | 4 +- ...ated_logging_service_v2_delete_log_sync.py | 4 +- ...gging_service_v2_list_log_entries_async.py | 4 +- ...ogging_service_v2_list_log_entries_sync.py | 4 +- ...ated_logging_service_v2_list_logs_async.py | 4 +- ...rated_logging_service_v2_list_logs_sync.py | 4 +- ...st_monitored_resource_descriptors_async.py | 4 +- ...ist_monitored_resource_descriptors_sync.py | 4 +- ...gging_service_v2_tail_log_entries_async.py | 6 +- ...ogging_service_v2_tail_log_entries_sync.py | 6 +- 
...ging_service_v2_write_log_entries_async.py | 6 +- ...gging_service_v2_write_log_entries_sync.py | 6 +- ...rics_service_v2_create_log_metric_async.py | 6 +- ...trics_service_v2_create_log_metric_sync.py | 6 +- ...rics_service_v2_delete_log_metric_async.py | 4 +- ...trics_service_v2_delete_log_metric_sync.py | 4 +- ...metrics_service_v2_get_log_metric_async.py | 4 +- ..._metrics_service_v2_get_log_metric_sync.py | 4 +- ...trics_service_v2_list_log_metrics_async.py | 4 +- ...etrics_service_v2_list_log_metrics_sync.py | 4 +- ...rics_service_v2_update_log_metric_async.py | 6 +- ...trics_service_v2_update_log_metric_sync.py | 6 +- 87 files changed, 293 insertions(+), 188 deletions(-) diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 3e932c854938..8e3057207574 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import glob import json import os import shutil @@ -139,6 +140,110 @@ def place_before(path, text, *before_text, escape=None): python.py_samples() +# For autogenerated sample code, resolve object paths by finding the specific subpackage +# the object belongs to. This is because we leave out all autogenerated packages from the +# __init__.py of logging_v2. For now, this is manually copy-pasted from the __all__s of each +# subpackage's __init__.py. +gapic_objects = { + "logging_v2.services.config_service_v2": [ + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient" + ], + "logging_v2.services.logging_service_v2": [ + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient" + ], + "logging_v2.services.metrics_service_v2": [ + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient" + ], + "logging_v2.types": [ + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + 
"GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest" + ] +} + +sample_files = glob.glob("samples/generated_samples/logging_v2_*.py") +for subpackage_name in gapic_objects: + for object_name in gapic_objects[subpackage_name]: + text = "logging_v2." + object_name + replacement = subpackage_name + "." + object_name + s.replace(sample_files, text, replacement) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index 806e937ddae0..993293752c38 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -36,10 +36,10 @@ async def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index ca0209f00fcb..b95b83ab4f96 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -36,10 +36,10 @@ def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index c1f028fb0464..089263531030 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -36,10 +36,10 @@ async def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 8fe42df3c81a..8d55ee0bfea5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -36,10 +36,10 @@ async def sample_create_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 1ce698784552..9b71e2d74123 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -36,10 +36,10 @@ def sample_create_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index dc73253f4897..111a2d272676 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -36,10 +36,10 @@ def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 17490c61ef37..b592719058ba 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -36,14 +36,14 @@ async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" 
exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 75ec32f48eeb..a3b20a5f3a35 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -36,14 +36,14 @@ def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index 8ceb5298553a..c130fe56dd93 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -36,10 +36,10 @@ async def sample_create_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateLinkRequest( + request = logging_v2.types.CreateLinkRequest( parent="parent_value", link_id="link_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index 604ff66269c8..ce3bbfd12de8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -36,10 +36,10 @@ def sample_create_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateLinkRequest( + request = logging_v2.types.CreateLinkRequest( parent="parent_value", link_id="link_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 277e83055225..c4deb526b3b5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -36,14 +36,14 @@ async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index a4df0299426a..16db9a155869 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -36,14 +36,14 @@ def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 5cd201276977..8eaba2353bd4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -36,10 +36,10 @@ async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index cd3ca94e19c6..7f1f4a7dc6ac 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -36,10 +36,10 @@ def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index fcffb6db861d..cb409bf4bc5b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -36,10 +36,10 @@ async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index a8f902116832..a31d04ceb85d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -36,10 +36,10 @@ def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index b426d4703e1c..6bd56016a3f5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -36,10 +36,10 @@ async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 5d98f782bde2..66c82b08b68a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -36,10 +36,10 @@ def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 8c7a934a735d..9c47004edf17 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -36,10 +36,10 @@ async def sample_delete_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLinkRequest( + request = logging_v2.types.DeleteLinkRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index dfa59b30742b..209651ad6d78 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -36,10 +36,10 @@ def sample_delete_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLinkRequest( + request = logging_v2.types.DeleteLinkRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 11d91947e3bc..d8b4f483242c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -36,10 +36,10 @@ async def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index bf9875b0aa94..947fdf52df7c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -36,10 +36,10 @@ def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index fe9c7031ef46..1fe4e6dae87b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -36,10 +36,10 @@ async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index b5539c04f291..6416ff773e33 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -36,10 +36,10 @@ def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index c29c35a4e213..11ce2f13ac02 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -36,10 +36,10 @@ async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 818ab646fdf3..ac8db344413a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -36,10 +36,10 @@ def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 3ae13401d749..660759e092de 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -36,10 +36,10 @@ async def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 499d68bdbf44..eedf30d59132 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -36,10 +36,10 @@ def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 03b7dc7d8f71..a296e0bdd504 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -36,10 +36,10 @@ async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index a445ed396c8f..bd47eede1dcd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -36,10 +36,10 @@ def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index ddc3d131f4c7..efc87806ddbe 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -36,10 +36,10 @@ async def sample_get_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetLinkRequest( + request = logging_v2.types.GetLinkRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 3a7643b3a273..8db2ca31071d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -36,10 +36,10 @@ def sample_get_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetLinkRequest( + request = logging_v2.types.GetLinkRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index 4ee968e8155d..0eb6fb853d33 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -36,10 +36,10 @@ async def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index a3e018440c2a..b0290a2fb0a7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -36,10 +36,10 @@ def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 
b6fe5b11e08f..694d6ddabc8a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -36,10 +36,10 @@ async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ecebaf1194ff..2a0f1c100e48 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -36,10 +36,10 @@ def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5992e53ee4aa..f0438a0a1a7d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -36,10 +36,10 @@ async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 14d8679bce7f..f0e60b745a29 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -36,10 +36,10 @@ def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index dfbc3b411480..883810c4ed62 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -36,10 +36,10 @@ async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 8626f7cae9fd..641d8f6b613c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -36,10 +36,10 @@ def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index ad42edff534e..444ca9c5be28 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -36,10 +36,10 @@ async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 727723a7a559..ec66239ed7e6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -36,10 +36,10 @@ def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index 7eccffaa6bae..cad31c4d4345 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -36,10 +36,10 @@ async def sample_list_links(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLinksRequest( + request = logging_v2.types.ListLinksRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index a2f98d69d320..ec752eda86b2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -36,10 +36,10 @@ def sample_list_links(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLinksRequest( + request = logging_v2.types.ListLinksRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index b642d38eec23..83754a2383a3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -36,10 +36,10 @@ async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index b4fc92452254..d79a68b0256e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -36,10 +36,10 @@ def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 1542a5a387f2..1a36ac6659b6 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -36,10 +36,10 @@ async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index b273c465d3ec..1fdb4e9c1513 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -36,10 +36,10 @@ def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index d2695708ddd3..52001dd745b7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -36,10 +36,10 @@ async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 8d25c7d33f73..9e04ebadc403 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -36,10 +36,10 @@ def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index e1c741b67075..6bebb379278f 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -36,10 +36,10 @@ async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 7dde59dcdd4f..8f0b5b1077d0 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -36,10 +36,10 @@ async def sample_update_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 2ecaf8df26dd..7c6c3716052f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -36,10 +36,10 @@ def sample_update_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 7b4a3c597f2b..d1f37e9299c3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -36,10 +36,10 @@ def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 
96fc8ff9788c..89fb901e5cdc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -36,10 +36,10 @@ async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 9bbc7dcb1c5d..31b5415fc6e5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -36,10 +36,10 @@ def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index d8b8d7f7bca5..7df03d1e77cd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -36,14 +36,14 @@ async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 36d5776e36dc..cc17ec23c4de 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -36,14 +36,14 @@ def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" 
exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index b51dd81cc946..1242c3cfb0b1 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -36,10 +36,10 @@ async def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 1e7aefce8f6a..3edc24c9693f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -36,10 +36,10 @@ def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index aef8473798e3..d9739167724e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -36,14 +36,14 @@ async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index e84230202450..ec2ff7fbf6a6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -36,14 +36,14 @@ def 
sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index f143a772c3d3..949b9d98b71c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -36,10 +36,10 @@ async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 3867d3702391..53890848417a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -36,10 +36,10 @@ def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index c6469053baa4..7032872fabdf 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -36,10 +36,10 @@ async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 1e4e28abc08c..12124e531d12 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -36,10 +36,10 @@ def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d5cfe190c8dd..e310819b79da 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -36,10 +36,10 @@ async def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d24923cb1e75..7e20ad1659de 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -36,10 +36,10 @@ def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 71859024dbb7..3149daeb1a76 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -36,10 +36,10 @@ async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 5a5ff140c42a..04441e67173c 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -36,10 +36,10 @@ def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 519a2498ac3a..a1867444113f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -36,10 +36,10 @@ async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index ca97be4b3d86..399bf369a7c3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -36,10 +36,10 @@ def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 24e9e200951f..1ce36bba4bab 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -36,15 +36,15 @@ async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( 
resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index dc9a545e7c4b..1756dccec4f0 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -36,15 +36,15 @@ def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 1a0d48664303..eb377d2264b9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -36,13 +36,13 @@ async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( entries=entries, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index de4bfe6c822b..4d30f92fb122 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -36,13 +36,13 @@ def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( 
entries=entries, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 557d3229302a..9af902280e94 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -36,14 +36,14 @@ async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index b9e9cade9e94..a0a68cfedda6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -36,14 +36,14 @@ def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index fea40e7a4957..0d0f9f4c8cb6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -36,10 +36,10 @@ async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index ccf2983be6d8..5452c586f99e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ 
b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -36,10 +36,10 @@ def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 4b6984171895..53f9e5b06c2d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -36,10 +36,10 @@ async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index abb071c655da..26409d6d9c65 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -36,10 +36,10 @@ def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index f280ec9dea62..325cf4d44bbc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -36,10 +36,10 @@ async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index bffbe10a8eac..9442a7a01981 100644 --- 
a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -36,10 +36,10 @@ def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 59bfeeaaaa3f..047ae2c869eb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -36,14 +36,14 @@ async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index ed4dd0126e75..583fa4c7bafd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -36,14 +36,14 @@ def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) From 0cb01f34e36e660d5daf4668dbfdfe399608eebb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 21:23:28 -0400 Subject: [PATCH 769/855] chore: update docfx minimum Python version (#810) Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/noxfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml 
b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 7f291dbd5f9b..ec696b558c35 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 565df040b9bd..6f651e5ec1ec 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -310,7 +310,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From b15db3edcacfd3a5926ad2a869006cbbfb089647 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:36:56 -0500 Subject: [PATCH 770/855] chore: bump urllib3 from 1.26.12 to 1.26.18 (#811) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 532 +++++++++--------- 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index ec696b558c35..453b540c1e58 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
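The .kokoro/requirements.txt rewrite later in this hunk is machine-generated: the pins are rebuilt by running pip-tools over requirements.in, which is why every entry carries a full set of --hash lines and pip will refuse any artifact whose digest does not match. A minimal sketch of that regeneration step, assuming pip-tools is available in the environment; the flags mirror the header comment preserved in the diff:

    import subprocess

    # Rebuild the fully hashed pin file. "pip-compile" is pip-tools' CLI;
    # the flags match the generated header in .kokoro/requirements.txt.
    subprocess.run(
        ["pip-compile", "--allow-unsafe", "--generate-hashes", "requirements.in"],
        check=True,
    )
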
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 16170d0ca7b8..8957e21104e2 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - 
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - 
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + 
--hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - 
--hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - 
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + 
--hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - 
--hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + 
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - 
--hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # 
gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - 
--hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 +rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + 
--hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 8918c60e3e8a8f0480e0ba9dc37fba460c52a560 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 29 Nov 2023 12:41:31 -0500 Subject: [PATCH 771/855] feat: use native namespaces instead of pkg_resources (#812) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: use native namespaces instead of pkg_resources * linting * Added packaging test for native namespace support. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-logging/google/__init__.py | 22 -------- .../google/cloud/__init__.py | 22 -------- packages/google-cloud-logging/setup.py | 7 +-- .../tests/unit/test_packaging.py | 56 +++++++++++++++++++ 4 files changed, 57 insertions(+), 50 deletions(-) delete mode 100644 packages/google-cloud-logging/google/__init__.py delete mode 100644 packages/google-cloud-logging/google/cloud/__init__.py create mode 100644 packages/google-cloud-logging/tests/unit/test_packaging.py diff --git a/packages/google-cloud-logging/google/__init__.py b/packages/google-cloud-logging/google/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/packages/google-cloud-logging/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/google/cloud/__init__.py b/packages/google-cloud-logging/google/cloud/__init__.py deleted file mode 100644 index 0e1bc5131ba6..000000000000 --- a/packages/google-cloud-logging/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
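The pkg_resources shim deleted just below is what PEP 420 makes unnecessary: once google/ and google/cloud/ ship without an __init__.py, the import system treats them as implicit namespace packages and merges their contents across every installed distribution. A minimal runtime check, assuming google-cloud-logging is installed; these lines are illustrative and not part of the patch:

    import google
    import google.cloud

    # A PEP 420 namespace package has no loadable file of its own ...
    print(getattr(google, "__file__", None))   # None (or unset)
    # ... and its __path__ merges one entry per contributing distribution.
    print(list(google.__path__))
    print(list(google.cloud.__path__))
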
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index f43fd0bf9e52..e4a71277a795 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -55,14 +55,10 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -89,7 +85,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/google-cloud-logging/tests/unit/test_packaging.py b/packages/google-cloud-logging/tests/unit/test_packaging.py new file mode 100644 index 000000000000..4369ca2c1abe --- /dev/null +++ b/packages/google-cloud-logging/tests/unit/test_packaging.py @@ -0,0 +1,56 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. + + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + + google_otherpkg = tmp_path / "google" / "otherpkg" + google_otherpkg.mkdir() + google_otherpkg.joinpath("__init__.py").write_text("") + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. 
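# (Masking is the failure mode this test guards against: with the old
# setuptools-style namespaces, the installed ``google/__init__.py`` could
# shadow any other ``google/`` directory on PYTHONPATH.  Under PEP 420 the
# interpreter instead merges every ``google/`` directory it finds on
# sys.path into a single namespace package, which the subprocess imports at
# the end of this test exercise.)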
+ google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + + google_cloud_otherpkg = tmp_path / "google" / "cloud" / "otherpkg" + google_cloud_otherpkg.mkdir() + google_cloud_otherpkg.joinpath("__init__.py").write_text("") + + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + + for pkg in [ + "google.othermod", + "google.cloud.othermod", + "google.otherpkg", + "google.cloud.otherpkg", + "google.cloud.logging", + ]: + cmd = [sys.executable, "-c", f"import {pkg}"] + subprocess.check_output(cmd, env=env) + + for module in ["google.othermod", "google.cloud.othermod"]: + cmd = [sys.executable, "-m", module] + subprocess.check_output(cmd, env=env) From 1a35b6d83f6a97033ffde2dac56156aa8ca053c5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 15:19:46 -0500 Subject: [PATCH 772/855] feat: Add support for Python 3.12 (#813) * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * add trove classifier for python 3.12 * add python 3.12, and older, as a required check --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/sync-repo-settings.yaml | 4 ++ .../.kokoro/samples/python3.12/common.cfg | 59 +++++++++++++++++++ .../.kokoro/samples/python3.12/continuous.cfg | 6 ++ .../samples/python3.12/periodic-head.cfg | 11 ++++ .../.kokoro/samples/python3.12/periodic.cfg | 6 ++ .../.kokoro/samples/python3.12/presubmit.cfg | 6 ++ .../google-cloud-logging/CONTRIBUTING.rst | 6 +- packages/google-cloud-logging/noxfile.py | 2 +- .../samples/snippets/noxfile.py | 2 +- packages/google-cloud-logging/setup.py | 1 + 11 files changed, 101 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 453b540c1e58..eb4d9f794dc1 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml index 37438d33d4fa..439a0bcb7715 100644 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-logging/.github/sync-repo-settings.yaml @@ -12,3 +12,7 @@ branchProtectionRules: - 'Samples - Lint' - 'Samples - Python 3.7' - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000000..fb8ce87952aa --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000000..7e2973e3b659 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 6fa7a4dac6a4..f5be18c3dfcb 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -226,12 +226,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. 
_Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 6f651e5ec1ec..9cff1ae8492f 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -34,7 +34,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 1224cbe212e4..3b7135946fd5 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index e4a71277a795..978175d3a1b4 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -79,6 +79,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], From 6a116520bb21fe225e60b7b21c242909291241d8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 10:43:08 -0500 Subject: [PATCH 773/855] fix: use `retry_async` instead of `retry` in async client (#816) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.13.0 PiperOrigin-RevId: 586460538 Source-Link: https://github.com/googleapis/googleapis/commit/44582d0577fdc95dd2af37628a0569e16aac0bfe Source-Link: https://github.com/googleapis/googleapis-gen/commit/5e7073c9de847929c4ae97f8a444c3fca2d45a6b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWU3MDczYzlkZTg0NzkyOWM0YWU5N2Y4YTQ0NGMzZmNhMmQ0NWE2YiJ9 chore: Update gapic-generator-python to v1.12.0 PiperOrigin-RevId: 586356061 Source-Link: https://github.com/googleapis/googleapis/commit/72a1f55abaedbb62decd8ae8a44a4de223799c76 Source-Link: https://github.com/googleapis/googleapis-gen/commit/558a04bcd1cc0576e8fac1089e48e48b27ac161b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU4YTA0YmNkMWNjMDU3NmU4ZmFjMTA4OWU0OGU0OGIyN2FjMTYxYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 96 +++++++++---------- .../logging_service_v2/async_client.py | 42 ++++---- .../metrics_service_v2/async_client.py | 36 +++---- .../snippet_metadata_google.logging.v2.json | 2 +- .../logging_v2/test_config_service_v2.py | 6 +- .../logging_v2/test_logging_service_v2.py | 6 +- .../logging_v2/test_metrics_service_v2.py | 6 +- 7 files changed, 97 insertions(+), 97 deletions(-) diff --git 
a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index e066569f72e2..3962c40e9dca 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -294,7 +294,7 @@ async def sample_list_buckets(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -399,7 +399,7 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -486,7 +486,7 @@ async def sample_create_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -586,7 +586,7 @@ async def sample_update_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -679,7 +679,7 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -764,7 +764,7 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -845,7 +845,7 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -914,7 +914,7 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -996,7 +996,7 @@ async def sample_list_views(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1101,7 +1101,7 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1182,7 +1182,7 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1265,7 +1265,7 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to ``UpdateView``. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1344,7 +1344,7 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1430,7 +1430,7 @@ async def sample_list_sinks(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1465,7 +1465,7 @@ async def sample_list_sinks(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_sinks, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1563,7 +1563,7 @@ async def sample_get_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1603,7 +1603,7 @@ async def sample_get_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1711,7 +1711,7 @@ async def sample_create_sink(): This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1873,7 +1873,7 @@ async def sample_update_sink(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1917,7 +1917,7 @@ async def sample_update_sink(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2007,7 +2007,7 @@ async def sample_delete_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2034,7 +2034,7 @@ async def sample_delete_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2142,7 +2142,7 @@ async def sample_create_link(): This corresponds to the ``link_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2267,7 +2267,7 @@ async def sample_delete_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2392,7 +2392,7 @@ async def sample_list_links(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2509,7 +2509,7 @@ async def sample_get_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2619,7 +2619,7 @@ async def sample_list_exclusions(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2654,7 +2654,7 @@ async def sample_list_exclusions(): # and friendly error handling. 
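# For callers, the practical effect of this change: per-call retry policies
# on the async client are now built from retry_async.AsyncRetry instead of
# retry.Retry.  A hedged sketch -- the predicate and timing values mirror
# the generated defaults in this diff, and the get_sink() call is
# illustrative:
from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries

custom_retry = retries.AsyncRetry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,
)
# e.g.:  sink = await client.get_sink(sink_name=sink_name, retry=custom_retry)
# The wrapped RPC below bakes an equivalent AsyncRetry in as its default: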
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_exclusions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2752,7 +2752,7 @@ async def sample_get_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2790,7 +2790,7 @@ async def sample_get_exclusion(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2897,7 +2897,7 @@ async def sample_create_exclusion(): This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3045,7 +3045,7 @@ async def sample_update_exclusion(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3163,7 +3163,7 @@ async def sample_delete_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3190,7 +3190,7 @@ async def sample_delete_exclusion(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -3272,7 +3272,7 @@ async def sample_get_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3380,7 +3380,7 @@ async def sample_update_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3509,7 +3509,7 @@ async def sample_get_settings(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3651,7 +3651,7 @@ async def sample_update_settings(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3753,7 +3753,7 @@ async def sample_copy_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): The request object. The parameters to CopyLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3813,7 +3813,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3830,7 +3830,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3867,7 +3867,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3884,7 +3884,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3925,7 +3925,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3941,7 +3941,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index dcf622ac2641..59dcad291e5d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -36,14 +36,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -278,7 +278,7 @@ async def sample_delete_log(): This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -305,7 +305,7 @@ async def sample_delete_log(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -472,7 +472,7 @@ async def sample_write_log_entries(): This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -510,7 +510,7 @@ async def sample_write_log_entries(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.write_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -629,7 +629,7 @@ async def sample_list_log_entries(): This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -668,7 +668,7 @@ async def sample_list_log_entries(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -746,7 +746,7 @@ async def sample_list_monitored_resource_descriptors(): request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -768,7 +768,7 @@ async def sample_list_monitored_resource_descriptors(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -857,7 +857,7 @@ async def sample_list_logs(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -893,7 +893,7 @@ async def sample_list_logs(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_logs, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -986,7 +986,7 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to ``TailLogEntries``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1001,7 +1001,7 @@ def request_generator(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.tail_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1041,7 +1041,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1058,7 +1058,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1095,7 +1095,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1112,7 +1112,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1153,7 +1153,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1169,7 +1169,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index a120c352bdbe..b2cf3e3d7b33 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -269,7 +269,7 @@ async def sample_list_log_metrics(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -305,7 +305,7 @@ async def sample_list_log_metrics(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_metrics, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -396,7 +396,7 @@ async def sample_get_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -439,7 +439,7 @@ async def sample_get_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -540,7 +540,7 @@ async def sample_create_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -672,7 +672,7 @@ async def sample_update_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -717,7 +717,7 @@ async def sample_update_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -798,7 +798,7 @@ async def sample_delete_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -825,7 +825,7 @@ async def sample_delete_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -870,7 +870,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -887,7 +887,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -924,7 +924,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -941,7 +941,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -982,7 +982,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -998,7 +998,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 6c11ae7e588c..b62675ba6439 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.8.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1af57347050f..abe89b5aec5d 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -9112,7 +9112,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9251,7 +9251,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9396,7 +9396,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index ba5e56f22ade..498ad94afd40 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3013,7 +3013,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3152,7 +3152,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3297,7 +3297,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 00e443415fc8..f1d5ba3a382a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2899,7 +2899,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3038,7 +3038,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3183,7 +3183,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, From badf1a0fef0b9a0865a8df5d6399c24c374e55de Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:47:47 -0500 Subject: [PATCH 774/855] chore: bump cryptography from 41.0.5 to 41.0.6 in 
/synthtool/gcp/templates/python_library/.kokoro (#815) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index eb4d9f794dc1..773c1dfd2146 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 8957e21104e2..e5c1ffca94b7 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + 
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From 76de16899adc86db71e3145106994b403b902803 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Dec 2023 10:56:49 -0500 Subject: [PATCH 775/855] build: treat warnings as errors (#819) * build: treat warnings as errors * resolve warning Client.dataset is deprecated and will be removed in a future version * See https://github.com/googleapis/python-logging/issues/820 * address warning @pytest.yield_fixture is deprecated. Use @pytest.fixture instead; they are the same. 
* filter warnings from grpcio * revert * update comment --- packages/google-cloud-logging/pytest.ini | 23 +++++++++++++++ .../samples/snippets/export_test.py | 2 +- .../tests/system/test_system.py | 2 +- .../tests/unit/handlers/test_app_engine.py | 29 ++++++++++++++++--- .../unit/handlers/test_container_engine.py | 25 +++++++++++++--- 5 files changed, 71 insertions(+), 10 deletions(-) create mode 100644 packages/google-cloud-logging/pytest.ini diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini new file mode 100644 index 000000000000..994e939cb8fe --- /dev/null +++ b/packages/google-cloud-logging/pytest.ini @@ -0,0 +1,23 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once Release PR https://github.com/googleapis/python-api-common-protos/pull/191 is merged + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed + ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel + # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers + # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers + # Remove once https://github.com/googleapis/python-logging/issues/818 is fixed + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries + # Remove once https://github.com/googleapis/python-logging/issues/820 is fixed + ignore:.*warn.*is deprecated, use.*warning.*instead:DeprecationWarning + # Remove once a version of grpcio newer than 1.59.3 is released to PyPI + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index b1ecf49230f1..c5830e3b24d3 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -34,7 +34,7 @@ def _random_id(): ) -@pytest.yield_fixture +@pytest.fixture def example_sink(): client = logging.Client() diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 0d39aa0a95ce..ec67a99d052e 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -813,7 +813,7 @@ def _init_bigquery_dataset(self): # Stackdriver Logging to write into it. 
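The pytest.ini introduced above makes the suite fail on any warning that is not explicitly ignored. A small self-contained sketch (not taken from this patch) of how that policy interacts with pytest.warns, which the deprecation-related test changes later in this commit rely on:

    import warnings
    import pytest

    def legacy():
        warnings.warn("legacy() is deprecated; use modern()", DeprecationWarning)

    def test_legacy_warns():
        # With `filterwarnings = error`, an uncaptured DeprecationWarning would
        # fail this test; pytest.warns both captures it and asserts it happened.
        with pytest.warns(DeprecationWarning, match="use modern"):
            legacy()
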
retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) bigquery_client = bigquery.Client() - dataset_ref = bigquery_client.dataset(dataset_name) + dataset_ref = bigquery.DatasetReference(Config.CLIENT.project, dataset_name) dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref)) self.to_delete.append((bigquery_client, dataset)) bigquery_client.get_dataset(dataset) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 8eedfad9b053..868fc9be8a41 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import pytest import unittest import mock @@ -46,6 +47,9 @@ def test_constructor_w_gae_standard_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", ): handler = self._make_one(client, transport=_Transport) @@ -78,6 +82,9 @@ def test_constructor_w_gae_flex_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", ): handler = self._make_one( client, name=name, transport=_Transport, stream=stream @@ -99,7 +106,10 @@ def test_emit(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(expected_http_request, trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -137,7 +147,10 @@ def test_emit_manual_field_override(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(inferred_http_request, inferred_trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -197,12 +210,20 @@ def test_get_gae_labels_with_label(self): from google.cloud.logging_v2.handlers import app_engine trace_id = "test-gae-trace-id" - gae_labels = self._get_gae_labels_helper(trace_id) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(trace_id) expected_labels = {app_engine._TRACE_ID_LABEL: trace_id} self.assertEqual(gae_labels, expected_labels) def test_get_gae_labels_without_label(self): - gae_labels = self._get_gae_labels_helper(None) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. 
Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(None) self.assertEqual(gae_labels, {}) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py index 280ab9cf0037..5c814c53d82e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_container_engine.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest import unittest @@ -27,18 +28,30 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() self.assertIsNone(handler.name) def test_ctor_w_name(self): - handler = self._make_one(name="foo") + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one(name="foo") self.assertEqual(handler.name, "foo") def test_format(self): import logging import json - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() logname = "loggername" message = "hello world,嗨 世界" record = logging.LogRecord( @@ -51,6 +64,10 @@ def test_format(self): "thread": record.thread, "severity": record.levelname, } - payload = handler.format(record) + with pytest.warns( + DeprecationWarning, + match="format_stackdriver_json is deprecated. 
Use StructuredLogHandler instead", + ): + payload = handler.format(record) self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False)) From f8b7a3636165472f28a72e0f5acbb4c1dd82c79c Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 5 Dec 2023 13:39:57 -0500 Subject: [PATCH 776/855] fix: Use warning instead of warn in system tests to avoid DeprecationWarning (#821) * fix: Use warning instead of warn in system tests to avoid DeprecationWarning * Removed ignore like from pytest.ini --- packages/google-cloud-logging/pytest.ini | 2 -- packages/google-cloud-logging/tests/system/test_system.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 994e939cb8fe..8bc54e713e58 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -17,7 +17,5 @@ filterwarnings = ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries - # Remove once https://github.com/googleapis/python-logging/issues/820 is fixed - ignore:.*warn.*is deprecated, use.*warning.*instead:DeprecationWarning # Remove once a version of grpcio newer than 1.59.3 is released to PyPI ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index ec67a99d052e..821a938df793 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -605,7 +605,7 @@ def test_handlers_w_extras(self): "resource": Resource(type="cloudiot_device", labels={}), "labels": {"test-label": "manual"}, } - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -634,7 +634,7 @@ def test_handlers_w_json_fields(self): cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) extra = {"json_fields": {"hello": "world", "two": 2}} - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) From 11de9525f5736924c0092f9de94662a571b07ad7 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 6 Dec 2023 11:05:28 -0500 Subject: [PATCH 777/855] fix: Ignore Python37DeprecationWarnings from google.auth (#823) --- packages/google-cloud-logging/pytest.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 8bc54e713e58..1d4be1ee6ecf 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -19,3 +19,5 @@ filterwarnings = ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries # Remove once a version of grpcio newer than 1.59.3 is released to PyPI ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel + # Remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop 
support for Python 3.7:DeprecationWarning From d8a87d3ea6e4ba420a2ac514cbca1d3aaaf5f116 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Fri, 8 Dec 2023 11:02:56 -0500 Subject: [PATCH 778/855] fix: Fixed DeprecationWarning for datetime objects for Python 3.12 (#824) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Fixed DeprecationWarning for datetime objects for Python 3.12 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../handlers/transports/background_thread.py | 4 +++- packages/google-cloud-logging/pytest.ini | 4 ---- .../google-cloud-logging/tests/system/test_system.py | 4 ++-- .../google-cloud-logging/tests/unit/test__http.py | 4 ++-- .../google-cloud-logging/tests/unit/test_entries.py | 12 ++++++------ 5 files changed, 13 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index f361e043cdf8..7cf2799f59c3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -240,7 +240,9 @@ def enqueue(self, record, message, **kwargs): queue_entry = { "message": message, "severity": _helpers._normalize_severity(record.levelno), - "timestamp": datetime.datetime.utcfromtimestamp(record.created), + "timestamp": datetime.datetime.fromtimestamp( + record.created, datetime.timezone.utc + ), } queue_entry.update(kwargs) self._queue.put_nowait(queue_entry) diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 1d4be1ee6ecf..15e373380698 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -13,10 +13,6 @@ filterwarnings = ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers - # Remove once https://github.com/googleapis/python-logging/issues/818 is fixed - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries # Remove once a version of grpcio newer than 1.59.3 is released to PyPI ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel # Remove after support for Python 3.7 is dropped diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 821a938df793..c5000f1463cd 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -336,7 +336,7 @@ def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) - now 
= datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" @@ -356,7 +356,7 @@ def test_log_text_with_resource(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) - now = datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" diff --git a/packages/google-cloud-logging/tests/unit/test__http.py b/packages/google-cloud-logging/tests/unit/test__http.py index 0e83bd82cca4..5709a50a6e80 100644 --- a/packages/google-cloud-logging/tests/unit/test__http.py +++ b/packages/google-cloud-logging/tests/unit/test__http.py @@ -122,9 +122,9 @@ def test_ctor(self): @staticmethod def _make_timestamp(): import datetime - from google.cloud._helpers import UTC + from datetime import timezone - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.datetime.now(timezone.utc) return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_with_limits(self): diff --git a/packages/google-cloud-logging/tests/unit/test_entries.py b/packages/google-cloud-logging/tests/unit/test_entries.py index e7bf30d87fd7..382674ebda2f 100644 --- a/packages/google-cloud-logging/tests/unit/test_entries.py +++ b/packages/google-cloud-logging/tests/unit/test_entries.py @@ -200,14 +200,14 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime - from google.cloud._helpers import UTC + from datetime import timezone from google.cloud.logging import Resource klass = self._get_target_class() client = _Client(self.PROJECT) SEVERITY = "CRITICAL" IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) LABELS = {"foo": "bar", "baz": "qux"} @@ -283,11 +283,11 @@ def test_from_api_repr_w_loggers_no_logger_match(self): def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) @@ -341,11 +341,11 @@ def test_from_api_repr_w_loggers_w_logger_match(self): def test_from_api_repr_w_folder_path(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) From c9baa9586fc902c659657099c567384482ba6881 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 14:22:57 -0500 Subject: [PATCH 779/855] chore(main): release 3.9.0 (#799) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 17 +++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- 
.../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 21 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index fa4291eb23d2..7c3079b2d898 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.8.0" + ".": "3.9.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index d8cfadcaa434..0ae1f74bb841 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08) + + +### Features + +* Add support for Python 3.12 ([#813](https://github.com/googleapis/python-logging/issues/813)) ([6591b53](https://github.com/googleapis/python-logging/commit/6591b53e3fcd67e156765f329700443647b70349)) +* Use native namespaces instead of pkg_resources ([#812](https://github.com/googleapis/python-logging/issues/812)) ([10ad75d](https://github.com/googleapis/python-logging/commit/10ad75d2b9276df389f5069f9f143f8f4621d04d)) + + +### Bug Fixes + +* Fixed DeprecationWarning for datetime objects for Python 3.12 ([#824](https://github.com/googleapis/python-logging/issues/824)) ([2384981](https://github.com/googleapis/python-logging/commit/2384981c9137a57a647a69a32b67dcacd619ea0a)) +* Fixed object paths in autogenerated code in owlbot.py ([#804](https://github.com/googleapis/python-logging/issues/804)) ([b14bb14](https://github.com/googleapis/python-logging/commit/b14bb144fad2dcf067b7e62e402b708f45ebadbe)) +* Updated protobuf JSON formatting to support nested protobufs ([#797](https://github.com/googleapis/python-logging/issues/797)) ([a00c261](https://github.com/googleapis/python-logging/commit/a00c261ee07a5dcaac9f5b966b4bb6729a2bbe65)) +* Use `retry_async` instead of `retry` in async client ([#816](https://github.com/googleapis/python-logging/issues/816)) ([c79f7f5](https://github.com/googleapis/python-logging/commit/c79f7f55dddb170eac29f24b23bfe1dde8bfbda8)) +* Use warning instead of warn in system tests to avoid DeprecationWarning ([#821](https://github.com/googleapis/python-logging/issues/821)) ([c447175](https://github.com/googleapis/python-logging/commit/c4471758e1efee0e3599b08969449b2ce71bd1b4)) + ## [3.8.0](https://github.com/googleapis/python-logging/compare/v3.7.0...v3.8.0) (2023-10-03) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 4052fbb0c7fc..90b3aae3d90f 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
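The changelog entry "Fixed DeprecationWarning for datetime objects for Python 3.12" refers to the utcnow()/utcfromtimestamp() migration in patch 778 above. A quick standard-library sketch of the equivalence:

    from datetime import datetime, timezone

    # Deprecated in Python 3.12 (and tz-naive): datetime.utcnow() and
    # datetime.utcfromtimestamp(ts). Timezone-aware replacements:
    now = datetime.now(timezone.utc)
    restored = datetime.fromtimestamp(now.timestamp(), timezone.utc)

    assert now.tzinfo is timezone.utc
    assert abs((restored - now).total_seconds()) < 1e-3
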
# -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 4052fbb0c7fc..90b3aae3d90f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..f1b714b6b7b8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.9.0" }, "snippets": [ { From 474c8c9384b3c2ead34778c3cd4ed3d63cf9ac2d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 12 Dec 2023 18:50:19 +0100 Subject: [PATCH 780/855] chore(deps): update all dependencies (#828) --- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 0b0ff4e678df..8cbb676d6a1e 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.8.0 -google-cloud-bigquery==3.13.0 +google-cloud-logging==3.9.0 +google-cloud-bigquery==3.14.0 google-cloud-storage==2.13.0 -google-cloud-pubsub==2.18.4 +google-cloud-pubsub==2.19.0 From c464a3a355b49e8f81346ab224b2f9f1e105921f Mon Sep 17 00:00:00 2001 From: Cindy Peng <148148319+cindy-peng@users.noreply.github.com> Date: Tue, 12 Dec 2023 11:56:31 -0800 Subject: [PATCH 781/855] chore(.github): Update python logging issues and prs assignee (#825) Co-authored-by: cindy-peng --- packages/google-cloud-logging/.github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index a9d3f44e3967..febbb3f31340 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - googleapis/api-logging-reviewers + - googleapis/api-logging-python-reviewers assign_prs: - - googleapis/api-logging-reviewers + - googleapis/api-logging-python-reviewers From 1da3e9b22a3d2e5479397c4986378ea0f5e290ba Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 13:44:46 +0100 Subject: [PATCH 782/855] chore(deps): update all dependencies (#829) --- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 8cbb676d6a1e..771ec6dc46cd 100644 --- 
a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.9.0 -google-cloud-bigquery==3.14.0 -google-cloud-storage==2.13.0 +google-cloud-bigquery==3.14.1 +google-cloud-storage==2.14.0 google-cloud-pubsub==2.19.0 From fa96f3e6ebdac067e07eefc92b394aa0b96011fb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 2 Jan 2024 16:14:37 +0100 Subject: [PATCH 783/855] chore(deps): update dependency pytest to v7.4.4 (#834) --- .../google-cloud-logging/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 908e344b5dde..43b02e724796 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.4.3 +pytest==7.4.4 From 0e1482b3a6c55317b37b486e09a838f0970cff1b Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 9 Jan 2024 10:34:30 -0500 Subject: [PATCH 784/855] fix: Allowed for a partial override of loggers that get excluded from setup_client (#831) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Allowed for a partial override of loggers that get excluded from setup_client * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/handlers/handlers.py | 12 ++++--- .../tests/unit/handlers/test_handlers.py | 33 ++++++++++++++++++- .../tests/unit/test_client.py | 6 ---- 3 files changed, 40 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index ce5822fcd17a..34bb018d8a65 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -24,13 +24,17 @@ DEFAULT_LOGGER_NAME = "python" -"""Exclude internal logs from propagating through handlers""" +"""Defaults for filtering out noisy loggers""" EXCLUDED_LOGGER_DEFAULTS = ( + "google.api_core.bidi", + "werkzeug", +) + +"""Exclude internal logs from propagating through handlers""" +_INTERNAL_LOGGERS = ( "google.cloud", "google.auth", "google_auth_httplib2", - "google.api_core.bidi", - "werkzeug", ) """These environments require us to remove extra handlers on setup""" @@ -291,7 +295,7 @@ def setup_logging( log_level (Optional[int]): Python logging log level. Defaults to :const:`logging.INFO`. 
""" - all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) + all_excluded_loggers = set(excluded_loggers + _INTERNAL_LOGGERS) logger = logging.getLogger() # remove built-in handlers on App Engine or Cloud Functions environments diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 1f86a8e37564..c301327a9d84 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -18,6 +18,11 @@ import mock import json +from google.cloud.logging_v2.handlers.handlers import ( + _INTERNAL_LOGGERS, + EXCLUDED_LOGGER_DEFAULTS, +) + from google.cloud.logging_v2.handlers._monitored_resources import ( _FUNCTION_ENV_VARS, _GAE_ENV_VARS, @@ -867,7 +872,7 @@ class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers import setup_logging - if excludes: + if excludes is not None: return setup_logging(handler, excluded_loggers=excludes) else: return setup_logging(handler) @@ -893,6 +898,24 @@ def test_setup_logging_excludes(self): self.assertNotIn(handler, excluded_logger.handlers) self.assertFalse(excluded_logger.propagate) + def test_setup_logging_internal_loggers_no_excludes(self): + handler = _Handler(logging.INFO) + self._call_fut(handler, excludes=()) + + # Test that excluded logger defaults can be included, but internal + # loggers can't be. + for logger_name in _INTERNAL_LOGGERS: + logger = logging.getLogger(logger_name) + self.assertNotIn(handler, logger.handlers) + self.assertFalse(logger.propagate) + + logger = logging.getLogger("logging") + self.assertTrue(logger.propagate) + + for logger_name in EXCLUDED_LOGGER_DEFAULTS: + logger = logging.getLogger(logger_name) + self.assertTrue(logger.propagate) + @patch.dict("os.environ", {envar: "1" for envar in _FUNCTION_ENV_VARS}) def test_remove_handlers_gcf(self): logger = logging.getLogger() @@ -939,10 +962,18 @@ def test_keep_handlers_others(self): def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] + # reset the logging manager every time so that we're not reusing loggers + # across different test cases. + self._logger_manager = logging.Logger.manager + logging.Logger.manager = logging.Manager(logging.Logger.root) + def tearDown(self): # cleanup handlers logging.getLogger().handlers = self._handlers_cache[:] + # restore the old logging manager. 
+ logging.Logger.manager = self._logger_manager + class _Handler(object): def __init__(self, level): diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index ec3130ac5aca..2f6736dcf798 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -847,9 +847,6 @@ def test_setup_logging(self): expected_kwargs = { "excluded_loggers": ( - "google.cloud", - "google.auth", - "google_auth_httplib2", "google.api_core.bidi", "werkzeug", ), @@ -890,9 +887,6 @@ def test_setup_logging_w_extra_kwargs(self): expected_kwargs = { "excluded_loggers": ( - "google.cloud", - "google.auth", - "google_auth_httplib2", "google.api_core.bidi", "werkzeug", ), From 65aceefb27b3ad0246e4de89ead77fcdf61321fa Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:18:19 -0500 Subject: [PATCH 785/855] fix: Use value of cluster-location in GKE for tagging location (#830) --- .../cloud/logging_v2/handlers/_monitored_resources.py | 7 +++++-- .../tests/unit/handlers/test__monitored_resources.py | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index 0d94450ceb3c..f93d549886bd 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -61,6 +61,9 @@ _GKE_CLUSTER_NAME = "instance/attributes/cluster-name" """Attribute in metadata server when in GKE environment.""" +_GKE_CLUSTER_LOCATION = "instance/attributes/cluster-location" +"""Attribute in metadata server when in GKE environment.""" + _PROJECT_NAME = "project/project-id" """Attribute in metadata server when in GKE environment.""" @@ -94,7 +97,7 @@ def _create_kubernetes_resource(): Returns: google.cloud.logging.Resource """ - zone = retrieve_metadata_server(_ZONE_ID) + location = retrieve_metadata_server(_GKE_CLUSTER_LOCATION) cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) project = retrieve_metadata_server(_PROJECT_NAME) @@ -102,7 +105,7 @@ def _create_kubernetes_resource(): type="k8s_container", labels={ "project_id": project if project else "", - "location": zone if zone else "", + "location": location if location else "", "cluster_name": cluster_name if cluster_name else "", }, ) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index 838543253a4d..e788f8e3425e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -56,6 +56,7 @@ def _mock_metadata(self, endpoint): if ( endpoint == _monitored_resources._ZONE_ID or endpoint == _monitored_resources._REGION_ID + or endpoint == _monitored_resources._GKE_CLUSTER_LOCATION ): return self.LOCATION elif ( From eba658154ece85c37e9ab5f824c247bb89744e85 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 10 Jan 2024 16:57:18 +0100 Subject: [PATCH 786/855] chore(deps): update dependency google-cloud-bigquery to v3.15.0 (#836) --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 771ec6dc46cd..9633af67f622 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.9.0 -google-cloud-bigquery==3.14.1 +google-cloud-bigquery==3.15.0 google-cloud-storage==2.14.0 google-cloud-pubsub==2.19.0 From 4ff93a42621a981eeae1fd529fb96b2f4fd074ec Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Fri, 12 Jan 2024 15:34:13 -0500 Subject: [PATCH 787/855] build: Added minimal versions of Django/Flask for unit tests. (#837) * build: Added minimal versions of Django/Flask for unit tests. * Added trailing whitespace to pytest.ini * Update pytest.ini * Delete testing/constraints-3.7.txt * Added constraints-3.7.txt back * Fixed pytest.ini for Python 3.7 --- packages/google-cloud-logging/pytest.ini | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.10.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.11.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.12.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.7.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.8.txt | 10 ++++++++++ .../google-cloud-logging/testing/constraints-3.9.txt | 10 ++++++++++ 7 files changed, 70 insertions(+) diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 15e373380698..8a432dd0f073 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -17,3 +17,13 @@ filterwarnings = ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel # Remove after support for Python 3.7 is dropped ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning + # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code + # 3.7 deprecation warnings + ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning + # 3.8 - 3.9 deprecation warnings + ignore:Importing 'itsdangerous.json' is deprecated and will be removed in ItsDangerous 2.1. 
Use Python's 'json' module instead.:DeprecationWarning + ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3, and in 3.10 it will stop working:DeprecationWarning + # 3.12 deprecation warnings + ignore:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning + ignore:ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning + ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning diff --git a/packages/google-cloud-logging/testing/constraints-3.10.txt b/packages/google-cloud-logging/testing/constraints-3.10.txt index ed7f9aed2559..577d7b4cd853 100644 --- a/packages/google-cloud-logging/testing/constraints-3.10.txt +++ b/packages/google-cloud-logging/testing/constraints-3.10.txt @@ -4,3 +4,13 @@ google-api-core proto-plus protobuf + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.3 +jinja2==2.11 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.11.txt b/packages/google-cloud-logging/testing/constraints-3.11.txt index ed7f9aed2559..68a9effe9f7e 100644 --- a/packages/google-cloud-logging/testing/constraints-3.11.txt +++ b/packages/google-cloud-logging/testing/constraints-3.11.txt @@ -4,3 +4,13 @@ google-api-core proto-plus protobuf + +# Lower bound testing for optional dependencies +django==4.1 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.3 +jinja2==2.11 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.12.txt b/packages/google-cloud-logging/testing/constraints-3.12.txt index ed7f9aed2559..5548b06affcf 100644 --- a/packages/google-cloud-logging/testing/constraints-3.12.txt +++ b/packages/google-cloud-logging/testing/constraints-3.12.txt @@ -4,3 +4,13 @@ google-api-core proto-plus protobuf + +# Lower bound testing for optional dependencies +django==4.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.3 +jinja2==2.11 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 587626c54bb2..977767328234 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -8,3 +8,13 @@ google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 google-cloud-core==2.0.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt index ed7f9aed2559..cc90422a45f4 100644 --- a/packages/google-cloud-logging/testing/constraints-3.8.txt +++ b/packages/google-cloud-logging/testing/constraints-3.8.txt @@ -4,3 +4,13 @@ google-api-core proto-plus protobuf + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies 
for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/packages/google-cloud-logging/testing/constraints-3.9.txt b/packages/google-cloud-logging/testing/constraints-3.9.txt index ed7f9aed2559..cc90422a45f4 100644 --- a/packages/google-cloud-logging/testing/constraints-3.9.txt +++ b/packages/google-cloud-logging/testing/constraints-3.9.txt @@ -4,3 +4,13 @@ google-api-core proto-plus protobuf + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file From 4688f57f7e39cb7c1faa9d450dec0725fb809c70 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:52:21 -0500 Subject: [PATCH 788/855] build(python): fix `docs` and `docfx` builds (#840) Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 6 +++--- .../.kokoro/requirements.txt | 6 +++--- packages/google-cloud-logging/noxfile.py | 20 ++++++++++++++++++- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 773c1dfd2146..d8a1bbca7179 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index e5c1ffca94b7..bb3d6ca38b14 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 9cff1ae8492f..9478ab93cf94 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -290,7 +290,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -316,6 +325,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", From 129d16e1108d433bdb5e0b0f3039e329b468b0bd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 17 Jan 2024 19:13:07 +0100 Subject: [PATCH 789/855] chore(deps): update dependency google-cloud-bigquery to v3.16.0 (#839) Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- packages/google-cloud-logging/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 9633af67f622..9ce9629c6b2c 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.9.0 -google-cloud-bigquery==3.15.0 +google-cloud-bigquery==3.16.0 google-cloud-storage==2.14.0 google-cloud-pubsub==2.19.0 From 93068bbf07120af617eefca63a5490ec7b49a17a Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 24 Jan 2024 07:28:34 -0500 Subject: [PATCH 790/855] chore: Add api-logging-partners to CODEOWNERS (#841) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Add api-logging-partners to CODEOWNERS * Add api-logging-partners to repo metadata JSON * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/CODEOWNERS | 8 ++++---- packages/google-cloud-logging/.repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS index 2a3b42055693..0738e11eea58 100644 --- a/packages/google-cloud-logging/.github/CODEOWNERS +++ b/packages/google-cloud-logging/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-logging +# @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners -# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging +# @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 0b6c0d8ca0f9..83c212332e89 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -10,7 +10,7 @@ "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", - "codeowner_team": "@googleapis/api-logging", + "codeowner_team": "@googleapis/api-logging @googleapis/api-logging-partners", "default_version": "v2", "api_shortname": "logging", "api_description": "allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud and Amazon Web Services. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premises systems, and hybrid cloud systems. BindPlane is included with your Google Cloud project at no additional cost." From c80fdce77b713db7a94589735da5864a774a3be5 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:17:50 -0500 Subject: [PATCH 791/855] fix: Added placeholder kwargs to StructuredLogHandler (#845) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Added placeholder kwargs to StructuredLogHandler * Replaced unused named arguments with **kwargs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * linting * Update structured_log.py --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/handlers/handlers.py | 1 + .../logging_v2/handlers/structured_log.py | 8 ++++- .../tests/unit/test_client.py | 36 +++++++++++++++++++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 34bb018d8a65..3d6ab9d1e9a7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -157,6 +157,7 @@ def __init__( resource=None, labels=None, stream=None, + **kwargs, ): """ Args: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py index e6094091eda2..dcba02c9c923 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/structured_log.py @@ -63,7 +63,13 @@ class StructuredLogHandler(logging.StreamHandler): """ def 
__init__( - self, *, labels=None, stream=None, project_id=None, json_encoder_cls=None + self, + *, + labels=None, + stream=None, + project_id=None, + json_encoder_cls=None, + **kwargs ): """ Args: diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 2f6736dcf798..2d12a283e74b 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -894,6 +894,42 @@ def test_setup_logging_w_extra_kwargs(self): } self.assertEqual(kwargs, expected_kwargs) + def test_setup_logging_w_extra_kwargs_structured_log(self): + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE + + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + expected_kwargs = { + "excluded_loggers": ( + "google.api_core.bidi", + "werkzeug", + ), + "log_level": 20, + } + self.assertEqual(kwargs, expected_kwargs) + class _Connection(object): _called_with = None From 24ce5ef744fe1bc4630ff5e246c558b9113d0544 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:40:56 -0500 Subject: [PATCH 792/855] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#852) Source-Link: https://github.com/googleapis/synthtool/commit/e13b22b1f660c80e4c3e735a9177d2f16c4b8bdc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 57 +++++++++++-------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index d8a1bbca7179..2aefd0e91175 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
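The placeholder **kwargs above let Client.setup_logging construct either handler class from a single argument set: options that only CloudLoggingHandler understands (such as resource) are now silently accepted and discarded by StructuredLogHandler instead of raising TypeError. A short sketch, assuming the keyword-only constructor shown in the diff:

    import sys
    from google.cloud.logging import Resource
    from google.cloud.logging.handlers import StructuredLogHandler

    handler = StructuredLogHandler(
        labels={"service": "demo"},  # understood and kept
        stream=sys.stdout,           # understood and kept
        resource=Resource(type="k8s_container", labels={}),  # swallowed by **kwargs
    )
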
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index bb3d6ca38b14..8c11c9f3e9b6 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + 
--hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From 219e79045d889a1c1938a6275988f89922c8d11f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Feb 2024 17:16:04 +0100 Subject: [PATCH 793/855] chore(deps): update all dependencies (#843) * chore(deps): update all dependencies * Update requirements-test.txt for Python 3.7 compatibility --------- Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../samples/snippets/requirements-test.txt | 3 ++- .../google-cloud-logging/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 43b02e724796..9d5ac84b9f2d 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,2 +1,3 @@ backoff==2.2.1 -pytest==7.4.4 +pytest==7.4.4; python_version == '3.7' +pytest==8.0.0; python_version >= '3.8' diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 9ce9629c6b2c..bcf91785d284 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.9.0 -google-cloud-bigquery==3.16.0 +google-cloud-bigquery==3.17.2 google-cloud-storage==2.14.0 -google-cloud-pubsub==2.19.0 +google-cloud-pubsub==2.19.4 From fd3da7b85029321083ac842c5a3f4fb1d6507d3f Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:27:24 -0500 Subject: [PATCH 794/855] docs: Added documentation for 
Django/Flask integrations and dictConfig (#848) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Added documentation for Django/Flask integrations and dictConfig * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Added product prefix to new snippet * Added client setup in sample + link to settings in documentation * Changed django links to point to `/stable/` links --------- Co-authored-by: Owl Bot --- packages/google-cloud-logging/README.rst | 4 +-- .../docs/std-lib-integration.rst | 13 ++++++-- packages/google-cloud-logging/docs/usage.rst | 1 + .../docs/web-framework-integration.rst | 32 +++++++++++++++++++ .../cloud/logging_v2/handlers/_helpers.py | 2 +- .../samples/snippets/usage_guide.py | 31 ++++++++++++++++++ 6 files changed, 78 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-logging/docs/web-framework-integration.rst diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 2618dc37a47d..84dd1e77fd90 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -61,8 +61,8 @@ Python >= 3.7 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. The last version of the library compatible with Python 2.7 is `google-cloud-logging==1.15.1`. -Python == 3.6. The last version of the library compatible with Python 3.6 is `google-cloud-logging==3.1.2`. +| Python == 2.7. The last version of the library compatible with Python 2.7 is ``google-cloud-logging==1.15.1``. +| Python == 3.6. The last version of the library compatible with Python 3.6 is ``google-cloud-logging==3.1.2``. Mac/Linux diff --git a/packages/google-cloud-logging/docs/std-lib-integration.rst b/packages/google-cloud-logging/docs/std-lib-integration.rst index a485fce6d407..be43231fdc5e 100644 --- a/packages/google-cloud-logging/docs/std-lib-integration.rst +++ b/packages/google-cloud-logging/docs/std-lib-integration.rst @@ -44,6 +44,16 @@ There are two supported handler classes to choose from: to standard out, to be read and parsed by a GCP logging agent - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run +Handler classes can also be specified via `dictConfig `_: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_dict_config] + :end-before: [END logging_dict_config] + :dedent: 4 + +Note that since :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` requires an already initialized :class:`~google.cloud.logging_v2.client.Client`, +you must initialize a client and include it in the dictConfig entry for a `CloudLoggingHandler`. + Standard Library --------------------------- @@ -101,8 +111,7 @@ The following fields are currently supported: - :ref:`json_fields` .. note:: - Fields marked with "*" require a supported Python web framework. The Google Cloud Logging - library currently supports `flask `_ and `django `_ + Fields marked with "*" require a :doc:`supported Python web framework `. 
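Concretely, the dictConfig pattern described in the note above looks like the following minimal sketch (the logger names are placeholders; the full snippet this commit wires in via literalinclude lives in samples/snippets/usage_guide.py further down in this patch). The key point is that the ``client`` entry is the live client object itself, which is why a client must be initialized before the config dict is built:

    import logging.config

    import google.cloud.logging

    # CloudLoggingHandler requires an already-initialized Client ...
    client = google.cloud.logging.Client()

    LOGGING = {
        "version": 1,
        "handlers": {
            "cloud_logging": {
                "class": "google.cloud.logging.handlers.CloudLoggingHandler",
                # ... so the client object itself goes into the dict entry.
                "client": client,
            },
            "structured_log": {
                "class": "google.cloud.logging.handlers.StructuredLogHandler"
            },
        },
        "loggers": {
            # Placeholder logger names for illustration.
            "my_logger": {"handlers": ["cloud_logging"], "level": "INFO"},
            "my_other_logger": {"handlers": ["structured_log"], "level": "INFO"},
        },
    }
    logging.config.dictConfig(LOGGING)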
Manual Metadata Using the `extra` Argument -------------------------------------------- diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 929ee9cefc47..7541f355b466 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -4,6 +4,7 @@ Usage Guide :maxdepth: 2 std-lib-integration + web-framework-integration direct-lib-usage grpc-vs-http diff --git a/packages/google-cloud-logging/docs/web-framework-integration.rst b/packages/google-cloud-logging/docs/web-framework-integration.rst new file mode 100644 index 000000000000..d91d714b31b0 --- /dev/null +++ b/packages/google-cloud-logging/docs/web-framework-integration.rst @@ -0,0 +1,32 @@ +Integration with Python Web Frameworks +====================================== + +The Google Cloud Logging library can integrate with Python web frameworks +`flask `_ and `django `_ to +automatically populate `LogEntry fields `_ +`trace`, `span_id`, `trace_sampled`, and `http_request`. + +Django +------ + +Django integration has been tested to work with each of the Django/Python versions listed `here `_. +To enable Django integration, add `google.cloud.logging_v2.handlers.middleware.RequestMiddleware` to the list of `MIDDLEWARE` +in your `settings `_ file. Also be sure to :doc:`set up logging ` in your settings file. + +Flask +----- + +Flask integration has been tested to work with the following versions of Flask: + +=============== ============== +Python version Flask versions +=============== ============== +3.7 >=1.0.0 +3.8 >=1.0.0 +3.9 >=1.0.0 +3.10 >=1.0.3 +3.11 >=1.0.3 +3.12 >=1.0.3 +=============== ============== + +Be sure to :doc:`set up logging ` before declaring the Flask app. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 43678ed0df52..f0c301ceb034 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -66,7 +66,7 @@ def get_request_data_from_flask(): Returns: Tuple[Optional[dict], Optional[str], Optional[str], bool]: Data related to the current http request, trace_id, span_id and trace_sampled - for the request. All fields will be None if a django request isn't found. + for the request. All fields will be None if a Flask request isn't found. 
""" if flask is None or not flask.request: return None, None, None, False diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index 5c9e869909dd..f4292a9de83a 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -484,6 +484,37 @@ def setup_logging(client): # [END setup_logging_excludes] +@snippet +def logging_dict_config(client): + import logging.config + + # [START logging_dict_config] + import google.cloud.logging + + client = google.cloud.logging.Client() + + LOGGING = { + "version": 1, + "handlers": { + "cloud_logging": { + "class": "google.cloud.logging.handlers.CloudLoggingHandler", + "client": client, + }, + "structured_log": { + "class": "google.cloud.logging.handlers.StructuredLogHandler" + }, + }, + "root": {"handlers": ["console"], "level": "WARNING"}, + "loggers": { + "my_logger": {"handlers": ["cloud_logging"], "level": "INFO"}, + "my_other_logger": {"handlers": ["structured_log"], "level": "INFO"}, + }, + } + # [END logging_dict_config] + + logging.config.dictConfig(LOGGING) + + def _line_no(func): return func.__code__.co_firstlineno From 1b96015082e4509a929e147c2af3535297bf7f21 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:43:58 -0500 Subject: [PATCH 795/855] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#864) * build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad * update warning filter for grpc; remove obsolete warnings --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 66 +++++++++---------- packages/google-cloud-logging/pytest.ini | 10 +-- 3 files changed, 37 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 2aefd0e91175..e4e943e0259a 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 8c11c9f3e9b6..bda8e38c4f31 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + 
--hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 8a432dd0f073..8b380c7df275 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -7,14 +7,8 @@ filterwarnings = # Remove once Release PR https://github.com/googleapis/python-api-common-protos/pull/191 is merged ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning - # Remove once https://github.com/grpc/grpc/issues/35086 is fixed - ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel - # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers - # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers 
- # Remove once a version of grpcio newer than 1.59.3 is released to PyPI - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel + # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed + ignore:unclosed:ResourceWarning # Remove after support for Python 3.7 is dropped ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code From c4573240ab46c90df21c51376845bf33fdd48197 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 10:03:48 -0400 Subject: [PATCH 796/855] feat: Allow users to explicitly configure universe domain (#846) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Allow users to explicitly configure universe domain chore: Update gapic-generator-python to v1.14.0 PiperOrigin-RevId: 603108274 Source-Link: https://github.com/googleapis/googleapis/commit/3d83e3652f689ab51c3f95f876458c6faef619bf Source-Link: https://github.com/googleapis/googleapis-gen/commit/baf5e9bbb14a768b2b4c9eae9feb78f18f1757fa Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmFmNWU5YmJiMTRhNzY4YjJiNGM5ZWFlOWZlYjc4ZjE4ZjE3NTdmYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Resolve AttributeError 'Credentials' object has no attribute 'universe_domain' fix: Add google-auth as a direct dependency fix: Add staticmethod decorator to methods added in v1.14.0 chore: Update gapic-generator-python to v1.14.1 PiperOrigin-RevId: 603728206 Source-Link: https://github.com/googleapis/googleapis/commit/9063da8b4d45339db4e2d7d92a27c6708620e694 Source-Link: https://github.com/googleapis/googleapis-gen/commit/891c67d0a855b08085eb301dabb14064ef4b2c6d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODkxYzY3ZDBhODU1YjA4MDg1ZWIzMDFkYWJiMTQwNjRlZjRiMmM2ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(diregapic): s/bazel/bazelisk/ in DIREGAPIC build GitHub action PiperOrigin-RevId: 604714585 Source-Link: https://github.com/googleapis/googleapis/commit/e4dce1324f4cb6dedb6822cb157e13cb8e0b3073 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4036f78305c5c2aab80ff91960b3a3d983ff4b03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDAzNmY3ODMwNWM1YzJhYWI4MGZmOTE5NjBiM2EzZDk4M2ZmNGIwMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Require `google-api-core>=1.34.1` fix: Resolve issue with missing import for certain enums in `**/types/…` PiperOrigin-RevId: 607041732 Source-Link: https://github.com/googleapis/googleapis/commit/b4532678459355676c95c00e39866776b7f40b2e Source-Link: https://github.com/googleapis/googleapis-gen/commit/cd796416f0f54cb22b2c44fb2d486960e693a346 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2Q3OTY0MTZmMGY1NGNiMjJiMmM0NGZiMmQ0ODY5NjBlNjkzYTM0NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Exclude google-auth 2.24.0 and 2.25.0 chore: Update gapic-generator-python to v1.14.4 PiperOrigin-RevId: 611561820 Source-Link: 
https://github.com/googleapis/googleapis/commit/87ef1fe57feede1f23b523f3c7fc4c3f2b92d6d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/197316137594aafad94dea31226528fbcc39310c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTk3MzE2MTM3NTk0YWFmYWQ5NGRlYTMxMjI2NTI4ZmJjYzM5MzEwYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add include_recaptcha_script for as a new action in firewall policies PiperOrigin-RevId: 612851792 Source-Link: https://github.com/googleapis/googleapis/commit/49ea2c0fc42dd48996b833f05a258ad7e8590d3d Source-Link: https://github.com/googleapis/googleapis-gen/commit/460fdcbbbe00f35b1c591b1f3ef0c77ebd3ce277 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDYwZmRjYmJiZTAwZjM1YjFjNTkxYjFmM2VmMGM3N2ViZDNjZTI3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): require google-api-core>=1.34.1,>=2.11.0, google-auth >= 2.14.1 * filter warning in generated tests --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud/logging_v2/services/__init__.py | 2 +- .../services/config_service_v2/__init__.py | 2 +- .../config_service_v2/async_client.py | 173 +++++- .../services/config_service_v2/client.py | 397 ++++++++++++-- .../services/config_service_v2/pagers.py | 2 +- .../config_service_v2/transports/__init__.py | 2 +- .../config_service_v2/transports/base.py | 8 +- .../config_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../services/logging_service_v2/__init__.py | 2 +- .../logging_service_v2/async_client.py | 95 +++- .../services/logging_service_v2/client.py | 319 +++++++++-- .../services/logging_service_v2/pagers.py | 2 +- .../logging_service_v2/transports/__init__.py | 2 +- .../logging_service_v2/transports/base.py | 8 +- .../logging_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../services/metrics_service_v2/__init__.py | 2 +- .../metrics_service_v2/async_client.py | 92 +++- .../services/metrics_service_v2/client.py | 316 +++++++++-- .../services/metrics_service_v2/pagers.py | 2 +- .../metrics_service_v2/transports/__init__.py | 2 +- .../metrics_service_v2/transports/base.py | 8 +- .../metrics_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../google/cloud/logging_v2/types/__init__.py | 2 +- .../cloud/logging_v2/types/log_entry.py | 2 +- .../google/cloud/logging_v2/types/logging.py | 2 +- .../cloud/logging_v2/types/logging_config.py | 2 +- .../cloud/logging_v2/types/logging_metrics.py | 2 +- packages/google-cloud-logging/owlbot.py | 1 + packages/google-cloud-logging/pytest.ini | 2 + ...onfig_service_v2_copy_log_entries_async.py | 2 +- ...config_service_v2_copy_log_entries_sync.py | 2 +- ...d_config_service_v2_create_bucket_async.py | 2 +- ...ig_service_v2_create_bucket_async_async.py | 2 +- ...fig_service_v2_create_bucket_async_sync.py | 2 +- ...ed_config_service_v2_create_bucket_sync.py | 2 +- ...onfig_service_v2_create_exclusion_async.py | 2 +- ...config_service_v2_create_exclusion_sync.py | 2 +- ...ted_config_service_v2_create_link_async.py | 2 +- ...ated_config_service_v2_create_link_sync.py | 2 +- ...ted_config_service_v2_create_sink_async.py | 2 +- ...ated_config_service_v2_create_sink_sync.py | 2 +- ...ted_config_service_v2_create_view_async.py | 2 +- ...ated_config_service_v2_create_view_sync.py | 2 +- 
...d_config_service_v2_delete_bucket_async.py | 2 +- ...ed_config_service_v2_delete_bucket_sync.py | 2 +- ...onfig_service_v2_delete_exclusion_async.py | 2 +- ...config_service_v2_delete_exclusion_sync.py | 2 +- ...ted_config_service_v2_delete_link_async.py | 2 +- ...ated_config_service_v2_delete_link_sync.py | 2 +- ...ted_config_service_v2_delete_sink_async.py | 2 +- ...ated_config_service_v2_delete_sink_sync.py | 2 +- ...ted_config_service_v2_delete_view_async.py | 2 +- ...ated_config_service_v2_delete_view_sync.py | 2 +- ...ated_config_service_v2_get_bucket_async.py | 2 +- ...rated_config_service_v2_get_bucket_sync.py | 2 +- ...nfig_service_v2_get_cmek_settings_async.py | 2 +- ...onfig_service_v2_get_cmek_settings_sync.py | 2 +- ...d_config_service_v2_get_exclusion_async.py | 2 +- ...ed_config_service_v2_get_exclusion_sync.py | 2 +- ...erated_config_service_v2_get_link_async.py | 2 +- ...nerated_config_service_v2_get_link_sync.py | 2 +- ...ed_config_service_v2_get_settings_async.py | 2 +- ...ted_config_service_v2_get_settings_sync.py | 2 +- ...erated_config_service_v2_get_sink_async.py | 2 +- ...nerated_config_service_v2_get_sink_sync.py | 2 +- ...erated_config_service_v2_get_view_async.py | 2 +- ...nerated_config_service_v2_get_view_sync.py | 2 +- ...ed_config_service_v2_list_buckets_async.py | 2 +- ...ted_config_service_v2_list_buckets_sync.py | 2 +- ...config_service_v2_list_exclusions_async.py | 2 +- ..._config_service_v2_list_exclusions_sync.py | 2 +- ...ated_config_service_v2_list_links_async.py | 2 +- ...rated_config_service_v2_list_links_sync.py | 2 +- ...ated_config_service_v2_list_sinks_async.py | 2 +- ...rated_config_service_v2_list_sinks_sync.py | 2 +- ...ated_config_service_v2_list_views_async.py | 2 +- ...rated_config_service_v2_list_views_sync.py | 2 +- ...config_service_v2_undelete_bucket_async.py | 2 +- ..._config_service_v2_undelete_bucket_sync.py | 2 +- ...d_config_service_v2_update_bucket_async.py | 2 +- ...ig_service_v2_update_bucket_async_async.py | 2 +- ...fig_service_v2_update_bucket_async_sync.py | 2 +- ...ed_config_service_v2_update_bucket_sync.py | 2 +- ...g_service_v2_update_cmek_settings_async.py | 2 +- ...ig_service_v2_update_cmek_settings_sync.py | 2 +- ...onfig_service_v2_update_exclusion_async.py | 2 +- ...config_service_v2_update_exclusion_sync.py | 2 +- ...config_service_v2_update_settings_async.py | 2 +- ..._config_service_v2_update_settings_sync.py | 2 +- ...ted_config_service_v2_update_sink_async.py | 2 +- ...ated_config_service_v2_update_sink_sync.py | 2 +- ...ted_config_service_v2_update_view_async.py | 2 +- ...ated_config_service_v2_update_view_sync.py | 2 +- ...ted_logging_service_v2_delete_log_async.py | 2 +- ...ated_logging_service_v2_delete_log_sync.py | 2 +- ...gging_service_v2_list_log_entries_async.py | 2 +- ...ogging_service_v2_list_log_entries_sync.py | 2 +- ...ated_logging_service_v2_list_logs_async.py | 2 +- ...rated_logging_service_v2_list_logs_sync.py | 2 +- ...st_monitored_resource_descriptors_async.py | 2 +- ...ist_monitored_resource_descriptors_sync.py | 2 +- ...gging_service_v2_tail_log_entries_async.py | 2 +- ...ogging_service_v2_tail_log_entries_sync.py | 2 +- ...ging_service_v2_write_log_entries_async.py | 2 +- ...gging_service_v2_write_log_entries_sync.py | 2 +- ...rics_service_v2_create_log_metric_async.py | 2 +- ...trics_service_v2_create_log_metric_sync.py | 2 +- ...rics_service_v2_delete_log_metric_async.py | 2 +- ...trics_service_v2_delete_log_metric_sync.py | 2 +- ...metrics_service_v2_get_log_metric_async.py | 2 +- 
..._metrics_service_v2_get_log_metric_sync.py | 2 +- ...trics_service_v2_list_log_metrics_async.py | 2 +- ...etrics_service_v2_list_log_metrics_sync.py | 2 +- ...rics_service_v2_update_log_metric_async.py | 2 +- ...trics_service_v2_update_log_metric_sync.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- packages/google-cloud-logging/setup.py | 5 +- .../testing/constraints-3.10.txt | 10 - .../testing/constraints-3.11.txt | 10 - .../testing/constraints-3.12.txt | 10 - .../testing/constraints-3.7.txt | 3 +- .../testing/constraints-3.8.txt | 12 +- .../testing/constraints-3.9.txt | 10 - .../google-cloud-logging/tests/__init__.py | 2 +- .../tests/unit/__init__.py | 2 +- .../tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/__init__.py | 2 +- .../logging_v2/test_config_service_v2.py | 500 ++++++++++++++++-- .../logging_v2/test_logging_service_v2.py | 484 +++++++++++++++-- .../logging_v2/test_metrics_service_v2.py | 468 +++++++++++++++- 133 files changed, 2738 insertions(+), 429 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index bf30439496d6..a56e06a1d6ac 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 3962c40e9dca..729a878be035 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -60,8 +60,12 @@ class ConfigServiceV2AsyncClient: _client: ConfigServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ConfigServiceV2Client._DEFAULT_UNIVERSE cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) parse_cmek_settings_path = staticmethod( @@ -184,6 +188,25 @@ def transport(self) -> ConfigServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client) ) @@ -196,7 +219,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the config service v2 client. + """Instantiates the config service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -207,23 +230,38 @@ def __init__( transport (Union[str, ~.ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -340,6 +378,9 @@ async def sample_list_buckets(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -428,6 +469,9 @@ async def sample_get_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -518,6 +562,9 @@ async def sample_create_bucket_async(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -618,6 +665,9 @@ async def sample_update_bucket_async(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -708,6 +758,9 @@ async def sample_create_bucket(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -793,6 +846,9 @@ async def sample_update_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -868,6 +924,9 @@ async def sample_delete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -937,6 +996,9 @@ async def sample_undelete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1042,6 +1104,9 @@ async def sample_list_views(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1130,6 +1195,9 @@ async def sample_get_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1211,6 +1279,9 @@ async def sample_create_view(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1294,6 +1365,9 @@ async def sample_update_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1367,6 +1441,9 @@ async def sample_delete_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1486,6 +1563,9 @@ async def sample_list_sinks(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1626,6 +1706,9 @@ async def sample_get_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1763,6 +1846,9 @@ async def sample_create_sink(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1940,6 +2026,9 @@ async def sample_update_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2057,6 +2146,9 @@ async def sample_delete_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -2192,6 +2284,9 @@ async def sample_create_link(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2320,6 +2415,9 @@ async def sample_delete_link(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2438,6 +2536,9 @@ async def sample_list_links(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2552,6 +2653,9 @@ async def sample_get_link(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2675,6 +2779,9 @@ async def sample_list_exclusions(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -2811,6 +2918,9 @@ async def sample_get_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2947,6 +3057,9 @@ async def sample_create_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3097,6 +3210,9 @@ async def sample_update_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3211,6 +3327,9 @@ async def sample_delete_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -3311,6 +3430,9 @@ async def sample_get_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3419,6 +3541,9 @@ async def sample_update_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3553,6 +3678,9 @@ async def sample_get_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3697,6 +3825,9 @@ async def sample_update_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3780,6 +3911,9 @@ async def sample_copy_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3842,6 +3976,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3896,6 +4033,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3953,6 +4093,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 5208fe442d74..5257f8ddf204 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -128,11 +129,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -395,7 +400,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -425,6 +430,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -458,6 +468,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ConfigServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ConfigServiceV2Client._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -477,22 +659,32 @@ def __init__( transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -503,17 +695,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ConfigServiceV2Client._read_environment_variables() + self._client_cert_source = ConfigServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ConfigServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -522,20 +731,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
- if isinstance(transport, ConfigServiceV2Transport): + transport_provided = isinstance(transport, ConfigServiceV2Transport) + if transport_provided: # transport is a ConfigServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ConfigServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ConfigServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -545,17 +767,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_buckets( @@ -663,6 +885,9 @@ def sample_list_buckets(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -752,6 +977,9 @@ def sample_get_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -843,6 +1071,9 @@ def sample_create_bucket_async(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -944,6 +1175,9 @@ def sample_update_bucket_async(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1035,6 +1269,9 @@ def sample_create_bucket(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1121,6 +1358,9 @@ def sample_update_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1197,6 +1437,9 @@ def sample_delete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. 
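+            # The check compares the client's universe domain with the universe
+            # domain of the transport's credentials, and caches a successful
+            # result in ``_is_universe_domain_valid`` so that only the first RPC
+            # on a client instance pays for the comparison.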
+ self._validate_universe_domain() + # Send the request. rpc( request, @@ -1267,6 +1510,9 @@ def sample_undelete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1372,6 +1618,9 @@ def sample_list_views(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1461,6 +1710,9 @@ def sample_get_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1543,6 +1795,9 @@ def sample_create_view(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1627,6 +1882,9 @@ def sample_update_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1701,6 +1959,9 @@ def sample_delete_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1809,6 +2070,9 @@ def sample_list_sinks(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1938,6 +2202,9 @@ def sample_get_sink(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2075,6 +2342,9 @@ def sample_create_sink(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2241,6 +2511,9 @@ def sample_update_sink(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2347,6 +2620,9 @@ def sample_delete_sink(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -2482,6 +2758,9 @@ def sample_create_link(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2610,6 +2889,9 @@ def sample_delete_link(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2728,6 +3010,9 @@ def sample_list_links(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2842,6 +3127,9 @@ def sample_get_link(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2954,6 +3242,9 @@ def sample_list_exclusions(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3079,6 +3370,9 @@ def sample_get_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3215,6 +3509,9 @@ def sample_create_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3365,6 +3662,9 @@ def sample_update_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3468,6 +3768,9 @@ def sample_delete_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -3569,6 +3872,9 @@ def sample_get_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3678,6 +3984,9 @@ def sample_update_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3812,6 +4121,9 @@ def sample_get_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3956,6 +4268,9 @@ def sample_update_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -4040,6 +4355,9 @@ def sample_copy_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -4115,6 +4433,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -4169,6 +4490,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -4226,6 +4550,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 4af8eaf1c980..8a9710005a8e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index fd02975e4069..eb6d2764a9e0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 73db34bed102..e9b3dae141ba 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -65,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -128,6 +128,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index b82203cf6ab5..ccb53fe66b1b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -67,7 +67,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index f37ba9cb18ba..41894f1ebca2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -112,7 +112,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 134609c9349d..2f8ce7deee61 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 59dcad291e5d..890361b49e28 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -41,9 +41,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -60,8 +60,12 @@ class LoggingServiceV2AsyncClient: _client: LoggingServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
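+    # For example, under the default universe the template resolves back to
+    # the familiar endpoint:
+    #   "logging.{UNIVERSE_DOMAIN}".format(UNIVERSE_DOMAIN="googleapis.com")
+    #   == "logging.googleapis.com"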
DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = LoggingServiceV2Client._DEFAULT_UNIVERSE log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) @@ -168,6 +172,25 @@ def transport(self) -> LoggingServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client) ) @@ -180,7 +203,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the logging service v2 client. + """Instantiates the logging service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -191,23 +214,38 @@ def __init__( transport (Union[str, ~.LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -326,6 +364,9 @@ async def sample_delete_log(): gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -525,6 +566,9 @@ async def sample_write_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -683,6 +727,9 @@ async def sample_list_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -783,6 +830,9 @@ async def sample_list_monitored_resource_descriptors(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -914,6 +964,9 @@ async def sample_list_logs(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1016,6 +1069,9 @@ def request_generator(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1070,6 +1126,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1124,6 +1183,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1181,6 +1243,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index ce60602c663b..a9d6e082b3af 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
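The ``tail_log_entries`` method shown above is a bidirectional stream, so the
request side is supplied by a generator and responses are iterated as they
arrive. A minimal usage sketch against the synchronous client, assuming
application-default credentials and an illustrative project ID:

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.cloud.logging_v2.types import TailLogEntriesRequest

    client = LoggingServiceV2Client()

    def request_generator():
        # The first request opens the stream and names the resources to tail.
        yield TailLogEntriesRequest(resource_names=["projects/my-project"])

    # Responses are yielded as the service matches new log entries.
    for response in client.tail_log_entries(request_generator()):
        for entry in response.entries:
            print(entry.log_name)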
@@ -30,6 +30,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -44,9 +45,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -127,11 +128,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -277,7 +282,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -307,6 +312,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -340,6 +350,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = LoggingServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or LoggingServiceV2Client._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -359,22 +541,32 @@ def __init__( transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -385,17 +577,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LoggingServiceV2Client._read_environment_variables() + self._client_cert_source = LoggingServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = LoggingServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -404,20 +613,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
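        # A sketch of that extensibility point (``credentials`` is assumed to
        # be an already-constructed google.auth credentials object):
        #
        #   from google.cloud.logging_v2.services.logging_service_v2.transports import (
        #       LoggingServiceV2GrpcTransport,
        #   )
        #
        #   transport = LoggingServiceV2GrpcTransport(
        #       host="logging.googleapis.com",
        #       credentials=credentials,
        #   )
        #   client = LoggingServiceV2Client(transport=transport)
        #
        # Passing ``credentials``, ``credentials_file``, or ``scopes`` together
        # with a transport instance raises ValueError, as enforced below.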
- if isinstance(transport, LoggingServiceV2Transport): + transport_provided = isinstance(transport, LoggingServiceV2Transport) + if transport_provided: # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(LoggingServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or LoggingServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -427,17 +649,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def delete_log( @@ -536,6 +758,9 @@ def sample_delete_log(): gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -723,6 +948,9 @@ def sample_write_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -870,6 +1098,9 @@ def sample_list_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -962,6 +1193,9 @@ def sample_list_monitored_resource_descriptors(): self._transport.list_monitored_resource_descriptors ] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1082,6 +1316,9 @@ def sample_list_logs(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1169,6 +1406,9 @@ def request_generator(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( requests, @@ -1236,6 +1476,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1290,6 +1533,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1347,6 +1593,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 02dcf93b3a27..f8a63387bfbf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index d7dae810bf04..668b54522433 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index a256ca284d07..5bbd7cc78a06 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -65,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -128,6 +128,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
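        # Each entry pairs an RPC with ``gapic_v1.method.wrap_method``, binding
        # its default retry/timeout policy and the ``client_info`` user agent.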
self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 775fcbf98281..8a6a3efd3819 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -66,7 +66,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 5f1acd97452f..159a0e2e4975 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -111,7 +111,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 3b688ccb4362..d95456f174ec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index b2cf3e3d7b33..1053158e95ac 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
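For reference, the read-only surface added in this change can be exercised as
follows; a client built with application-default credentials is assumed, and
note that the transport appends ``:443`` to a bare hostname:

    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )

    client = MetricsServiceV2Client()
    print(client.api_endpoint)     # "logging.googleapis.com"
    print(client.universe_domain)  # "googleapis.com"
    print(client.transport.host)   # "logging.googleapis.com:443"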
@@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -58,8 +58,12 @@ class MetricsServiceV2AsyncClient: _client: MetricsServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = MetricsServiceV2Client._DEFAULT_UNIVERSE log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) @@ -166,6 +170,25 @@ def transport(self) -> MetricsServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client) ) @@ -178,7 +201,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the metrics service v2 client. + """Instantiates the metrics service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -189,23 +212,38 @@ def __init__( transport (Union[str, ~.MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -326,6 +364,9 @@ async def sample_list_log_metrics(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -462,6 +503,9 @@ async def sample_get_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -595,6 +639,9 @@ async def sample_create_log_metric(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -740,6 +787,9 @@ async def sample_update_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -848,6 +898,9 @@ async def sample_delete_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -899,6 +952,9 @@ async def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -953,6 +1009,9 @@ async def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1010,6 +1069,9 @@ async def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 098014bcd17c..9309f5c1779b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -126,11 +127,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -276,7 +281,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -306,6 +311,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -339,6 +349,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = MetricsServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or MetricsServiceV2Client._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -358,22 +540,32 @@ def __init__( transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence, and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -384,17 +576,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MetricsServiceV2Client._read_environment_variables() + self._client_cert_source = MetricsServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MetricsServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" ) @@ -403,20 +612,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations.
- if isinstance(transport, MetricsServiceV2Transport): + transport_provided = isinstance(transport, MetricsServiceV2Transport) + if transport_provided: # transport is a MetricsServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(MetricsServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or MetricsServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -426,17 +648,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_log_metrics( @@ -537,6 +759,9 @@ def sample_list_log_metrics(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -662,6 +887,9 @@ def sample_get_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -795,6 +1023,9 @@ def sample_create_log_metric(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -929,6 +1160,9 @@ def sample_update_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1026,6 +1260,9 @@ def sample_delete_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1090,6 +1327,9 @@ def list_operations( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1144,6 +1384,9 @@ def get_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1201,6 +1444,9 @@ def cancel_operation( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. rpc( request, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index dd23001cc906..70bad4bea533 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 57d82514d79f..820b7f2a2c4a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f8c4b954fbd7..f63d896b2572 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -65,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -128,6 +128,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 9426a670c598..3c4a2f38fb89 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -66,7 +66,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. 
+ The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 1756f9a1d19f..33f85cc9627c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -111,7 +111,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index 38dec7cdf17a..a7e5bed5f224 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index 98f768fb27c5..df4901dc2886 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 02a17fc7b505..325ec1ded454 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
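The metrics_service_v2/client.py hunks above replace the one-shot get_mtls_endpoint_and_cert_source() lookup with the universe-domain helpers (_read_environment_variables, _get_client_cert_source, _get_api_endpoint, _get_universe_domain), expose the results through the new api_endpoint and universe_domain properties, and give the transports a matching host property. A minimal sketch of how this is expected to look from the caller's side; this is not part of the patch, and it assumes a google-api-core recent enough for ClientOptions to accept universe_domain, that the GOOGLE_API_USE_* and GOOGLE_CLOUD_UNIVERSE_DOMAIN variables are unset, and AnonymousCredentials so no ADC setup or network access is needed:

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )

    # Default universe: the endpoint is built from the new
    # _DEFAULT_ENDPOINT_TEMPLATE, "logging.{UNIVERSE_DOMAIN}".
    client = MetricsServiceV2Client(credentials=AnonymousCredentials())
    assert client.universe_domain == "googleapis.com"
    assert client.api_endpoint == "logging.googleapis.com"

    # Overriding the universe domain re-templates the endpoint. An explicit
    # client_options.api_endpoint would still win, and per the docstring in
    # the hunks above, mTLS is rejected outside the default universe.
    client = MetricsServiceV2Client(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(universe_domain="example.com"),
    )
    assert client.api_endpoint == "logging.example.com"

Note that the credential check itself (_compare_universes) only runs once an RPC is attempted, via the _validate_universe_domain() calls inserted before each request above, which keeps client construction cheap and offline-safe.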
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 7826bd0264d9..0e106779009c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index 74d167d5beea..b437ba8cca64 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 8e3057207574..8666de9e01a5 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -68,6 +68,7 @@ def place_before(path, text, *before_text, escape=None): "**/gapic_version.py", "setup.py", "testing/constraints-3.7.txt", + "testing/constraints-3.8.txt", "README.rst", "google/cloud/logging/__init__.py", # generated types are hidden from users "google/cloud/logging_v2/__init__.py", diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 8b380c7df275..5dbd08fa7eed 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -11,6 +11,8 @@ filterwarnings = ignore:unclosed:ResourceWarning # Remove after support for Python 3.7 is dropped ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code # 3.7 deprecation warnings ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index 993293752c38..ebb0fb6e8327 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index b95b83ab4f96..81e324800269 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 089263531030..946976965e26 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 8d55ee0bfea5..27530c87e7db 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 9b71e2d74123..fbca2158fb6e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 111a2d272676..8d2cd4568caa 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index b592719058ba..adeda1dbc4a3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index a3b20a5f3a35..e68bd72bd27b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index c130fe56dd93..ae99ac5dcdd2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index ce3bbfd12de8..c385ec07fdb8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index c4deb526b3b5..54d9ae63f6c5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index 16db9a155869..216f5aff4ecc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 8eaba2353bd4..505cf48d6431 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 7f1f4a7dc6ac..5984c5a59f7f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index cb409bf4bc5b..d5be4998e62a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index a31d04ceb85d..2746db10658d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 6bd56016a3f5..ed33724d94fa 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 66c82b08b68a..706281a237f1 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 9c47004edf17..e19a7a781251 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index 209651ad6d78..18a34126e074 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index d8b4f483242c..f00e20418f79 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index 947fdf52df7c..78f486498b04 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 1fe4e6dae87b..bf1af9401568 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 6416ff773e33..3ef94f7a79d5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 11ce2f13ac02..4b36ba8f3266 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index ac8db344413a..c13a534f204d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 660759e092de..90e7db19f00f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index eedf30d59132..71459b5b6e09 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index a296e0bdd504..0bf125892cec 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index bd47eede1dcd..afd01364f46a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index efc87806ddbe..0fb41ff507ca 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 8db2ca31071d..0650a0dbced0 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index 0eb6fb853d33..f40cd796fba6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index b0290a2fb0a7..2c35d7ed7b47 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 694d6ddabc8a..b5a1f32ad51a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 2a0f1c100e48..27fecef31d66 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index f0438a0a1a7d..576d0f67fc53 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index f0e60b745a29..affd70728d9e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 883810c4ed62..7ea5d3bd0306 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 641d8f6b613c..1f78f496894e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 444ca9c5be28..231c07081921 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index ec66239ed7e6..e27b3be4f94a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index cad31c4d4345..3c8fdf4fd5cf 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index ec752eda86b2..2dbd4b41bcd4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index 83754a2383a3..2e73bbab99c7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index d79a68b0256e..971da2b3d3dd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 1a36ac6659b6..0324db46320a 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 1fdb4e9c1513..7fc0350e0165 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index 52001dd745b7..eba1b485dffb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 9e04ebadc403..a4f4ee0686a1 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 6bebb379278f..fd9c7c9cc357 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 8f0b5b1077d0..ea9cf9dee5e8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 7c6c3716052f..148fdc440dff 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index d1f37e9299c3..1093d553f46e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 89fb901e5cdc..e8ef2a1a574c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 31b5415fc6e5..71ce93d619e9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 7df03d1e77cd..bfdee5a1d333 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index cc17ec23c4de..e90b2f0e3556 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index 1242c3cfb0b1..ac1601fb83e3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 3edc24c9693f..110f14903883 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index d9739167724e..d71cd2e3d7b3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index ec2ff7fbf6a6..27884e87287d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 949b9d98b71c..25eed782e618 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 53890848417a..b72847a7706e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 7032872fabdf..8122bbb6f960 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 12124e531d12..c77abc3bc883 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index e310819b79da..2a3b80a7c64e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 7e20ad1659de..4b51c26537b6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 3149daeb1a76..3b1d6b8d76a4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 04441e67173c..54ee4886d33f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index a1867444113f..26ff77764849 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index 399bf369a7c3..e83497a80167 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 1ce36bba4bab..904bd1400fcd 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 1756dccec4f0..9f11c2f2dcd2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index eb377d2264b9..0ee78a31cb84 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 4d30f92fb122..136677f0b41c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 9af902280e94..c0fbe4247dbf 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index a0a68cfedda6..122a776d5448 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index 0d0f9f4c8cb6..64c85005273b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index 5452c586f99e..f1be9234d569 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 53f9e5b06c2d..530611d0c1d2 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 26409d6d9c65..adfab558f7e5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 325cf4d44bbc..0ee2265c28e3 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 9442a7a01981..fa9a650c26bf 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 047ae2c869eb..dc0a60d7c609 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
index 583fa4c7bafd..8baebc548d56 100644
--- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
+++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
index f1b714b6b7b8..b62675ba6439 100644
--- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
+++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-logging",
-    "version": "3.9.0"
+    "version": "0.1.0"
   },
   "snippets": [
     {
diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py
index 978175d3a1b4..db7b392d5543 100644
--- a/packages/google-cloud-logging/setup.py
+++ b/packages/google-cloud-logging/setup.py
@@ -36,7 +36,10 @@
 release_status = "Development Status :: 5 - Production/Stable"
 
 dependencies = [
-    "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*",
+    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+    # Exclude incompatible versions of `google-auth`
+    # See https://github.com/googleapis/google-cloud-python/issues/12364
+    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
     "google-cloud-appengine-logging>=0.1.0, <2.0.0dev",
     "google-cloud-audit-log >= 0.1.0, < 1.0.0dev",
     "google-cloud-core >= 2.0.0, <3.0.0dev",
diff --git a/packages/google-cloud-logging/testing/constraints-3.10.txt b/packages/google-cloud-logging/testing/constraints-3.10.txt
index 577d7b4cd853..ed7f9aed2559 100644
--- a/packages/google-cloud-logging/testing/constraints-3.10.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.10.txt
@@ -4,13 +4,3 @@
 google-api-core
 proto-plus
 protobuf
-
-# Lower bound testing for optional dependencies
-django==3.2
-
-# Need specific versions of Flask dependencies for Flask 1.0 to work
-flask==1.0.3
-jinja2==2.11
-markupsafe==2.0.1
-itsdangerous==2.0.1
-werkzeug==1.0.1
\ No newline at end of file
diff --git a/packages/google-cloud-logging/testing/constraints-3.11.txt b/packages/google-cloud-logging/testing/constraints-3.11.txt
index 68a9effe9f7e..ed7f9aed2559 100644
--- a/packages/google-cloud-logging/testing/constraints-3.11.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.11.txt
@@ -4,13 +4,3 @@
 google-api-core
 proto-plus
 protobuf
-
-# Lower bound testing for optional dependencies
-django==4.1
-
-# Need specific versions of Flask dependencies for Flask 1.0 to work
-flask==1.0.3
-jinja2==2.11
-markupsafe==2.0.1
-itsdangerous==2.0.1
-werkzeug==1.0.1
\ No newline at end of file
diff --git a/packages/google-cloud-logging/testing/constraints-3.12.txt b/packages/google-cloud-logging/testing/constraints-3.12.txt
index 5548b06affcf..ed7f9aed2559 100644
--- a/packages/google-cloud-logging/testing/constraints-3.12.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.12.txt
@@ -4,13 +4,3 @@
 google-api-core
 proto-plus
 protobuf
-
-# Lower bound testing for optional dependencies
-django==4.2
-
-# Need specific versions of Flask dependencies for Flask 1.0 to work
-flask==1.0.3
-jinja2==2.11
-markupsafe==2.0.1
-itsdangerous==2.0.1
-werkzeug==1.0.1
\ No newline at end of file
diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt
index 977767328234..3aded209e3d8 100644
--- a/packages/google-cloud-logging/testing/constraints-3.7.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.7.txt
@@ -4,7 +4,8 @@
 # Pin the version to the lower bound.
 # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
 # Then this file should have google-cloud-foo==1.14.0
-google-api-core==1.33.2
+google-api-core==1.34.1
+google-auth==2.14.1
 proto-plus==1.22.0
 protobuf==3.19.5
 google-cloud-core==2.0.0
diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt
index cc90422a45f4..3f30789875d5 100644
--- a/packages/google-cloud-logging/testing/constraints-3.8.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.8.txt
@@ -1,16 +1,6 @@
 # -*- coding: utf-8 -*-
 # This constraints file is required for unit tests.
 # List all library dependencies and extras in this file.
-google-api-core
+google-api-core==2.14.0
 proto-plus
 protobuf
-
-# Lower bound testing for optional dependencies
-django==3.2
-
-# Need specific versions of Flask dependencies for Flask 1.0 to work
-flask==1.0.0
-jinja2==2.10.1
-markupsafe==2.0.1
-itsdangerous==2.0.1
-werkzeug==1.0.1
\ No newline at end of file
diff --git a/packages/google-cloud-logging/testing/constraints-3.9.txt b/packages/google-cloud-logging/testing/constraints-3.9.txt
index cc90422a45f4..ed7f9aed2559 100644
--- a/packages/google-cloud-logging/testing/constraints-3.9.txt
+++ b/packages/google-cloud-logging/testing/constraints-3.9.txt
@@ -4,13 +4,3 @@
 google-api-core
 proto-plus
 protobuf
-
-# Lower bound testing for optional dependencies
-django==3.2
-
-# Need specific versions of Flask dependencies for Flask 1.0 to work
-flask==1.0.0
-jinja2==2.10.1
-markupsafe==2.0.1
-itsdangerous==2.0.1
-werkzeug==1.0.1
\ No newline at end of file
diff --git a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py
index 89a37dc92c5a..8f6cf068242c 100644
--- a/packages/google-cloud-logging/tests/__init__.py
+++ b/packages/google-cloud-logging/tests/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py
index 89a37dc92c5a..8f6cf068242c 100644
--- a/packages/google-cloud-logging/tests/unit/__init__.py
+++ b/packages/google-cloud-logging/tests/unit/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
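The tightened pins in setup.py and the constraints files above can be sanity-checked locally. A minimal sketch using the `packaging` library (an assumption here, not a dependency this patch adds; the candidate versions are illustrative):

    # Which google-auth releases satisfy the new specifier from setup.py?
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # Same constraint expression as the patched dependency entry.
    auth_spec = SpecifierSet(">=2.14.1,<3.0.0dev,!=2.24.0,!=2.25.0")

    for candidate in ["2.14.0", "2.14.1", "2.24.0", "2.25.0", "2.26.0"]:
        # 2.14.0 falls below the new floor; 2.24.0 and 2.25.0 are excluded
        # per googleapis/google-cloud-python#12364.
        print(candidate, Version(candidate) in auth_spec)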
diff --git a/packages/google-cloud-logging/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/__init__.py
index 89a37dc92c5a..8f6cf068242c 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/__init__.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py
index 89a37dc92c5a..8f6cf068242c 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py
index abe89b5aec5d..96cb15e894d0 100644
--- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py
+++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@
 from grpc.experimental import aio
 import math
 import pytest
+from google.api_core import api_core_version
 from proto.marshal.rules.dates import DurationRule, TimestampRule
 from proto.marshal.rules import wrappers
 
@@ -71,6 +72,17 @@ def modify_default_endpoint(client):
     )
 
 
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return (
+        "test.{UNIVERSE_DOMAIN}"
+        if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE)
+        else client._DEFAULT_ENDPOINT_TEMPLATE
+    )
+
+
 def test__get_default_mtls_endpoint():
     api_endpoint = "example.googleapis.com"
     api_mtls_endpoint = "example.mtls.googleapis.com"
@@ -100,6 +112,273 @@ def test__get_default_mtls_endpoint():
     )
 
 
+def test__read_environment_variables():
+    assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            True,
+            "auto",
+            None,
+        )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            False,
+            "auto",
+            None,
+        )
+
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            ConfigServiceV2Client._read_environment_variables()
+    assert (
+        str(excinfo.value)
+        == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+    )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            False,
+            "never",
+            None,
+        )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            False,
+            "always",
+            None,
+        )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            False,
+            "auto",
+            None,
+        )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            ConfigServiceV2Client._read_environment_variables()
+    assert (
+        str(excinfo.value)
+        == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+    )
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert ConfigServiceV2Client._read_environment_variables() == (
+            False,
+            "auto",
+            "foo.com",
+        )
+
+
+def test__get_client_cert_source():
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert ConfigServiceV2Client._get_client_cert_source(None, False) is None
+    assert (
+        ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False)
+        is None
+    )
+    assert (
+        ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True)
+        == mock_provided_cert_source
+    )
+
+    with mock.patch(
+        "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
+    ):
+        with mock.patch(
+            "google.auth.transport.mtls.default_client_cert_source",
+            return_value=mock_default_cert_source,
+        ):
+            assert (
+                ConfigServiceV2Client._get_client_cert_source(None, True)
+                is mock_default_cert_source
+            )
+            assert (
+                ConfigServiceV2Client._get_client_cert_source(
+                    mock_provided_cert_source, "true"
+                )
+                is mock_provided_cert_source
+            )
+
+
+@mock.patch.object(
+    ConfigServiceV2Client,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2Client),
+)
+@mock.patch.object(
+    ConfigServiceV2AsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2AsyncClient),
+)
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE
+    default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=default_universe
+    )
+    mock_universe = "bar.com"
+    mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=mock_universe
+    )
+
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(
+            api_override, mock_client_cert_source, default_universe, "always"
+        )
+        == api_override
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(
+            None, mock_client_cert_source, default_universe, "auto"
+        )
+        == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto")
+        == default_endpoint
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always")
+        == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(
+            None, mock_client_cert_source, default_universe, "always"
+        )
+        == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never")
+        == mock_endpoint
+    )
+    assert (
+        ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never")
+        == default_endpoint
+    )
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        ConfigServiceV2Client._get_api_endpoint(
+            None, mock_client_cert_source, mock_universe, "auto"
+        )
+    assert (
+        str(excinfo.value)
+        == "mTLS is not supported in any universe other than googleapis.com."
+    )
+
+
+def test__get_universe_domain():
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert (
+        ConfigServiceV2Client._get_universe_domain(
+            client_universe_domain, universe_domain_env
+        )
+        == client_universe_domain
+    )
+    assert (
+        ConfigServiceV2Client._get_universe_domain(None, universe_domain_env)
+        == universe_domain_env
+    )
+    assert (
+        ConfigServiceV2Client._get_universe_domain(None, None)
+        == ConfigServiceV2Client._DEFAULT_UNIVERSE
+    )
+
+    with pytest.raises(ValueError) as excinfo:
+        ConfigServiceV2Client._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"),
+    ],
+)
+def test__validate_universe_domain(client_class, transport_class, transport_name):
+    client = client_class(
+        transport=transport_class(credentials=ga_credentials.AnonymousCredentials())
+    )
+    assert client._validate_universe_domain() == True
+
+    # Test the case when universe is already validated.
+    assert client._validate_universe_domain() == True
+
+    if transport_name == "grpc":
+        # Test the case where credentials are provided by the
+        # `local_channel_credentials`. The default universes in both match.
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel(
+            "http://localhost/", grpc.local_channel_credentials()
+        )
+        transport = transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [
+        int(part) for part in google.auth.__version__.split(".")[0:2]
+    ]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(transport=transport_class(credentials=credentials))
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert (
+            str(excinfo.value)
+            == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+        )
+
+    # Test the case when there is a universe mismatch from the client.
+    #
+    # TODO: Make this test unconditional once the minimum supported version of
+    # google-api-core becomes 2.15.0 or higher.
+    api_core_major, api_core_minor = [
+        int(part) for part in api_core_version.__version__.split(".")[0:2]
+    ]
+    if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+        client = client_class(
+            client_options={"universe_domain": "bar.com"},
+            transport=transport_class(
+                credentials=ga_credentials.AnonymousCredentials(),
+            ),
+        )
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert (
+            str(excinfo.value)
+            == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+        )
+
+    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
+    with pytest.raises(ValueError):
+        client._compare_universes("foo.bar", None)
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -202,13 +481,13 @@ def test_config_service_v2_client_get_transport_class():
 )
 @mock.patch.object(
     ConfigServiceV2Client,
-    "DEFAULT_ENDPOINT",
-    modify_default_endpoint(ConfigServiceV2Client),
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2Client),
 )
 @mock.patch.object(
     ConfigServiceV2AsyncClient,
-    "DEFAULT_ENDPOINT",
-    modify_default_endpoint(ConfigServiceV2AsyncClient),
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2AsyncClient),
 )
 def test_config_service_v2_client_client_options(
     client_class, transport_class, transport_name
@@ -250,7 +529,9 @@ def test_config_service_v2_client_client_options(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id=None,
@@ -280,15 +561,23 @@
     # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
     # unsupported value.
     with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
             client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+        )
 
     # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
     with mock.patch.dict(
         os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError) as excinfo:
             client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
 
     # Check the case quota_project_id is provided
     options = client_options.ClientOptions(quota_project_id="octopus")
@@ -298,7 +587,9 @@ def test_config_service_v2_client_client_options(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id="octopus",
@@ -316,7 +607,9 @@ def test_config_service_v2_client_client_options(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id=None,
@@ -357,13 +650,13 @@ def test_config_service_v2_client_client_options(
 )
 @mock.patch.object(
     ConfigServiceV2Client,
-    "DEFAULT_ENDPOINT",
-    modify_default_endpoint(ConfigServiceV2Client),
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2Client),
 )
 @mock.patch.object(
     ConfigServiceV2AsyncClient,
-    "DEFAULT_ENDPOINT",
-    modify_default_endpoint(ConfigServiceV2AsyncClient),
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2AsyncClient),
 )
 @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
 def test_config_service_v2_client_mtls_env_auto(
@@ -386,7 +679,9 @@ def test_config_service_v2_client_mtls_env_auto(
 
             if use_client_cert_env == "false":
                 expected_client_cert_source = None
-                expected_host = client.DEFAULT_ENDPOINT
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                    UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                )
             else:
                 expected_client_cert_source = client_cert_source_callback
                 expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -418,7 +713,9 @@ def test_config_service_v2_client_mtls_env_auto(
                     return_value=client_cert_source_callback,
                 ):
                     if use_client_cert_env == "false":
-                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                            UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                        )
                         expected_client_cert_source = None
                     else:
                         expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -452,7 +749,9 @@ def test_config_service_v2_client_mtls_env_auto(
                 patched.assert_called_once_with(
                     credentials=None,
                     credentials_file=None,
-                    host=client.DEFAULT_ENDPOINT,
+                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                        UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                    ),
                     scopes=None,
                     client_cert_source_for_mtls=None,
                     quota_project_id=None,
@@ -542,6 +841,115 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class
         assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
         assert cert_source == mock_client_cert_source
 
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+        )
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient]
+)
+@mock.patch.object(
+    ConfigServiceV2Client,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2Client),
+)
+@mock.patch.object(
+    ConfigServiceV2AsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(ConfigServiceV2AsyncClient),
+)
+def test_config_service_v2_client_client_api_endpoint(client_class):
+    mock_client_cert_source = client_cert_source_callback
+    api_override = "foo.com"
+    default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE
+    default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=default_universe
+    )
+    mock_universe = "bar.com"
+    mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=mock_universe
+    )
+
+    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+    # use ClientOptions.api_endpoint as the api endpoint regardless.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+        ):
+            options = client_options.ClientOptions(
+                client_cert_source=mock_client_cert_source, api_endpoint=api_override
+            )
+            client = client_class(
+                client_options=options,
+                credentials=ga_credentials.AnonymousCredentials(),
+            )
+            assert client.api_endpoint == api_override
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+    else:
+        client = client_class(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+    assert client.api_endpoint == (
+        mock_endpoint if universe_exists else default_endpoint
+    )
+    assert client.universe_domain == (
+        mock_universe if universe_exists else default_universe
+    )
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+        assert client.api_endpoint == default_endpoint
+
+
 @pytest.mark.parametrize(
     "client_class,transport_class,transport_name",
@@ -567,7 +975,9 @@ def test_config_service_v2_client_client_options_scopes(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=["1", "2"],
             client_cert_source_for_mtls=None,
             quota_project_id=None,
@@ -606,7 +1016,9 @@ def test_config_service_v2_client_client_options_credentials_file(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file="credentials.json",
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id=None,
@@ -666,7 +1078,9 @@ def test_config_service_v2_client_create_channel_credentials_file(
         patched.assert_called_once_with(
             credentials=None,
             credentials_file="credentials.json",
-            host=client.DEFAULT_ENDPOINT,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id=None,
@@ -943,7 +1357,7 @@ async def test_list_buckets_flattened_error_async():
 
 def test_list_buckets_pager(transport_name: str = "grpc"):
     client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials(),
         transport=transport_name,
     )
 
@@ -993,7 +1407,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"):
 
 def test_list_buckets_pages(transport_name: str = "grpc"):
     client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials(),
         transport=transport_name,
     )
 
@@ -1035,7 +1449,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"):
 @pytest.mark.asyncio
 async def test_list_buckets_async_pager():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials(),
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
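The block above pins down how the client resolves its endpoint from an explicit override, the mTLS environment variables, and the universe domain. A minimal usage sketch of the same surface (an illustration, not part of the patch; assumes google-api-core >= 2.15 for the `universe_domain` client option, and anonymous credentials are used only to avoid ADC lookup):

    from google.api_core import client_options
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    opts = client_options.ClientOptions(universe_domain="googleapis.com")
    client = ConfigServiceV2Client(
        client_options=opts, credentials=AnonymousCredentials()
    )
    # With the default universe this resolves to logging.googleapis.com.
    print(client.api_endpoint, client.universe_domain)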
@@ -1085,7 +1499,7 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2475,7 +2889,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2525,7 +2939,7 @@ def test_list_views_pager(transport_name: str = "grpc"): def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2567,7 +2981,7 @@ def test_list_views_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_views_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2617,7 +3031,7 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3511,7 +3925,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3561,7 +3975,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3603,7 +4017,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3653,7 +4067,7 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5435,7 +5849,7 @@ async def test_list_links_flattened_error_async(): def test_list_links_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -5485,7 +5899,7 @@ def test_list_links_pager(transport_name: str = "grpc"): def test_list_links_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -5527,7 +5941,7 @@ def test_list_links_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5577,7 +5991,7 @@ async def test_list_links_async_pager(): @pytest.mark.asyncio async def test_list_links_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6093,7 +6507,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6143,7 +6557,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6185,7 +6599,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6235,7 +6649,7 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8231,7 +8645,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -9570,7 +9984,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 498ad94afd40..7dbb865f2c00 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ from grpc.experimental import aio import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers @@ -72,6 +73,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -102,6 +114,273 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with 
pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LoggingServiceV2Client._get_client_cert_source(None, False) is None + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LoggingServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LoggingServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LoggingServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LoggingServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, None) + == LoggingServiceV2Client._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
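# The two TODO-gated blocks in this test share one pattern: take the first two
# dotted components of a version string and compare them numerically. A small,
# hedged distillation (the helper name is illustrative):

def _at_least(version: str, major: int, minor: int) -> bool:
    """True if `version` is at least `major.minor`, comparing numerically."""
    got_major, got_minor = (int(part) for part in version.split(".")[0:2])
    return (got_major, got_minor) >= (major, minor)

assert _at_least("2.23.0", 2, 23)      # the google-auth gate above
assert not _at_least("2.14.1", 2, 15)  # the google-api-core gate below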
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -204,13 +483,13 @@ def test_logging_service_v2_client_get_transport_class(): ) @mock.patch.object( LoggingServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), ) @mock.patch.object( LoggingServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), ) def test_logging_service_v2_client_client_options( client_class, transport_class, transport_name @@ -252,7 +531,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -282,15 +563,23 @@ def test_logging_service_v2_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -300,7 +589,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -318,7 +609,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -359,13 +652,13 @@ def test_logging_service_v2_client_client_options( ) @mock.patch.object( LoggingServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), ) @mock.patch.object( LoggingServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_logging_service_v2_client_mtls_env_auto( @@ -388,7 +681,9 @@ def test_logging_service_v2_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -420,7 +715,9 @@ def test_logging_service_v2_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -454,7 +751,9 @@ def test_logging_service_v2_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -544,6 +843,115 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
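# For readers skimming: the `universe_domain` asserted below is chosen by the
# precedence exercised in test__get_universe_domain earlier in this file. An
# illustrative standalone equivalent, not the client's real helper:

_DEFAULT_UNIVERSE = "googleapis.com"

def pick_universe_domain(client_option, env_value):
    if client_option is not None:
        if client_option == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_option  # explicit ClientOptions.universe_domain wins
    if env_value is not None:
        return env_value  # then the GOOGLE_CLOUD_UNIVERSE_DOMAIN variable
    return _DEFAULT_UNIVERSE  # otherwise the default Google universe

assert pick_universe_domain("foo.com", "bar.com") == "foo.com"
assert pick_universe_domain(None, "bar.com") == "bar.com"
assert pick_universe_domain(None, None) == "googleapis.com"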
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -569,7 +977,9 @@ def test_logging_service_v2_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -608,7 +1018,9 @@ def test_logging_service_v2_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -668,7 +1080,9 @@ def test_logging_service_v2_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1330,7 +1744,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1377,7 +1791,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1419,7 +1833,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
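# The pager/pages tests in this file all stub the same wire contract: each
# response carries a `next_page_token`, and iteration re-issues the RPC until
# that token is empty. A hedged sketch of the loop, with `fetch_page` standing
# in for the stubbed RPC:

def iterate_pages(fetch_page):
    token = ""
    while True:
        page = fetch_page(page_token=token)  # one RPC per page
        yield page
        token = page.next_page_token
        if not token:  # an empty token marks the final page
            return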
@@ -1469,7 +1883,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1613,7 +2027,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1665,7 +2079,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1709,7 +2123,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1764,7 +2178,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2050,7 +2464,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2100,7 +2514,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2142,7 +2556,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2192,7 +2606,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2344,7 +2758,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -3471,7 +3885,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index f1d5ba3a382a..f20c7cfd3852 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ from grpc.experimental import aio import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers @@ -70,6 +71,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -100,6 +112,273 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with 
pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MetricsServiceV2Client._get_client_cert_source(None, False) is None + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MetricsServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + MetricsServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MetricsServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, None) + == MetricsServiceV2Client._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -217,13 +496,13 @@ def test_metrics_service_v2_client_get_transport_class(): ) @mock.patch.object( MetricsServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), ) @mock.patch.object( MetricsServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), ) def test_metrics_service_v2_client_client_options( client_class, transport_class, transport_name @@ -265,7 +544,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -295,15 +576,23 @@ def test_metrics_service_v2_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -313,7 +602,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -331,7 +622,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -372,13 +665,13 @@ def test_metrics_service_v2_client_client_options( ) @mock.patch.object( MetricsServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), ) @mock.patch.object( MetricsServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_metrics_service_v2_client_mtls_env_auto( @@ -401,7 +694,9 @@ def test_metrics_service_v2_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -433,7 +728,9 @@ def test_metrics_service_v2_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -467,7 +764,9 @@ def test_metrics_service_v2_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -557,6 +856,115 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
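# These cases rely on mock.patch.dict to scope each environment override to its
# `with` block; the previous os.environ state is restored on exit, so the cases
# cannot leak into one another. A minimal, self-contained illustration:

import os
from unittest import mock

before = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT")
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
    assert os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] == "never"
assert os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT") == before  # restored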
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -582,7 +990,9 @@ def test_metrics_service_v2_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -621,7 +1031,9 @@ def test_metrics_service_v2_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -681,7 +1093,9 @@ def test_metrics_service_v2_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -959,7 +1373,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1009,7 +1423,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1051,7 +1465,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1101,7 +1515,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2231,7 +2645,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -3357,7 +3771,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From f3941f780c8b922b032329dce27a3c0ca1be81bd Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Mar 2024 11:01:04 -0400 Subject: [PATCH 797/855] fix: remove usage in including_default_value_fields to prepare for protobuf 5.x (#866) Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../google-cloud-logging/google/cloud/logging_v2/_gapic.py | 7 ------- packages/google-cloud-logging/tests/unit/test__gapic.py | 1 - 2 files changed, 8 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index f6f6dca1f78d..688a9bfc4325 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -271,7 +271,6 @@ def sink_create( return MessageToDict( LogSink.pb(created_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_get(self, sink_name): @@ -298,7 +297,6 @@ def sink_get(self, sink_name): return MessageToDict( LogSink.pb(sink_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_update( @@ -351,7 +349,6 @@ def sink_update( return MessageToDict( LogSink.pb(sink_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_delete(self, sink_name): @@ -459,7 +456,6 @@ def metric_get(self, project, metric_name): return MessageToDict( LogMetric.pb(metric_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def metric_update( @@ -496,7 +492,6 @@ def metric_update( return MessageToDict( LogMetric.pb(metric_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def metric_delete(self, project, metric_name): @@ -530,7 +525,6 @@ def _parse_log_entry(entry_pb): return MessageToDict( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) except TypeError: if entry_pb.HasField("proto_payload"): @@ -539,7 +533,6 @@ def _parse_log_entry(entry_pb): entry_mapping = MessageToDict( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) entry_mapping["protoPayload"] = proto_payload return entry_mapping diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 8bf25870ab96..74ed47b1e30a 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -595,7 
+595,6 @@ def test_non_registry_failure(self, msg_to_dict_mock): msg_to_dict_mock.assert_called_once_with( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) def test_unregistered_type(self): From a710fc8da805188ce094396459f0b65eb0a8d032 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 14 Mar 2024 14:15:16 -0400 Subject: [PATCH 798/855] chore(main): release 3.10.0 (#835) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 20 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 24 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 7c3079b2d898..fc62d3d35012 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.9.0" + ".": "3.10.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 0ae1f74bb841..a41083f53375 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.10.0](https://github.com/googleapis/python-logging/compare/v3.9.0...v3.10.0) (2024-03-13) + + +### Features + +* Allow users to explicitly configure universe domain ([#846](https://github.com/googleapis/python-logging/issues/846)) ([e998a21](https://github.com/googleapis/python-logging/commit/e998a219740cf8b2373e462867244a6860b0c88c)) + + +### Bug Fixes + +* Added placeholder kwargs to StructuredLogHandler ([#845](https://github.com/googleapis/python-logging/issues/845)) ([9bc0a37](https://github.com/googleapis/python-logging/commit/9bc0a37d910340d828db8bab33d67785f184f00c)) +* Allowed for a partial override of loggers that get excluded from setup_client ([#831](https://github.com/googleapis/python-logging/issues/831)) ([870c940](https://github.com/googleapis/python-logging/commit/870c9403e03d31a0f22dddc257cd5fb2b4fc5ee3)) +* Remove usage in including_default_value_fields to prepare for protobuf 5.x ([#866](https://github.com/googleapis/python-logging/issues/866)) ([66a534d](https://github.com/googleapis/python-logging/commit/66a534d1b83d7c63f5c7b013bf27ed54dd2786c3)) +* Use value of cluster-location in GKE for tagging location ([#830](https://github.com/googleapis/python-logging/issues/830)) ([c15847c](https://github.com/googleapis/python-logging/commit/c15847c215c18ad3970efba12f5d337e6d499883)) + + +### Documentation + +* Added documentation for Django/Flask integrations and dictConfig ([#848](https://github.com/googleapis/python-logging/issues/848)) ([c65ec92](https://github.com/googleapis/python-logging/commit/c65ec92bf348e2bcdd8f4c5bacc152cfb4737eb1)) + ## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 90b3aae3d90f..b2ead68dd53a 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ 
b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.9.0" # {x-release-please-version} +__version__ = "3.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 90b3aae3d90f..b2ead68dd53a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.9.0" # {x-release-please-version} +__version__ = "3.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..3fb89838a939 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.10.0" }, "snippets": [ { From 3e045ada6502a2019ed5530aa5a126aa395c71ca Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 11:21:19 -0400 Subject: [PATCH 799/855] chore: remove nox uninstall/reinstall from python build.sh template (#869) Source-Link: https://github.com/googleapis/synthtool/commit/26358881238150aa51939ccc82b78c0e33d3bc9c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/build.sh | 7 ------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index e4e943e0259a..af879fdecde7 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 +# created: 2024-03-15T14:27:15.879623611Z diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index afa7a81aa8d6..ff6554281417 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. 
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then From a5227a430db78ee70ef6f6aa9e6e459b78244df4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 13:43:56 -0400 Subject: [PATCH 800/855] chore(python): add requirements for docs build (#870) Source-Link: https://github.com/googleapis/synthtool/commit/85c23b6bc4352c1b0674848eaeb4e48645aeda6b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 4 ++ .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 38 +++++++++++++++++++ 4 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/requirements.in create mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index af879fdecde7..5d9542b1cb21 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 -# created: 2024-03-15T14:27:15.879623611Z + digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f +# created: 2024-03-15T16:26:15.743347415Z diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index 8e39a2cc438d..bdaf39fe22d0 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000000..816817c672a1 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000000..0e5d70f20f83 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + 
--hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox From 3e5228ba1ce2729f409d0767f40cbec9465be995 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 26 Mar 2024 14:51:33 -0400 Subject: [PATCH 801/855] docs: Changed table in web-framework-integration to bulleted list (#875) --- .../docs/web-framework-integration.rst | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/docs/web-framework-integration.rst b/packages/google-cloud-logging/docs/web-framework-integration.rst index d91d714b31b0..d7bc3229dec7 100644 --- a/packages/google-cloud-logging/docs/web-framework-integration.rst +++ b/packages/google-cloud-logging/docs/web-framework-integration.rst @@ -18,15 +18,12 @@ Flask Flask integration has been tested to work with the following versions of Flask: -=============== ============== -Python version Flask versions -=============== ============== -3.7 >=1.0.0 -3.8 >=1.0.0 -3.9 >=1.0.0 -3.10 >=1.0.3 -3.11 >=1.0.3 -3.12 >=1.0.3 -=============== ============== +- Python 3.7 - 3.9: + + - Flask >=1.0.0 + +- Python >=3.10: + + - Flask >=1.0.3 Be sure to :doc:`set up logging ` before declaring the Flask app. 
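The ordering requirement stated in the docs change above (set up logging before declaring the Flask app) can be shown with a minimal sketch. The route and log message are placeholders, and credentials/project are assumed to come from the ambient environment:

import logging

import google.cloud.logging
from flask import Flask

# Attach the Cloud Logging handler to the root logger first...
client = google.cloud.logging.Client()
client.setup_logging()

# ...and only then declare the Flask app, per the docs above.
app = Flask(__name__)

@app.route("/")
def index():
    logging.info("handled a request")  # forwarded to Cloud Logging
    return "ok"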
From a61e2e7ff96d36d682c6d7349daaaaebabc5f9ea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 27 Mar 2024 09:36:03 -0400 Subject: [PATCH 802/855] chore: Update gapic-generator-python to v1.16.1 (#873) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.16.1 PiperOrigin-RevId: 618243632 Source-Link: https://github.com/googleapis/googleapis/commit/078a38bd240827be8e69a5b62993380d1b047994 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7af768c3f8ce58994482350f7401173329950a31 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2FmNzY4YzNmOGNlNTg5OTQ0ODIzNTBmNzQwMTE3MzMyOTk1MGEzMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .../snippet_metadata_google.logging.v2.json | 2 +- .../logging_v2/test_config_service_v2.py | 1924 +++++++++++++++-- .../logging_v2/test_logging_service_v2.py | 279 ++- .../logging_v2/test_metrics_service_v2.py | 298 ++- 4 files changed, 2359 insertions(+), 144 deletions(-) diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 3fb89838a939..b62675ba6439 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.10.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 96cb15e894d0..b1c25ba9ea90 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1151,7 +1151,8 @@ def test_list_buckets(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) @@ -1174,6 +1175,56 @@ def test_list_buckets_empty_call(): assert args[0] == logging_config.ListBucketsRequest() +def test_list_buckets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.ListBucketsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + client.list_buckets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_buckets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_buckets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + + @pytest.mark.asyncio async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest @@ -1200,7 +1251,8 @@ async def test_list_buckets_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) @@ -1579,7 +1631,8 @@ def test_get_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1608,6 +1661,60 @@ def test_get_bucket_empty_call(): assert args[0] == logging_config.GetBucketRequest() +def test_get_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetBucketRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + client.get_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.get_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest() + + @pytest.mark.asyncio async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest @@ -1640,7 +1747,8 @@ async def test_get_bucket_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1747,7 +1855,8 @@ def test_create_bucket_async(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1771,6 +1880,58 @@ def test_create_bucket_async_empty_call(): assert args[0] == logging_config.CreateBucketRequest() +def test_create_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + client.create_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + @pytest.mark.asyncio async def test_create_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -1797,7 +1958,8 @@ async def test_create_bucket_async_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1901,7 +2063,8 @@ def test_update_bucket_async(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1925,6 +2088,56 @@ def test_update_bucket_async_empty_call(): assert args[0] == logging_config.UpdateBucketRequest() +def test_update_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + client.update_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_update_bucket_async_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + @pytest.mark.asyncio async def test_update_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -1951,7 +2164,8 @@ async def test_update_bucket_async_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2061,7 +2275,8 @@ def test_create_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -2090,6 +2305,62 @@ def test_create_bucket_empty_call(): assert args[0] == logging_config.CreateBucketRequest() +def test_create_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + client.create_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.create_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + @pytest.mark.asyncio async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -2122,7 +2393,8 @@ async def test_create_bucket_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -2235,7 +2507,8 @@ def test_update_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -2264,19 +2537,40 @@ def test_update_bucket_empty_call(): assert args[0] == logging_config.UpdateBucketRequest() +def test_update_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + client.update_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest( + name="name_value", + ) + + @pytest.mark.asyncio -async def test_update_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest -): +async def test_update_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
@@ -2291,20 +2585,54 @@ async def test_update_bucket_async( restricted_fields=["restricted_fields_value"], ) ) - response = await client.update_bucket(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + response = await client.update_bucket() + call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + +@pytest.mark.asyncio +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True assert response.restricted_fields == ["restricted_fields_value"] @@ -2401,7 +2729,8 @@ def test_delete_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2423,6 +2752,50 @@ def test_delete_bucket_empty_call(): assert args[0] == logging_config.DeleteBucketRequest() +def test_delete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.DeleteBucketRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + client.delete_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + @pytest.mark.asyncio async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest @@ -2445,7 +2818,8 @@ async def test_delete_bucket_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2541,7 +2915,8 @@ def test_undelete_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UndeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2563,6 +2938,50 @@ def test_undelete_bucket_empty_call(): assert args[0] == logging_config.UndeleteBucketRequest() +def test_undelete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UndeleteBucketRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + client.undelete_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + @pytest.mark.asyncio async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest @@ -2585,7 +3004,8 @@ async def test_undelete_bucket_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UndeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2683,7 +3103,8 @@ def test_list_views(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) @@ -2706,6 +3127,56 @@ def test_list_views_empty_call(): assert args[0] == logging_config.ListViewsRequest() +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + client.list_views(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + + @pytest.mark.asyncio async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest @@ -2732,7 +3203,8 @@ async def test_list_views_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) @@ -3107,7 +3579,8 @@ def test_get_view(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3132,6 +3605,56 @@ def test_get_view_empty_call(): assert args[0] == logging_config.GetViewRequest() +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + client.get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + response = await client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + + @pytest.mark.asyncio async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest @@ -3160,7 +3683,8 @@ async def test_get_view_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3265,7 +3789,8 @@ def test_create_view(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3290,6 +3815,58 @@ def test_create_view_empty_call(): assert args[0] == logging_config.CreateViewRequest() +def test_create_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + client.create_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + response = await client.create_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + + @pytest.mark.asyncio async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest @@ -3318,7 +3895,8 @@ async def test_create_view_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3423,7 +4001,8 @@ def test_update_view(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request = logging_config.UpdateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3448,25 +4027,75 @@ def test_update_view_empty_call(): assert args[0] == logging_config.UpdateViewRequest() -@pytest.mark.asyncio -async def test_update_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest -): - client = ConfigServiceV2AsyncClient( +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateViewRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_update_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + response = await client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + + +@pytest.mark.asyncio +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", ) @@ -3476,7 +4105,8 @@ async def test_update_view_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request = logging_config.UpdateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3577,7 +4207,8 @@ def test_delete_view(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3599,6 +4230,50 @@ def test_delete_view_empty_call(): assert args[0] == logging_config.DeleteViewRequest() +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + client.delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + @pytest.mark.asyncio async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest @@ -3621,7 +4296,8 @@ async def test_delete_view_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3719,7 +4395,8 @@ def test_list_sinks(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.ListSinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksPager) @@ -3742,6 +4419,56 @@ def test_list_sinks_empty_call(): assert args[0] == logging_config.ListSinksRequest() +def test_list_sinks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListSinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + client.list_sinks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_sinks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sinks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + + @pytest.mark.asyncio async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest @@ -3768,7 +4495,8 @@ async def test_list_sinks_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.ListSinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) @@ -4148,7 +4876,8 @@ def test_get_sink(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request = logging_config.GetSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4178,6 +4907,61 @@ def test_get_sink_empty_call(): assert args[0] == logging_config.GetSinkRequest() +def test_get_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + client.get_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest( + sink_name="sink_name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.get_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + + @pytest.mark.asyncio async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest @@ -4211,7 +4995,8 @@ async def test_get_sink_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request = logging_config.GetSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4408,7 +5193,8 @@ def test_create_sink(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.CreateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4438,6 +5224,61 @@ def test_create_sink_empty_call(): assert args[0] == logging_config.CreateSinkRequest() +def test_create_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateSinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + client.create_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest( + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_create_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.create_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + @pytest.mark.asyncio async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest @@ -4471,7 +5312,8 @@ async def test_create_sink_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.CreateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4678,7 +5520,8 @@ def test_update_sink(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request = logging_config.UpdateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4708,19 +5551,40 @@ def test_update_sink_empty_call(): assert args[0] == logging_config.UpdateSinkRequest() +def test_update_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
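Note that `mock.patch.object(type(client.transport.create_sink), "__call__")` patches the stub's *class* rather than the stub instance: Python resolves special methods such as `__call__` on the type, so patching an instance attribute would leave `stub(request)` going to the real implementation. A self-contained illustration with a toy callable standing in for the transport stub:

    from unittest import mock

    class Stub:
        # Stand-in for a gRPC method stub, which is itself a callable.
        def __call__(self, request):
            raise RuntimeError("would hit the network")

    stub = Stub()
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = "fake-response"
        response = stub("some-request")

    assert response == "fake-response"
    _, args, _ = call.mock_calls[0]
    assert args[0] == "some-request"
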
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + client.update_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest( + sink_name="sink_name_value", + ) + + @pytest.mark.asyncio -async def test_update_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest -): +async def test_update_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. @@ -4736,16 +5600,51 @@ async def test_update_sink_async( include_children=True, ) ) - response = await client.update_sink(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + response = await client.update_sink() + call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" + +@pytest.mark.asyncio +async def test_update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" assert response.destination == "destination_value" assert response.filter == "filter_value" assert response.description == "description_value" @@ -4949,7 +5848,8 @@ def test_delete_sink(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
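The recurring mechanical change in these hunks, building `request = logging_config.UpdateSinkRequest()` in a local before the assertion instead of comparing against an inline constructor call, is behavior-neutral: proto-plus messages compare by field value, so two independently constructed requests with the same fields are equal. A quick check, assuming the `logging_config` types module these tests already import:

    from google.cloud.logging_v2.types import logging_config

    a = logging_config.UpdateSinkRequest(sink_name="sink_name_value")
    b = logging_config.UpdateSinkRequest(sink_name="sink_name_value")
    assert a == b        # value equality across distinct instances
    assert a is not b
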
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.DeleteSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -4971,6 +5871,50 @@ def test_delete_sink_empty_call(): assert args[0] == logging_config.DeleteSinkRequest() +def test_delete_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + client.delete_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + @pytest.mark.asyncio async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest @@ -4993,7 +5937,8 @@ async def test_delete_sink_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.DeleteSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -5169,7 +6114,8 @@ def test_create_link(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request = logging_config.CreateLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5191,6 +6137,54 @@ def test_create_link_empty_call(): assert args[0] == logging_config.CreateLinkRequest() +def test_create_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + client.create_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + + @pytest.mark.asyncio async def test_create_link_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest @@ -5215,7 +6209,8 @@ async def test_create_link_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request = logging_config.CreateLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5415,7 +6410,8 @@ def test_delete_link(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request = logging_config.DeleteLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5437,6 +6433,52 @@ def test_delete_link_empty_call(): assert args[0] == logging_config.DeleteLinkRequest() +def test_delete_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
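`create_link`, `delete_link`, and `copy_log_entries` are long-running RPCs: the transport yields an `operations_pb2.Operation`, and the client wraps it in a future the caller can poll or block on, which is all the tests assert (`isinstance(response, future.Future)`). A rough sketch of that wrapping using only the standard library; the real wrapper is `google.api_core.operation`, with richer polling semantics:

    from concurrent.futures import Future

    # Pretend this came back from the transport (fields are illustrative).
    raw_operation = {"name": "operations/spam", "done": True, "response": "link"}

    wrapped: Future = Future()
    if raw_operation["done"]:
        # A finished operation resolves the future immediately; an unfinished
        # one would be resolved later by a polling loop.
        wrapped.set_result(raw_operation["response"])

    assert wrapped.result() == "link"
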
+ with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + client.delete_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest() + + @pytest.mark.asyncio async def test_delete_link_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest @@ -5461,7 +6503,8 @@ async def test_delete_link_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request = logging_config.DeleteLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5643,7 +6686,8 @@ def test_list_links(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request = logging_config.ListLinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksPager) @@ -5666,6 +6710,56 @@ def test_list_links_empty_call(): assert args[0] == logging_config.ListLinksRequest() +def test_list_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + client.list_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
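On the async side the canned return value is wrapped in `grpc_helpers_async.FakeUnaryUnaryCall` so the client's `await` has something awaitable to resolve. A minimal stand-in showing just that contract (an illustration, not the `google.api_core` implementation):

    import asyncio

    class FakeCall:
        """Awaitable that resolves immediately to a canned response."""

        def __init__(self, response=None):
            self._response = response

        def __await__(self):
            if False:
                yield  # makes this method a generator, hence a valid __await__
            return self._response

    async def main():
        assert await FakeCall("canned") == "canned"
        assert await FakeCall(None) is None  # delete-style RPCs return None

    asyncio.run(main())
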
+ with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest() + + @pytest.mark.asyncio async def test_list_links_async( transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest @@ -5692,7 +6786,8 @@ async def test_list_links_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request = logging_config.ListLinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksAsyncPager) @@ -6067,7 +7162,8 @@ def test_get_link(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request = logging_config.GetLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) @@ -6092,6 +7188,56 @@ def test_get_link_empty_call(): assert args[0] == logging_config.GetLinkRequest() +def test_get_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + client.get_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + response = await client.get_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest() + + @pytest.mark.asyncio async def test_get_link_async( transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest @@ -6120,7 +7266,8 @@ async def test_get_link_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request = logging_config.GetLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) @@ -6301,7 +7448,8 @@ def test_list_exclusions(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request = logging_config.ListExclusionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsPager) @@ -6324,33 +7472,84 @@ def test_list_exclusions_empty_call(): assert args[0] == logging_config.ListExclusionsRequest() -@pytest.mark.asyncio -async def test_list_exclusions_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest -): - client = ConfigServiceV2AsyncClient( +def test_list_exclusions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListExclusionsRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", - ) + client.list_exclusions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_exclusions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_exclusions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + + +@pytest.mark.asyncio +async def test_list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request = logging_config.ListExclusionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsAsyncPager) @@ -6726,7 +7925,8 @@ def test_get_exclusion(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.GetExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6752,6 +7952,57 @@ def test_get_exclusion_empty_call(): assert args[0] == logging_config.GetExclusionRequest() +def test_get_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + client.get_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
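The `List*` responses carry a `next_page_token`, and the pagers the tests assert against (`ListExclusionsPager`, `ListExclusionsAsyncPager`, and so on) hide the token loop from callers. The underlying iteration pattern, reduced to a hypothetical toy `fetch_page`:

    def fetch_page(token=""):
        # Hypothetical two-page result set keyed by page token.
        pages = {
            "": (["exclusion-a", "exclusion-b"], "page-2"),
            "page-2": (["exclusion-c"], ""),  # empty token ends iteration
        }
        return pages[token]

    def iterate_all():
        token = ""
        while True:
            items, token = fetch_page(token)
            yield from items
            if not token:
                break

    assert list(iterate_all()) == ["exclusion-a", "exclusion-b", "exclusion-c"]
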
+ client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.get_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + @pytest.mark.asyncio async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest @@ -6781,7 +8032,8 @@ async def test_get_exclusion_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.GetExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6970,7 +8222,8 @@ def test_create_exclusion(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CreateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6996,6 +8249,57 @@ def test_create_exclusion_empty_call(): assert args[0] == logging_config.CreateExclusionRequest() +def test_create_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateExclusionRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + client.create_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest( + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_create_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.create_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + @pytest.mark.asyncio async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest @@ -7025,7 +8329,8 @@ async def test_create_exclusion_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CreateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -7224,7 +8529,8 @@ def test_update_exclusion(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request = logging_config.UpdateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -7250,6 +8556,57 @@ def test_update_exclusion_empty_call(): assert args[0] == logging_config.UpdateExclusionRequest() +def test_update_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + client.update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_update_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + @pytest.mark.asyncio async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest @@ -7279,7 +8636,8 @@ async def test_update_exclusion_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request = logging_config.UpdateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -7483,7 +8841,8 @@ def test_delete_exclusion(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request = logging_config.DeleteExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -7505,6 +8864,50 @@ def test_delete_exclusion_empty_call(): assert args[0] == logging_config.DeleteExclusionRequest() +def test_delete_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + client.delete_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + @pytest.mark.asyncio async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest @@ -7527,7 +8930,8 @@ async def test_delete_exclusion_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request = logging_config.DeleteExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -7710,7 +9114,8 @@ def test_get_cmek_settings(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7738,6 +9143,61 @@ def test_get_cmek_settings_empty_call(): assert args[0] == logging_config.GetCmekSettingsRequest() +def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetCmekSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + client.get_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cmek_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + @pytest.mark.asyncio async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest @@ -7769,7 +9229,8 @@ async def test_get_cmek_settings_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7882,7 +9343,8 @@ def test_update_cmek_settings(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7910,6 +9372,61 @@ def test_update_cmek_settings_empty_call(): assert args[0] == logging_config.UpdateCmekSettingsRequest() +def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + client.update_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_update_cmek_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + @pytest.mark.asyncio async def test_update_cmek_settings_async( transport: str = "grpc_asyncio", @@ -7942,7 +9459,8 @@ async def test_update_cmek_settings_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -8054,7 +9572,8 @@ def test_get_settings(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request = logging_config.GetSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
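Every client in these tests is constructed with `ga_credentials.AnonymousCredentials()`, which keeps the unit tests hermetic: no Application Default Credentials lookup happens and no token refresh is ever attempted. Assuming google-auth is installed:

    from google.auth import credentials as ga_credentials

    creds = ga_credentials.AnonymousCredentials()
    assert creds.valid          # anonymous credentials are always valid...
    assert not creds.expired    # ...and never expire, so nothing refreshes
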
assert isinstance(response, logging_config.Settings) @@ -8081,6 +9600,58 @@ def test_get_settings_empty_call(): assert args[0] == logging_config.GetSettingsRequest() +def test_get_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + client.get_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + @pytest.mark.asyncio async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest @@ -8111,7 +9682,8 @@ async def test_get_settings_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request = logging_config.GetSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -8302,7 +9874,8 @@ def test_update_settings(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -8329,6 +9902,58 @@ def test_update_settings_empty_call(): assert args[0] == logging_config.UpdateSettingsRequest() +def test_update_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + client.update_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_update_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + @pytest.mark.asyncio async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest @@ -8359,7 +9984,8 @@ async def test_update_settings_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -8554,7 +10180,8 @@ def test_copy_log_entries(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -8576,6 +10203,56 @@ def test_copy_log_entries_empty_call(): assert args[0] == logging_config.CopyLogEntriesRequest() +def test_copy_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.CopyLogEntriesRequest( + name="name_value", + filter="filter_value", + destination="destination_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + client.copy_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest( + name="name_value", + filter="filter_value", + destination="destination_value", + ) + + +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + @pytest.mark.asyncio async def test_copy_log_entries_async( transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest @@ -8600,7 +10277,8 @@ async def test_copy_log_entries_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 7dbb865f2c00..facbea0fa2d9 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1152,7 +1152,8 @@ def test_delete_log(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -1174,6 +1175,50 @@ def test_delete_log_empty_call(): assert args[0] == logging.DeleteLogRequest() +def test_delete_log_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.DeleteLogRequest( + log_name="log_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + client.delete_log(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest( + log_name="log_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_log_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + @pytest.mark.asyncio async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest @@ -1196,7 +1241,8 @@ async def test_delete_log_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -1374,7 +1420,8 @@ def test_write_log_entries(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request = logging.WriteLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging.WriteLogEntriesResponse) @@ -1398,6 +1445,56 @@ def test_write_log_entries_empty_call(): assert args[0] == logging.WriteLogEntriesRequest() +def test_write_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.WriteLogEntriesRequest( + log_name="log_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + client.write_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest( + log_name="log_name_value", + ) + + +@pytest.mark.asyncio +async def test_write_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) + response = await client.write_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + + @pytest.mark.asyncio async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest @@ -1424,7 +1521,8 @@ async def test_write_log_entries_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request = logging.WriteLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging.WriteLogEntriesResponse) @@ -1579,7 +1677,8 @@ def test_list_log_entries(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesPager) @@ -1602,6 +1701,58 @@ def test_list_log_entries_empty_call(): assert args[0] == logging.ListLogEntriesRequest() +def test_list_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogEntriesRequest( + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + client.list_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest( + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. 
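+        # FakeUnaryUnaryCall wraps the designated response in an awaitable,
+        # so `await client.list_log_entries()` resolves it much as it would
+        # the call object returned by a real async gRPC stub.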
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + @pytest.mark.asyncio async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest @@ -1628,7 +1779,8 @@ async def test_list_log_entries_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) @@ -1959,7 +2111,8 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = "grp # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) @@ -1984,6 +2137,58 @@ def test_list_monitored_resource_descriptors_empty_call(): assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() +def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListMonitoredResourceDescriptorsRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + client.list_monitored_resource_descriptors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_monitored_resource_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async( transport: str = "grpc_asyncio", @@ -2013,7 +2218,8 @@ async def test_list_monitored_resource_descriptors_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) @@ -2255,7 +2461,8 @@ def test_list_logs(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) @@ -2279,6 +2486,57 @@ def test_list_logs_empty_call(): assert args[0] == logging.ListLogsRequest() +def test_list_logs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + client.list_logs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_logs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_logs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + @pytest.mark.asyncio async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest @@ -2306,7 +2564,8 @@ async def test_list_logs_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index f20c7cfd3852..abeaa4c6e0f2 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1167,7 +1167,8 @@ def test_list_log_metrics(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) @@ -1190,6 +1191,56 @@ def test_list_log_metrics_empty_call(): assert args[0] == logging_metrics.ListLogMetricsRequest() +def test_list_log_metrics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.ListLogMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + client.list_log_metrics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_log_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + @pytest.mark.asyncio async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest @@ -1216,7 +1267,8 @@ async def test_list_log_metrics_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) @@ -1595,7 +1647,8 @@ def test_get_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1624,6 +1677,60 @@ def test_get_log_metric_empty_call(): assert args[0] == logging_metrics.GetLogMetricRequest() +def test_get_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + client.get_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + response = await client.get_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + + @pytest.mark.asyncio async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest @@ -1656,7 +1763,8 @@ async def test_get_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) @@ -1853,7 +1961,8 @@ def test_create_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1884,6 +1993,64 @@ def test_create_log_metric_empty_call(): assert args[0] == logging_metrics.CreateLogMetricRequest() +def test_create_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.CreateLogMetricRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + client.create_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest( + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_create_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + response = await client.create_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + + @pytest.mark.asyncio async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest @@ -1918,7 +2085,8 @@ async def test_create_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -2133,7 +2301,8 @@ def test_update_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -2164,6 +2333,64 @@ def test_update_log_metric_empty_call(): assert args[0] == logging_metrics.UpdateLogMetricRequest() +def test_update_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.UpdateLogMetricRequest( + metric_name="metric_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + client.update_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest( + metric_name="metric_name_value", + ) + + +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + response = await client.update_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + @pytest.mark.asyncio async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest @@ -2198,7 +2425,8 @@ async def test_update_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -2405,7 +2633,8 @@ def test_delete_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @@ -2429,6 +2658,54 @@ def test_delete_log_metric_empty_call(): assert args[0] == logging_metrics.DeleteLogMetricRequest() +def test_delete_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + client.delete_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + @pytest.mark.asyncio async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest @@ -2453,7 +2730,8 @@ async def test_delete_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
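+    # DeleteLogMetric returns google.protobuf.Empty on the wire, which the
+    # generated client surfaces to callers as None.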
assert response is None From 8b5ba783e64e43681d6693754578cb339d16cb9c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 13:06:33 -0400 Subject: [PATCH 803/855] docs: add summary_overview template (#878) * docs: add summary_overview template Source-Link: https://github.com/googleapis/synthtool/commit/d7c2271d319aeb7e3043ec3f1ecec6f3604f1f1e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 * Manually added reference to summary_overview.md in docs/index.rst --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/blunderbuss.yml | 17 ++- .../.kokoro/requirements.in | 3 +- .../.kokoro/requirements.txt | 114 ++++++++---------- packages/google-cloud-logging/docs/index.rst | 5 + .../docs/summary_overview.md | 22 ++++ 6 files changed, 98 insertions(+), 67 deletions(-) create mode 100644 packages/google-cloud-logging/docs/summary_overview.md diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 5d9542b1cb21..3189719173b1 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f -# created: 2024-03-15T16:26:15.743347415Z + digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 +# created: 2024-04-05T19:51:26.466869535Z diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index febbb3f31340..8574279a5f28 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -1,4 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - - googleapis/api-logging-python-reviewers + - googleapis/api-logging googleapis/api-logging-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-logging googleapis/api-logging-partners + assign_prs: - - googleapis/api-logging-python-reviewers + - googleapis/api-logging googleapis/api-logging-partners diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in index ec867d9fd65a..fff4d9ce0d0a 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.in +++ b/packages/google-cloud-logging/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index bda8e38c4f31..dd61f5f32018 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - 
--hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r 
requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # 
gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index 01d8e4eee753..ce3e3e82d223 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -41,3 +41,8 @@ For a list of all ``google-cloud-logging`` releases: :maxdepth: 2 changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-logging/docs/summary_overview.md b/packages/google-cloud-logging/docs/summary_overview.md new file mode 100644 index 000000000000..4786fbcaabe9 --- /dev/null +++ b/packages/google-cloud-logging/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Logging API + +Overview of the APIs available for Cloud Logging API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Logging API. + +[classes](https://cloud.google.com/python/docs/reference/logging/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/logging/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/logging/latest/summary_property.html) From e53f3e72c3e0ed97dc4dcc93618b8024907b10f2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 12 Apr 2024 19:08:44 +0200 Subject: [PATCH 804/855] chore(deps): update all dependencies (#853) * chore(deps): update all dependencies * pin pytest for python 3.7 --------- Co-authored-by: Anthonios Partheniou --- .../.kokoro/docker/docs/requirements.txt | 6 +++--- .../samples/snippets/requirements-test.txt | 4 ++-- .../samples/snippets/requirements.txt | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..2e124dfdd14c 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -16,9 +16,9 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.13.4 \ + --hash=sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f \ + --hash=sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4 # via virtualenv nox==2024.3.2 \ --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ diff --git 
a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 9d5ac84b9f2d..11bcddbb7b98 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,3 +1,3 @@ backoff==2.2.1 -pytest==7.4.4; python_version == '3.7' -pytest==8.0.0; python_version >= '3.8' +pytest===7.4.4; python_version == '3.7' +pytest==8.1.1; python_version >= '3.8' diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index bcf91785d284..07cef98b9057 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.9.0 -google-cloud-bigquery==3.17.2 -google-cloud-storage==2.14.0 -google-cloud-pubsub==2.19.4 +google-cloud-logging==3.10.0 +google-cloud-bigquery==3.20.1 +google-cloud-storage==2.16.0 +google-cloud-pubsub==2.21.1 From f7c9795c269a0d53383c8914864581a24c411412 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 13:41:04 -0400 Subject: [PATCH 805/855] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#882) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): bump idna from 3.4 to 3.7 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d50980e704793a2d3310bfb3664f3a82f24b5796 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 * Apply changes from googleapis/synthtool#1950 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.github/blunderbuss.yml | 9 ++++++--- .../.kokoro/docker/docs/requirements.txt | 6 +++--- packages/google-cloud-logging/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-logging/docs/index.rst | 4 ++-- 5 files changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 3189719173b1..81f87c56917d 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 -# created: 2024-04-05T19:51:26.466869535Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml index 8574279a5f28..d5f69b10a4ef 100644 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ b/packages/google-cloud-logging/.github/blunderbuss.yml @@ -4,14 +4,17 @@ # Note: This file is autogenerated. To make changes to the assignee # team, please update `codeowner_team` in `.repo-metadata.json`. 
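+# Blunderbuss treats every list entry below as a single user or team name,
+# so two teams joined by a space on one line would be parsed as one
+# malformed name; hence each team gets its own entry below.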
assign_issues: - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners assign_issues_by: - labels: - "samples" to: - googleapis/python-samples-reviewers - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners assign_prs: - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index 2e124dfdd14c..0e5d70f20f83 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -16,9 +16,9 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.4 \ - --hash=sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f \ - --hash=sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv nox==2024.3.2 \ --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index dd61f5f32018..51f92b8e12f1 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ diff --git a/packages/google-cloud-logging/docs/index.rst b/packages/google-cloud-logging/docs/index.rst index ce3e3e82d223..08f049c16eb2 100644 --- a/packages/google-cloud-logging/docs/index.rst +++ b/packages/google-cloud-logging/docs/index.rst @@ -43,6 +43,6 @@ For a list of all ``google-cloud-logging`` releases: changelog .. 
toctree:: - :hidden: + :hidden: - summary_overview.md + summary_overview.md From 7195cdf145791bd9b2e9bbc101435e8a59afd33c Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 17 Apr 2024 11:03:29 -0400 Subject: [PATCH 806/855] fix: Added environment specific labels to client library when running in Cloud Run Jobs (#877) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Added environment specific labels to client library when running in Cloud Run Jobs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Removed unnecessary import * Changed unit tests to pytest * Renamed add_environmental_labels to add_resource_labels; cached portions of add_resource_labels * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Updated comments and _get_environmental_labels * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../handlers/_monitored_resources.py | 72 ++++++++++++++++++- .../cloud/logging_v2/handlers/handlers.py | 16 ++--- .../handlers/test__monitored_resources.py | 61 ++++++++++++---- 3 files changed, 123 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py index f93d549886bd..5240fe746fc4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools +import logging import os from google.cloud.logging_v2.resource import Resource @@ -67,6 +69,20 @@ _PROJECT_NAME = "project/project-id" """Attribute in metadata server when in GKE environment.""" +_GAE_RESOURCE_TYPE = "gae_app" +"""Resource type for App Engine environment.""" + +_CLOUD_RUN_JOB_RESOURCE_TYPE = "cloud_run_job" +"""Resource type for Cloud Run Jobs.""" + +_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" +"""Extra trace label to be added on App Engine environments""" + +_CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL = "run.googleapis.com/execution_name" +_CLOUD_RUN_JOBS_TASK_INDEX_LABEL = "run.googleapis.com/task_index" +_CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL = "run.googleapis.com/task_attempt" +"""Extra labels for Cloud Run environments to be recognized by Cloud Run Jobs web UI.""" + def _create_functions_resource(): """Create a standardized Cloud Functions resource. 
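+# With these constants, a record logged from a Cloud Run job picks up labels
+# along these lines (values are illustrative; the real ones come from the
+# job's runtime environment variables):
+#
+#     {
+#         "run.googleapis.com/execution_name": "my-job-abc12",
+#         "run.googleapis.com/task_index": "0",
+#         "run.googleapis.com/task_attempt": "1",
+#     }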
@@ -159,7 +175,7 @@ def _create_cloud_run_job_resource():
     region = retrieve_metadata_server(_REGION_ID)
     project = retrieve_metadata_server(_PROJECT_NAME)
     resource = Resource(
-        type="cloud_run_job",
+        type=_CLOUD_RUN_JOB_RESOURCE_TYPE,
         labels={
             "project_id": project if project else "",
             "job_name": os.environ.get(_CLOUD_RUN_JOB_ID, ""),
@@ -177,7 +193,7 @@ def _create_app_engine_resource():
     zone = retrieve_metadata_server(_ZONE_ID)
     project = retrieve_metadata_server(_PROJECT_NAME)
     resource = Resource(
-        type="gae_app",
+        type=_GAE_RESOURCE_TYPE,
         labels={
             "project_id": project if project else "",
             "module_id": os.environ.get(_GAE_SERVICE_ENV, ""),
@@ -233,3 +249,55 @@ def detect_resource(project=""):
     else:
         # use generic global resource
         return _create_global_resource(project)
+
+
+@functools.lru_cache(maxsize=None)
+def _get_environmental_labels(resource_type):
+    """Builds a dictionary of labels to be inserted into a LogRecord of the given resource type.
+    This function should only build a dict of items that are consistent across multiple logging statements
+    of the same resource type, such as environment variables.
+
+    Returns:
+        dict:
+            A dict representation of labels and the values of those labels
+    """
+    labels = {}
+    environ_vars = {
+        _CLOUD_RUN_JOB_RESOURCE_TYPE: {
+            _CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: _CLOUD_RUN_EXECUTION_ID,
+            _CLOUD_RUN_JOBS_TASK_INDEX_LABEL: _CLOUD_RUN_TASK_INDEX,
+            _CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: _CLOUD_RUN_TASK_ATTEMPT,
+        }
+    }
+
+    if resource_type in environ_vars:
+        for key, env_var in environ_vars[resource_type].items():
+            val = os.environ.get(env_var, "")
+            if val:
+                labels[key] = val
+
+    return labels
+
+
+def add_resource_labels(resource: Resource, record: logging.LogRecord):
+    """Returns additional labels to be appended on to a LogRecord object based on the
+    local environment. Defaults to an empty dictionary if none apply. This is only to be
+    used for CloudLoggingHandler, as the structured logging daemon already does this.
+ + Args: + resource (google.cloud.logging.Resource): Resource based on the environment + record (logging.LogRecord): A LogRecord object representing a log record + Returns: + Dict[str, str]: New labels to append to the labels of the LogRecord + """ + if not resource: + return None + + # Get environmental labels from the resource type + labels = _get_environmental_labels(resource.type) + + # Add labels from log record + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: + labels[_GAE_TRACE_ID_LABEL] = record._trace + + return labels diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 3d6ab9d1e9a7..06e131442d5a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -19,7 +19,10 @@ import logging from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport -from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + add_resource_labels, +) from google.cloud.logging_v2.handlers._helpers import get_request_data DEFAULT_LOGGER_NAME = "python" @@ -40,12 +43,6 @@ """These environments require us to remove extra handlers on setup""" _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") -"""Extra trace label to be added on App Engine environments""" -_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" - -"""Resource name for App Engine environments""" -_GAE_RESOURCE_TYPE = "gae_app" - class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging @@ -206,9 +203,8 @@ def emit(self, record): labels = record._labels message = _format_and_parse_message(record, self) - if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: - # add GAE-specific label - labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} + labels = {**add_resource_labels(resource, record), **(labels or {})} or None + # send off request self.transport.send( record, diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py index e788f8e3425e..28f064b7bd99 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__monitored_resources.py @@ -12,34 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
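The unpacking order in the new emit() line above decides precedence: labels supplied by the caller still win over the environment-derived ones. A small sketch of that merge, with made-up label values:

    # Mirrors: labels = {**add_resource_labels(resource, record), **(labels or {})} or None
    env_labels = {"run.googleapis.com/task_index": "0"}  # from add_resource_labels
    user_labels = {"run.googleapis.com/task_index": "7", "team": "infra"}  # from `extra`

    merged = {**env_labels, **(user_labels or {})} or None
    print(merged)  # {'run.googleapis.com/task_index': '7', 'team': 'infra'}

    # When both sides are empty, `or None` keeps the old default of labels=None.
    print({**{}, **(None or {})} or None)  # None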
+import pytest import unittest +import logging import mock import os import functools -from google.cloud.logging_v2.handlers._monitored_resources import ( - _create_functions_resource, -) from google.cloud.logging_v2.handlers._monitored_resources import ( _create_app_engine_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_functions_resource, _create_kubernetes_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( _create_cloud_run_service_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( _create_cloud_run_job_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( _create_compute_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( _create_global_resource, + detect_resource, + add_resource_labels, ) -from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2.handlers import _monitored_resources from google.cloud.logging_v2.resource import Resource @@ -353,3 +344,45 @@ def test_detect_partial_data(self): # project id not returned from metadata serve # should be empty string self.assertEqual(resource.labels["project_id"], "") + + +@pytest.mark.parametrize( + "resource_type,os_environ,record_attrs,expected_labels", + [ + ( + _monitored_resources._GAE_RESOURCE_TYPE, + {}, + {"_trace": "trace_id"}, + {_monitored_resources._GAE_TRACE_ID_LABEL: "trace_id"}, + ), + ( + _monitored_resources._CLOUD_RUN_JOB_RESOURCE_TYPE, + { + _monitored_resources._CLOUD_RUN_EXECUTION_ID: "test_job_12345", + _monitored_resources._CLOUD_RUN_TASK_INDEX: "1", + _monitored_resources._CLOUD_RUN_TASK_ATTEMPT: "12", + }, + {}, + { + _monitored_resources._CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: "test_job_12345", + _monitored_resources._CLOUD_RUN_JOBS_TASK_INDEX_LABEL: "1", + _monitored_resources._CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: "12", + }, + ), + ("global", {}, {}, {}), + ], +) +def test_add_resource_labels(resource_type, os_environ, record_attrs, expected_labels): + os.environ.clear() + record = logging.LogRecord("logname", None, None, None, "test", None, None) + + resource = Resource(type=resource_type, labels={}) + + for attr, val in record_attrs.items(): + setattr(record, attr, val) + + os.environ.update(os_environ) + + labels = add_resource_labels(resource, record) + + assert expected_labels == labels From d6f8642ad7fb01b45429bd884c85e34e193daee3 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 24 Apr 2024 11:39:24 -0400 Subject: [PATCH 807/855] docs: Update `dictConfig` snippet (#885) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Update `dictConfig` snippet to add line that applies the config * Added `import logging.config` into snippet * Update root handlers dict entry in dictConfig * Update usage_guide.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove propagate config option from loggers * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Added test for dictConfig snippet * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../samples/snippets/usage_guide.py | 17 ++++++++++------- .../samples/snippets/usage_guide_test.py 
| 6 ++++++ 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index f4292a9de83a..ef8847ba5949 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -486,9 +486,9 @@ def setup_logging(client): @snippet def logging_dict_config(client): + # [START logging_dict_config] import logging.config - # [START logging_dict_config] import google.cloud.logging client = google.cloud.logging.Client() @@ -496,23 +496,26 @@ def logging_dict_config(client): LOGGING = { "version": 1, "handlers": { - "cloud_logging": { + "cloud_logging_handler": { "class": "google.cloud.logging.handlers.CloudLoggingHandler", "client": client, }, - "structured_log": { + "structured_log_handler": { "class": "google.cloud.logging.handlers.StructuredLogHandler" }, }, - "root": {"handlers": ["console"], "level": "WARNING"}, + "root": {"handlers": [], "level": "WARNING"}, "loggers": { - "my_logger": {"handlers": ["cloud_logging"], "level": "INFO"}, - "my_other_logger": {"handlers": ["structured_log"], "level": "INFO"}, + "cloud_logger": {"handlers": ["cloud_logging_handler"], "level": "INFO"}, + "structured_logger": { + "handlers": ["structured_log_handler"], + "level": "INFO", + }, }, } - # [END logging_dict_config] logging.config.dictConfig(LOGGING) + # [END logging_dict_config] def _line_no(func): diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide_test.py b/packages/google-cloud-logging/samples/snippets/usage_guide_test.py index f02d82fbde79..3f606dd656bd 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide_test.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide_test.py @@ -88,3 +88,9 @@ def test_client_list_entries(): for item in to_delete: usage_guide._backoff_not_found(item.delete) + + +def test_dict_config(): + client = Client() + + usage_guide.logging_dict_config(client) From 2febdc559150caf7013686c0b60fe8bd19bcf260 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 22 May 2024 11:26:40 -0400 Subject: [PATCH 808/855] chore: fix minor typos (#894) --- .../google/cloud/logging_v2/__init__.py | 2 +- .../google/cloud/logging_v2/_gapic.py | 2 +- .../google/cloud/logging_v2/_http.py | 2 +- .../cloud/logging_v2/handlers/transports/sync.py | 2 +- .../google/cloud/logging_v2/logger.py | 2 +- .../tests/unit/handlers/test_app_engine.py | 2 +- .../tests/unit/handlers/test_handlers.py | 2 +- .../tests/unit/handlers/test_structured_log.py | 2 +- .../tests/unit/test__instrumentation.py | 14 +++++++------- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py index 9860f1e06415..fac0b7d02e57 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/__init__.py @@ -36,7 +36,7 @@ ASCENDING = "timestamp asc" """Query string to order by ascending timestamps.""" DESCENDING = "timestamp desc" -"""Query string to order by decending timestamps.""" +"""Query string to order by descending timestamps.""" _instrumentation_emitted = False """Flag for whether instrumentation info has been emitted""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py 
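With the corrected snippet, logging.config.dictConfig(LOGGING) now runs inside the region tag, and the two named loggers route records independently. A short sketch of using that configuration, with logger names taken from the snippet:

    import logging

    # Assumes logging.config.dictConfig(LOGGING) from the snippet has run.
    logging.getLogger("cloud_logger").info("sent via CloudLoggingHandler")
    logging.getLogger("structured_logger").info("emitted as structured JSON")

    # The root logger now has an empty handler list, so other loggers are not
    # duplicated into Cloud Logging; Python's last-resort handler prints
    # WARNING and above to stderr instead.
    logging.getLogger("third.party").warning("stderr only")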
index 688a9bfc4325..039a830ce916 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -331,7 +331,7 @@ def sink_update( dict: The sink resource returned from the API (converted from a protobuf to a dictionary). """ - name = sink_name.split("/")[-1] # parse name out of full resoure name + name = sink_name.split("/")[-1] # parse name out of full resource name sink_pb = LogSink( name=name, filter=filter_, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py index b90789353dc9..c629b8d92a53 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_http.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_http.py @@ -347,7 +347,7 @@ def sink_update( dict: The returned (updated) resource. """ target = f"/{sink_name}" - name = sink_name.split("/")[-1] # parse name out of full resoure name + name = sink_name.split("/")[-1] # parse name out of full resource name data = {"name": name, "filter": filter_, "destination": destination} query_params = {"uniqueWriterIdentity": unique_writer_identity} return self.api_request( diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index 6f93b2e57003..17a4e554e8ea 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -14,7 +14,7 @@ """Transport for Python logging handler. -Logs directly to the the Cloud Logging API with a synchronous call. +Logs directly to the Cloud Logging API with a synchronous call. """ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 88424b27cae7..64130f02f137 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -359,7 +359,7 @@ def __init__(self, logger, client, *, resource=None): Args: logger (logging_v2.logger.Logger): the logger to which entries will be logged. - client (~logging_V2.client.Cilent): + client (~logging_V2.client.Client): The client to use. 
resource (Optional[~logging_v2.resource.Resource]): Monitored resource of the batch, defaults diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py index 868fc9be8a41..38d607e9909e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_app_engine.py @@ -166,7 +166,7 @@ def test_emit_manual_field_override(self): setattr(record, "trace", expected_trace) expected_span = "456" setattr(record, "span_id", expected_span) - expected_http = {"reuqest_url": "manual"} + expected_http = {"request_url": "manual"} setattr(record, "http_request", expected_http) expected_resource = Resource(type="test", labels={}) setattr(record, "resource", expected_resource) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index c301327a9d84..2aaa4560dc55 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -435,7 +435,7 @@ def test_emit_manual_field_override(self): setattr(record, "span_id", expected_span) expected_sampled = True setattr(record, "trace_sampled", expected_sampled) - expected_http = {"reuqest_url": "manual"} + expected_http = {"request_url": "manual"} setattr(record, "http_request", expected_http) expected_source = {"file": "test-file"} setattr(record, "source_location", expected_source) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index fc6b7c598406..06e5ff1aef60 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -459,7 +459,7 @@ def test_format_overrides(self): """ Allow users to override log fields using `logging.info("", extra={})` - If supported fields were overriden by the user, those choices should + If supported fields were overridden by the user, those choices should take precedence. 
""" import logging diff --git a/packages/google-cloud-logging/tests/unit/test__instrumentation.py b/packages/google-cloud-logging/tests/unit/test__instrumentation.py index a98aae34c3d9..97473ee6196b 100644 --- a/packages/google-cloud-logging/tests/unit/test__instrumentation.py +++ b/packages/google-cloud-logging/tests/unit/test__instrumentation.py @@ -25,7 +25,7 @@ class TestInstrumentation(unittest.TestCase): # LONG_VERSION > 16 characters LONG_VERSION = TEST_VERSION + "6789ABCDEF12" - def _get_diagonstic_value(self, entry, key): + def _get_diagnostic_value(self, entry, key): return entry.payload[i._DIAGNOSTIC_INFO_KEY][i._INSTRUMENTATION_SOURCE_KEY][-1][ key ] @@ -34,10 +34,10 @@ def test_default_diagnostic_info(self): entry = i._create_diagnostic_entry() self.assertEqual( i._PYTHON_LIBRARY_NAME, - self._get_diagonstic_value(entry, "name"), + self._get_diagnostic_value(entry, "name"), ) self.assertEqual( - i._LIBRARY_VERSION, self._get_diagonstic_value(entry, "version") + i._LIBRARY_VERSION, self._get_diagnostic_value(entry, "version") ) def test_custom_diagnostic_info(self): @@ -46,10 +46,10 @@ def test_custom_diagnostic_info(self): ) self.assertEqual( self.TEST_NAME, - self._get_diagonstic_value(entry, "name"), + self._get_diagnostic_value(entry, "name"), ) self.assertEqual( - self.TEST_VERSION, self._get_diagonstic_value(entry, "version") + self.TEST_VERSION, self._get_diagnostic_value(entry, "version") ) def test_truncate_long_values(self): @@ -60,8 +60,8 @@ def test_truncate_long_values(self): expected_name = self.LONG_NAME[: i._MAX_NAME_LENGTH] + "*" expected_version = self.LONG_VERSION[: i._MAX_VERSION_LENGTH] + "*" - self.assertEqual(expected_name, self._get_diagonstic_value(entry, "name")) - self.assertEqual(expected_version, self._get_diagonstic_value(entry, "version")) + self.assertEqual(expected_name, self._get_diagnostic_value(entry, "name")) + self.assertEqual(expected_version, self._get_diagnostic_value(entry, "version")) def test_drop_labels(self): """Labels should not be copied in instrumentation log""" From da856eb09b6095106578bdbf69dd1318d68b4275 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 22 May 2024 11:58:20 -0400 Subject: [PATCH 809/855] feat: OpenTelemetry trace/spanID integration for Python handlers (#889) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: OpenTelemetry trace/spanID integration for Python handlers * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Added more tests for OTel Python integration * linting * more linting * renamed _parse_current_open_telemetry_span and fixed otel testcases * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * linting + removed print statements * added opentelemetry sdk module cleanup to system test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Refactored get_request_and_trace_data back into get_request_data * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/handlers/_helpers.py | 62 +++++++- 
packages/google-cloud-logging/noxfile.py | 1 + packages/google-cloud-logging/owlbot.py | 1 + packages/google-cloud-logging/setup.py | 1 + .../tests/system/test_system.py | 57 +++++++ .../tests/unit/handlers/__init__.py | 41 +++++ .../tests/unit/handlers/test__helpers.py | 143 ++++++++++++++++++ .../tests/unit/handlers/test_handlers.py | 137 +++++++++++++++++ .../unit/handlers/test_structured_log.py | 101 +++++++++++++ 9 files changed, 538 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index f0c301ceb034..98bf0cd2934f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -24,6 +24,8 @@ except ImportError: # pragma: NO COVER flask = None +import opentelemetry.trace + from google.cloud.logging_v2.handlers.middleware.request import _get_django_request _DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH" @@ -191,23 +193,71 @@ def _parse_xcloud_trace(header): return trace_id, span_id, trace_sampled +def _retrieve_current_open_telemetry_span(): + """Helper to retrieve trace, span ID, and trace sampled information from the current + OpenTelemetry span. + + Returns: + Tuple[Optional[str], Optional[str], bool]: + Data related to the current trace_id, span_id, and trace_sampled for the + current OpenTelemetry span. If a span is not found, return None/False for all + fields. + """ + span = opentelemetry.trace.get_current_span() + if span != opentelemetry.trace.span.INVALID_SPAN: + context = span.get_span_context() + trace_id = opentelemetry.trace.format_trace_id(context.trace_id) + span_id = opentelemetry.trace.format_span_id(context.span_id) + trace_sampled = context.trace_flags.sampled + + return trace_id, span_id, trace_sampled + + return None, None, False + + def get_request_data(): """Helper to get http_request and trace data from supported web - frameworks (currently supported: Flask and Django). + frameworks (currently supported: Flask and Django), as well as OpenTelemetry. Attempts + to retrieve trace/spanID from OpenTelemetry first, before going to Traceparent then XCTC. + HTTP request data is taken from a supporting web framework (currently Flask or Django). + Because HTTP request data is decoupled from OpenTelemetry, it is possible to get as a + return value the HTTP request from the web framework of choice, and trace/span data from + OpenTelemetry, even if trace data is present in the HTTP request headers. Returns: Tuple[Optional[dict], Optional[str], Optional[str], bool]: Data related to the current http request, trace_id, span_id, and trace_sampled for the request. All fields will be None if a http request isn't found. 
""" + + ( + otel_trace_id, + otel_span_id, + otel_trace_sampled, + ) = _retrieve_current_open_telemetry_span() + + # Get HTTP request data checkers = ( get_request_data_from_django, get_request_data_from_flask, ) - for checker in checkers: - http_request, trace_id, span_id, trace_sampled = checker() - if http_request is not None: - return http_request, trace_id, span_id, trace_sampled + http_request, http_trace_id, http_span_id, http_trace_sampled = ( + None, + None, + None, + False, + ) - return None, None, None, False + for checker in checkers: + http_request, http_trace_id, http_span_id, http_trace_sampled = checker() + if http_request is None: + http_trace_id, http_span_id, http_trace_sampled = None, None, False + else: + break + + # otel_trace_id existing means the other return values are non-null + if otel_trace_id: + return http_request, otel_trace_id, otel_span_id, otel_trace_sampled + else: + return http_request, http_trace_id, http_span_id, http_trace_sampled diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 9478ab93cf94..db5e78a61130 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -63,6 +63,7 @@ "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "opentelemetry-sdk", ] SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_DEPENDENCIES: List[str] = [] diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 8666de9e01a5..f1a5b697eca0 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -93,6 +93,7 @@ def place_before(path, text, *before_text, escape=None): "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "opentelemetry-sdk" ], unit_test_external_dependencies=["flask", "webob", "django"], samples=True, diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index db7b392d5543..5414f1b087b7 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -44,6 +44,7 @@ "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", + "opentelemetry-api >= 1.0.0", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index c5000f1463cd..801cab341c6d 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -19,6 +19,7 @@ import numbers import os import pytest +import sys import unittest import uuid @@ -117,6 +118,25 @@ def setUpModule(): ) +def _cleanup_otel_sdk_modules(f): + """ + Decorator to delete all references to opentelemetry SDK modules after a + testcase is run. Test case should import opentelemetry SDK modules inside + the function. This is to test situations where the opentelemetry SDK + is not imported at all. 
+ """ + + def wrapped(*args, **kwargs): + f(*args, **kwargs) + + # Deleting from sys.modules should be good enough in this use case + for module_name in list(sys.modules.keys()): + if module_name.startswith("opentelemetry.sdk"): + sys.modules.pop(module_name) + + return wrapped + + class TestLogging(unittest.TestCase): JSON_PAYLOAD = { "message": "System test: test_log_struct", @@ -662,6 +682,43 @@ def test_log_root_handler(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) + @_cleanup_otel_sdk_modules + def test_log_handler_otel_integration(self): + # Doing OTel imports here to not taint the other tests with OTel SDK imports + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + + LOG_MESSAGE = "This is a test of OpenTelemetry" + LOGGER_NAME = "otel-integration" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + # Set up OTel SDK + provider = TracerProvider() + + tracer = provider.get_tracer("test_system") + with tracer.start_as_current_span("test-span") as span: + context = span.get_span_context() + expected_trace_id = f"projects/{Config.CLIENT.project}/traces/{trace.format_trace_id(context.trace_id)}" + expected_span_id = trace.format_span_id(context.span_id) + expected_tracesampled = context.trace_flags.sampled + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, expected_trace_id) + self.assertEqual(entries[0].span_id, expected_span_id) + self.assertTrue(entries[0].trace_sampled, expected_tracesampled) + def test_create_metric(self): METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) metric = Config.CLIENT.metric( diff --git a/packages/google-cloud-logging/tests/unit/handlers/__init__.py b/packages/google-cloud-logging/tests/unit/handlers/__init__.py index df379f1e9d88..32eba185fd59 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/__init__.py +++ b/packages/google-cloud-logging/tests/unit/handlers/__init__.py @@ -11,3 +11,44 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + + +# Utility functions to setup mock OpenTelemetry spans, needed by multiple test +# suites. 
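The fixtures below fake a span through the OpenTelemetry API alone. For orientation, a runnable sketch of the behavior they model, assuming the optional opentelemetry-sdk package is installed (setup.py above only pulls in opentelemetry-api):

    import logging

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider

    from google.cloud.logging_v2.handlers import StructuredLogHandler

    trace.set_tracer_provider(TracerProvider())
    tracer = trace.get_tracer("otel-demo")

    logger = logging.getLogger("otel-demo")
    logger.setLevel(logging.INFO)
    logger.addHandler(StructuredLogHandler())

    with tracer.start_as_current_span("handle-request"):
        # trace/spanId/trace_sampled in the emitted JSON come from the active
        # span, taking precedence over X-Cloud-Trace-Context or traceparent
        # headers on the current request.
        logger.info("inside a span")

        # Explicit per-record values still beat the OpenTelemetry span, as the
        # *_with_overrides tests below verify.
        logger.info("pinned trace", extra={"trace": "custom-trace-id"})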
+ +import contextlib + +import opentelemetry.context +import opentelemetry.trace + +from opentelemetry.trace import NonRecordingSpan +from opentelemetry.trace.span import TraceFlags + +_OTEL_SPAN_CONTEXT_TRACE_ID = 0x123456789123456789 +_OTEL_SPAN_CONTEXT_SPAN_ID = 0x123456789 +_OTEL_SPAN_CONTEXT_TRACEFLAGS = TraceFlags(TraceFlags.SAMPLED) + +_EXPECTED_OTEL_TRACE_ID = "00000000000000123456789123456789" +_EXPECTED_OTEL_SPAN_ID = "0000000123456789" +_EXPECTED_OTEL_TRACESAMPLED = True + + +@contextlib.contextmanager +def _setup_otel_span_context(): + """Sets up a nonrecording OpenTelemetry span with a mock span context that gets returned + by opentelemetry.trace.get_current_span, and returns it as a contextmanager + """ + span_context = opentelemetry.trace.SpanContext( + _OTEL_SPAN_CONTEXT_TRACE_ID, + _OTEL_SPAN_CONTEXT_SPAN_ID, + False, + trace_flags=_OTEL_SPAN_CONTEXT_TRACEFLAGS, + ) + ctx = opentelemetry.trace.set_span_in_context(NonRecordingSpan(span_context)) + tracer = opentelemetry.trace.NoOpTracer() + token = opentelemetry.context.attach(ctx) + try: + with tracer.start_as_current_span("test-span", context=ctx): + yield + finally: + opentelemetry.context.detach(token) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index 5eeae4ba47e4..b8c8fc99d33a 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -16,6 +16,13 @@ import mock +from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, +) + _FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} @@ -356,6 +363,120 @@ def test_wo_libraries(self): output = self._call_fut() self.assertEqual(output, (None, None, None, False)) + def test_otel_span_exists_no_request(self): + flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + None, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_django_request(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + _DJANGO_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_flask_request(self): + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + _FLASK_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_both_django_and_flask(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, 
flask_expected) + + # Django wins + self.assertEqual( + output, + ( + _DJANGO_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_no_otel_span_no_requests(self): + flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, (None, None, None, False)) + + def test_no_otel_span_django_request(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, django_expected) + + def test_no_otel_span_flask_request(self): + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + _, _, output = self._helper(django_expected, flask_expected) + + # Flask wins; no Django request present + self.assertEqual(output, flask_expected) + + def test_no_otel_span_both_django_and_flask(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + _, _, output = self._helper(django_expected, flask_expected) + + # Django wins + self.assertEqual(output, django_expected) + class Test__parse_xcloud_trace(unittest.TestCase): @staticmethod @@ -477,3 +598,25 @@ def test_invalid_headers(self): self.assertIsNone(trace_id) self.assertIsNone(span_id) self.assertEqual(sampled, False) + + +class Test__parse_open_telemetry_data(unittest.TestCase): + @staticmethod + def _call_fut(): + from google.cloud.logging_v2.handlers import _helpers + + trace, span, sampled = _helpers._retrieve_current_open_telemetry_span() + return trace, span, sampled + + def test_no_op(self): + trace_id, span_id, sampled = self._call_fut() + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) + + def test_span_exists(self): + with _setup_otel_span_context(): + trace_id, span_id, sampled = self._call_fut() + self.assertEqual(trace_id, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(sampled, _EXPECTED_OTEL_TRACESAMPLED) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 2aaa4560dc55..535c1f4b121d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -28,6 +28,13 @@ _GAE_ENV_VARS, ) +from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, +) + class TestCloudLoggingFilter(unittest.TestCase): PROJECT = "PROJECT" @@ -230,6 +237,136 @@ def test_record_with_traceparent_request(self): self.assertEqual(record._http_request, expected_request) self.assertEqual(record._http_request_str, json.dumps(expected_request)) + def test_record_with_opentelemetry_span_no_request(self): + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID) +
self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED) + self.assertEqual(record._trace_sampled_str, "true") + self.assertIsNone(record._http_request) + self.assertEqual(record._http_request_str, "{}") + + def test_record_with_opentelemetry_span_and_request(self): + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + http_path = "http://testserver/123" + http_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + combined_trace = f"{http_trace}/{http_span};o=1" + expected_request = { + "requestMethod": "GET", + "requestUrl": http_path, + "userAgent": http_agent, + "protocol": "HTTP/1.1", + } + + app = self.create_app() + with app.test_request_context( + http_path, + headers={ + "User-Agent": http_agent, + "X_CLOUD_TRACE_CONTEXT": combined_trace, + }, + ): + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED) + self.assertEqual(record._trace_sampled_str, "true") + + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + + def test_record_with_opentelemetry_span_and_request_with_overrides(self): + """ + sort of does what the test after this one does, but more in the context of OTel precedence + """ + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + http_path = "http://testserver/123" + http_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + combined_trace = f"{http_trace}/{http_span};o=1" + expected_request = { + "requestMethod": "GET", + "requestUrl": http_path, + "userAgent": http_agent, + "protocol": "HTTP/1.1", + } + + overwritten_trace = "01234" + overwritten_span = "43210" + overwritten_tracesampled = False + record.trace = overwritten_trace + record.span_id = overwritten_span + record.trace_sampled = overwritten_tracesampled + + app = self.create_app() + with app.test_request_context( + http_path, + headers={ + "User-Agent": http_agent, + "X_CLOUD_TRACE_CONTEXT": combined_trace, + }, + ): + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, overwritten_trace) + self.assertEqual(record._trace_str, overwritten_trace) + self.assertEqual(record._span_id, overwritten_span) + self.assertEqual(record._span_id_str, overwritten_span) + self.assertFalse(record._trace_sampled) + self.assertEqual( + record._trace_sampled_str, json.dumps(overwritten_tracesampled) + ) + + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + def test_user_overrides(self): """ ensure user can override fields diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 06e5ff1aef60..920ca15eae34 100644 --- 
a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -512,6 +512,107 @@ def test_format_overrides(self): for key, value in expected_payload.items(): self.assertEqual(value, result[key]) + def test_format_with_opentelemetry_span(self): + import logging + import json + + from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ) + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_payload = { + "logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID, + "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID, + "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED, + } + + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + + def test_format_with_opentelemetry_span_and_request(self): + import logging + import json + + from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ) + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + trace_header = f"{http_trace}/{http_span};o=1" + expected_payload = { + "logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID, + "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID, + "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED, + "httpRequest": { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={ + "User-Agent": expected_agent, + "X_CLOUD_TRACE_CONTEXT": trace_header, + }, + ): + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + + def test_format_with_opentelemetry_span_and_overrides(self): + import logging + import json + + from tests.unit.handlers import _setup_otel_span_context + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + overwrite_trace = "abc" + overwrite_span = "123" + overwrite_tracesampled = False + record.trace = overwrite_trace + record.span_id = overwrite_span + record.trace_sampled = overwrite_tracesampled + expected_payload = { + "logging.googleapis.com/trace": overwrite_trace, + "logging.googleapis.com/spanId": overwrite_span, + "logging.googleapis.com/trace_sampled": overwrite_tracesampled, + } + + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + def test_format_with_json_fields(self): """ User can add json_fields to the record, which should populate the payload From 3cdf8167f3f260c7a3a2d6d122ba721a4fa05b93 Mon Sep 17 00:00:00 2001 From: Kevin Zheng 
<147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 23 May 2024 11:51:14 -0400 Subject: [PATCH 810/855] fix: Added missing import into logger.py (#896) --- packages/google-cloud-logging/google/cloud/logging_v2/logger.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 64130f02f137..27553994b12e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -29,6 +29,7 @@ from google.api_core.exceptions import InvalidArgument from google.rpc.error_details_pb2 import DebugInfo +import google.cloud.logging_v2 import google.protobuf.message _GLOBAL_RESOURCE = Resource(type="global", labels={}) From 4a65afc2ceaba71f5076aad00d88bd6e5ff17b3f Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Fri, 7 Jun 2024 10:43:04 -0400 Subject: [PATCH 811/855] test: Fixed unsupported resource type in system test (#904) --- packages/google-cloud-logging/tests/system/test_system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index 801cab341c6d..d4ec4da36b15 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -622,7 +622,7 @@ def test_handlers_w_extras(self): "trace_sampled": True, "http_request": expected_request, "source_location": expected_source, - "resource": Resource(type="cloudiot_device", labels={}), + "resource": Resource(type="global", labels={}), "labels": {"test-label": "manual"}, } cloud_logger.warning(LOG_MESSAGE, extra=extra) From dd1edc7b871c5adb33b6994e188c290655e5d0ce Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Mon, 10 Jun 2024 13:30:42 -0400 Subject: [PATCH 812/855] fix: Added type hints to CloudLoggingHandler constructor (#903) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Added type hints to CloudLoggingHandler constructor * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Removed Client typing due to circular imports --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/handlers/handlers.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 06e131442d5a..5b11bfe30483 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -18,12 +18,19 @@ import json import logging -from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport +from typing import Optional, IO + +from google.cloud.logging_v2.handlers.transports import ( + BackgroundThreadTransport, + Transport, +) from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, add_resource_labels, ) from google.cloud.logging_v2.handlers._helpers import get_request_data +from google.cloud.logging_v2.resource import Resource + DEFAULT_LOGGER_NAME = "python" @@ -149,11 +156,11 @@ def __init__( self, client, *, - 
name=DEFAULT_LOGGER_NAME, - transport=BackgroundThreadTransport, - resource=None, - labels=None, - stream=None, + name: str = DEFAULT_LOGGER_NAME, + transport: Transport = BackgroundThreadTransport, + resource: Resource = None, + labels: Optional[dict] = None, + stream: Optional[IO] = None, **kwargs, ): """ From c62517bba52be6d63b8982ad04ad44b81c46fc1f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 13:06:58 -0400 Subject: [PATCH 813/855] chore(python): Use latest python runtime in prerelease_deps session (#910) * chore(python): Use latest python runtime in prerelease_deps session Source-Link: https://github.com/googleapis/synthtool/commit/14d8b284c826cd8501142aeb9ab7e721b630417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e * See https://github.com/protocolbuffers/protobuf/issues/17345 * add comment --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-logging/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-logging/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 509 +++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- .../.kokoro/test-samples.sh | 2 +- .../.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-logging/.trampolinerc | 2 +- packages/google-cloud-logging/MANIFEST.in | 2 +- packages/google-cloud-logging/docs/conf.py | 2 +- packages/google-cloud-logging/noxfile.py | 56 +- packages/google-cloud-logging/pytest.ini | 2 + .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 2 +- 21 files changed, 330 insertions(+), 275 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index 89954f8bd02e..d93385ea1ac7 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 81f87c56917d..76524393faf1 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
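A sketch of a constructor call exercising the annotated parameters from the hunk above; the name and labels are illustrative, and application default credentials are assumed:

    import google.cloud.logging
    from google.cloud.logging_v2.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.handlers.transports import SyncTransport
    from google.cloud.logging_v2.resource import Resource

    client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(
        client,
        name="typed-example",
        transport=SyncTransport,  # a Transport subclass; the handler instantiates it
        resource=Resource(type="global", labels={}),
        labels={"team": "demo"},
    )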
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e +# created: 2024-07-04T19:38:10.086106449Z diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index ff6554281417..beab7b9d4334 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index bdaf39fe22d0..a26ce61930f5 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/populate-secrets.sh b/packages/google-cloud-logging/.kokoro/populate-secrets.sh index 6f3972140e80..c435402f473e 100755 --- a/packages/google-cloud-logging/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-logging/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index 9eafe0be3bba..38f083f05aa0 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 9bdfbceb56b0..8941eaef67e7 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 51f92b8e12f1..35ece0e4d2e9 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - 
--hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + 
--hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + 
--hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 
\ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - 
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + 
--hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + 
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + 
--hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + 
--hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 
# via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + 
--hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh index 63ac41dfae1d..e9d8bd79a644 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index 5a0f5fab6a89..55910c8ba178 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh index 50b35a48c190..7933d820149a 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/trampoline.sh b/packages/google-cloud-logging/.kokoro/trampoline.sh index d85b1f267693..48f79699706e 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh index 59a7cf3a9373..35fa529231dc 100755 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
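The `.kokoro/requirements.txt` changes above are machine-generated: as the file's preserved header comment notes, it is produced by `pip-compile --allow-unsafe --generate-hashes requirements.in`, so version bumps and their paired sha256 pins are regenerated rather than edited by hand. As a rough sketch only — the session name and paths below are illustrative assumptions, not part of this package's actual noxfile — such a regeneration step could be wired into nox like this:

```python
import pathlib

import nox

# Assumed layout: requirements.in sits next to the generated requirements.txt.
KOKORO = pathlib.Path(__file__).parent / ".kokoro"


@nox.session(python="3.10")
def pin_requirements(session):
    """Hypothetical helper: regenerate hash-pinned requirements with pip-tools."""
    session.install("pip-tools")
    session.run(
        "pip-compile",
        "--allow-unsafe",     # also pin packages pip-tools flags as "unsafe" (e.g. setuptools)
        "--generate-hashes",  # emit the --hash=sha256:... lines used by pip's hash-checking mode
        "--output-file",
        str(KOKORO / "requirements.txt"),
        str(KOKORO / "requirements.in"),
    )
```

Because every release is pinned with hashes, pip's hash-checking mode rejects any artifact whose digest differs from the recorded ones — which is why each version bump in the diff above carries a fresh pair of sha256 lines.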
diff --git a/packages/google-cloud-logging/.pre-commit-config.yaml b/packages/google-cloud-logging/.pre-commit-config.yaml index 6a8e16950664..1d74695f70b6 100644 --- a/packages/google-cloud-logging/.pre-commit-config.yaml +++ b/packages/google-cloud-logging/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc index 65248f703aa6..636e35c3257f 100644 --- a/packages/google-cloud-logging/.trampolinerc +++ b/packages/google-cloud-logging/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index e0a66705318e..d6814cd60037 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/docs/conf.py b/packages/google-cloud-logging/docs/conf.py index fffea8f16413..a65cf85ffeef 100644 --- a/packages/google-cloud-logging/docs/conf.py +++ b/packages/google-cloud-logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index db5e78a61130..15c54ef329e1 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -169,14 +169,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. 
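    # Illustrative aside, an assumption about mechanics rather than text from this
    # diff: PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is read by the protobuf runtime
    # at import time, so passing it through ``env`` below makes each parametrized
    # run exercise a different backend. One way to confirm which backend actually
    # loaded (via a protobuf-internal helper, so treat it as a sketch only):
    #
    #   from google.protobuf.internal import api_implementation
    #   print(api_implementation.Type())  # "python", "upb", or "cpp"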
session.run( "py.test", @@ -190,15 +204,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -366,10 +377,17 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -404,9 +422,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -432,7 +450,13 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -445,6 +469,9 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -453,4 +480,7 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 5dbd08fa7eed..5cad3409b005 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -23,3 +23,5 @@ filterwarnings = ignore:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning ignore:ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning + # Remove warning once https://github.com/protocolbuffers/protobuf/issues/17345 is fixed + ignore:.*Please use message_factory.GetMessageClass\(\) instead. 
SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning diff --git a/packages/google-cloud-logging/scripts/decrypt-secrets.sh b/packages/google-cloud-logging/scripts/decrypt-secrets.sh index 0018b421ddf8..120b0ddc4364 100755 --- a/packages/google-cloud-logging/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-logging/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py index 1acc119835b5..8f5e248a0da1 100644 --- a/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-logging/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 030b62c39b6c3e9c4e04d2dd1b0af91f64ac55bb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 15:40:08 -0400 Subject: [PATCH 814/855] chore(python): use python 3.10 for docs build (#913) Source-Link: https://github.com/googleapis/synthtool/commit/9ae07858520bf035a3d5be569b5a65d960ee4392 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 21 +++++---- .../.kokoro/docker/docs/requirements.txt | 40 ++++++++-------- .../.kokoro/requirements.txt | 46 +++++++++---------- packages/google-cloud-logging/noxfile.py | 2 +- 5 files changed, 60 insertions(+), 53 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 76524393faf1..f30cb3775afc 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e -# created: 2024-07-04T19:38:10.086106449Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index a26ce61930f5..5205308b334d 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..7129c7715594 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox 
-platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt index 35ece0e4d2e9..9622baf0ba38 100644 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + 
--hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 15c54ef329e1..65e583ec27ec 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -296,7 +296,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" From fffbe0d0b7d5accbca9609bca9f6186ef5cc04f0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 10 Jul 2024 17:16:23 +0200 Subject: [PATCH 815/855] chore(deps): update all dependencies (#914) --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-logging/samples/snippets/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/requirements-test.txt b/packages/google-cloud-logging/samples/snippets/requirements-test.txt index 11bcddbb7b98..37eb1f9aa7a2 100644 --- a/packages/google-cloud-logging/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements-test.txt @@ -1,3 +1,3 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.1.1; python_version >= '3.8' +pytest==8.2.2; python_version >= '3.8' diff --git a/packages/google-cloud-logging/samples/snippets/requirements.txt b/packages/google-cloud-logging/samples/snippets/requirements.txt index 07cef98b9057..8a52ee5c6801 100644 --- 
a/packages/google-cloud-logging/samples/snippets/requirements.txt +++ b/packages/google-cloud-logging/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.10.0 -google-cloud-bigquery==3.20.1 -google-cloud-storage==2.16.0 -google-cloud-pubsub==2.21.1 +google-cloud-bigquery==3.25.0 +google-cloud-storage==2.17.0 +google-cloud-pubsub==2.22.0 From 14bc84db3b46e3dd9633c6976e9823263c18aa4a Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Mon, 15 Jul 2024 13:34:03 -0400 Subject: [PATCH 816/855] docs: Documentation update for OpenTelemetry (#915) * docs: Documentation update for OpenTelemetry * updated documentation * Update docs/auto-trace-span-extraction.rst Co-authored-by: Aaron Abbott --------- Co-authored-by: Aaron Abbott --- .../docs/auto-trace-span-extraction.rst | 27 +++++++++++++++++++ .../docs/std-lib-integration.rst | 20 +++++++++----- packages/google-cloud-logging/docs/usage.rst | 1 + 3 files changed, 41 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-logging/docs/auto-trace-span-extraction.rst diff --git a/packages/google-cloud-logging/docs/auto-trace-span-extraction.rst b/packages/google-cloud-logging/docs/auto-trace-span-extraction.rst new file mode 100644 index 000000000000..1eb21fb78ad8 --- /dev/null +++ b/packages/google-cloud-logging/docs/auto-trace-span-extraction.rst @@ -0,0 +1,27 @@ +Automatic Trace/Span ID Extraction +================================== + +.. note:: + All `LogEntry fields`_ populated :ref:`manually<Manual-Metadata>` will override those populated via methods referred to in this + section. + +The Google Cloud Logging library can automatically populate `LogEntry fields`_ +`trace`, `span_id`, and `trace_sampled` via OpenTelemetry integration, or by extracting header information from an HTTP request. + +OpenTelemetry Integration +------------------------- + +If you have the OpenTelemetry SDK package installed and are logging from within an active OpenTelemetry span, that log entry will automatically +have the `trace`, `span_id`, and `trace_sampled` fields populated from that span. More information about OpenTelemetry can be found +`here `_. + +HTTP headers +------------ + +Another possible method of automatic `trace` / `span_id` population is extraction from HTTP headers. +This is prioritized after OpenTelemetry and requires a :doc:`supported Python web framework </web-framework-integration>`. +Trace information is automatically populated from either the `W3C Traceparent `_ +or `X-Cloud-Trace-Context `_ headers. +Populating trace information this way also automatically populates the `http_request` field in the `LogEntry`. + +.. _LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry diff --git a/packages/google-cloud-logging/docs/std-lib-integration.rst b/packages/google-cloud-logging/docs/std-lib-integration.rst index be43231fdc5e..8a016b8e5101 100644 --- a/packages/google-cloud-logging/docs/std-lib-integration.rst +++ b/packages/google-cloud-logging/docs/std-lib-integration.rst @@ -102,31 +102,35 @@ The Google Cloud Logging library attempts to detect and attach additional The following fields are currently supported: - labels -- trace* -- span_id* -- trace_sampled* -- http_request* +- trace +- span_id +- trace_sampled +- http_request - source_location - resource - :ref:`json_fields` .. note:: - Fields marked with "*" require a :doc:`supported Python web framework `. + | More information about `trace`, `span_id`, and `trace_sampled` can be found :doc:`here </auto-trace-span-extraction>`.
+ | `http_request` requires a :doc:`supported Python web framework `. + Manual Metadata Using the `extra` Argument -------------------------------------------- +.. _Manual-Metadata: + The Python :mod:`logging` standard library accepts `an "extra" argument `_ when writing logs. You can use this argument to populate LogRecord objects with user-defined key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional -metadata to populate `LogEntry fields `_. +metadata to populate `LogEntry fields`_. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logging_extras] :end-before: [END logging_extras] :dedent: 4 -All of the `LogEntry fields `_ +All of the `LogEntry fields`_ that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` argument override any :ref:`automatically detected` fields. @@ -153,3 +157,5 @@ You can use both transport options over :doc:`gRPC or HTTP`. .. note:: :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` prints logs as formatted JSON to standard output, and does not use a Transport class. + +.. _LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry \ No newline at end of file diff --git a/packages/google-cloud-logging/docs/usage.rst b/packages/google-cloud-logging/docs/usage.rst index 7541f355b466..c28be0c6fe52 100644 --- a/packages/google-cloud-logging/docs/usage.rst +++ b/packages/google-cloud-logging/docs/usage.rst @@ -4,6 +4,7 @@ Usage Guide :maxdepth: 2 std-lib-integration + auto-trace-span-extraction web-framework-integration direct-lib-usage grpc-vs-http From a2a83104b71e81c61b50faa553092f510a316755 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 12:19:30 -0400 Subject: [PATCH 817/855] chore(main): release 3.11.0 (#876) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 22 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 26 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index fc62d3d35012..19f9217cb316 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.10.0" + ".": "3.11.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index a41083f53375..79fa006559e4 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.0](https://github.com/googleapis/python-logging/compare/v3.10.0...v3.11.0) (2024-07-15) + + +### Features + +* OpenTelemetry trace/spanID integration for Python handlers ([#889](https://github.com/googleapis/python-logging/issues/889)) ([78168a3](https://github.com/googleapis/python-logging/commit/78168a38577b698130a861af4e4d229f42660330)) + + +### Bug Fixes + +* Added environment specific labels to client library when running in Cloud Run Jobs ([#877](https://github.com/googleapis/python-logging/issues/877)) 
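As a minimal sketch of the behavior the documentation patch above describes (assuming google-cloud-logging 3.11+ and the opentelemetry-sdk package are installed and application default credentials are available; the span name, label, and trace values below are illustrative placeholders, not part of the patch):

import logging

import google.cloud.logging
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

# Attach the Cloud Logging handler to the root logger.
client = google.cloud.logging.Client()
client.setup_logging()

# Logs emitted inside an active OpenTelemetry span automatically pick up
# trace, span_id, and trace_sampled from that span.
trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("example-span"):
    logging.info("logged from inside a span")

# Fields passed via `extra` override anything detected automatically.
logging.info(
    "logged with manual metadata",
    extra={
        "labels": {"run_id": "1234"},  # placeholder label
        "trace": "projects/my-project/traces/abc123",  # placeholder trace name
        "json_fields": {"custom_key": "custom_value"},
    },
)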
([9c5e8f0](https://github.com/googleapis/python-logging/commit/9c5e8f0548f88235fe6474469bc37685e2498dd1)) +* Added missing import into logger.py ([#896](https://github.com/googleapis/python-logging/issues/896)) ([9ca242d](https://github.com/googleapis/python-logging/commit/9ca242d10f9f3bca120b292f478d62f5fa1d3c06)) +* Added type hints to CloudLoggingHandler constructor ([#903](https://github.com/googleapis/python-logging/issues/903)) ([6959345](https://github.com/googleapis/python-logging/commit/69593459614be968f7a0136aa76701c4fc408834)) + + +### Documentation + +* Add summary_overview template ([#878](https://github.com/googleapis/python-logging/issues/878)) ([b60714c](https://github.com/googleapis/python-logging/commit/b60714cb1cc3aac79c86225f8f9cbd24d8ab170f)) +* Changed table in web-framework-integration to bulleted list ([#875](https://github.com/googleapis/python-logging/issues/875)) ([a4aa3a7](https://github.com/googleapis/python-logging/commit/a4aa3a7cf1e3bb32ec2772084a7dc6c16e1454ff)) +* Documentation update for OpenTelemetry ([#915](https://github.com/googleapis/python-logging/issues/915)) ([2a0539a](https://github.com/googleapis/python-logging/commit/2a0539a30e6dcf45c0970e3aacfd4a2772877526)) +* Update `dictConfig` snippet ([#885](https://github.com/googleapis/python-logging/issues/885)) ([6264107](https://github.com/googleapis/python-logging/commit/62641075042a3da9bb9c059d963bad14a1586b1c)) + ## [3.10.0](https://github.com/googleapis/python-logging/compare/v3.9.0...v3.10.0) (2024-03-13) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index b2ead68dd53a..6c2e88f2b9bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.10.0" # {x-release-please-version} +__version__ = "3.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index b2ead68dd53a..6c2e88f2b9bb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.10.0" # {x-release-please-version} +__version__ = "3.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..9d5a375e9d98 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.11.0" }, "snippets": [ { From 31e1d51b7cc8613fd09191c4baa61714960a9c34 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 1 Aug 2024 16:04:00 -0400 Subject: [PATCH 818/855] test: Added timestamp to sink names + autodelete sinks older than 2 hours in export_test.py (#925) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Running this to remove all sinks * readd try block * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * added timestamp to sink name * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Fixed regex string * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../samples/snippets/export_test.py | 49 ++++++++++++++----- 1 file changed, 37 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index c5830e3b24d3..c21fab2daef1 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -13,8 +13,10 @@ # limitations under the License. 
import os +import re import random import string +import time import backoff from google.cloud import logging @@ -24,8 +26,13 @@ BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] -TEST_SINK_NAME_TMPL = "example_sink_{}" +TEST_SINK_NAME_TMPL = "example_sink_{}_{}" TEST_SINK_FILTER = "severity>=CRITICAL" +TIMESTAMP = int(time.time()) + +# Threshold beyond which the cleanup_old_sinks fixture will delete +# old sinks, in seconds +CLEANUP_THRESHOLD = 7200 # 2 hours def _random_id(): @@ -34,12 +41,36 @@ def _random_id(): ) + +def _create_sink_name(): + return TEST_SINK_NAME_TMPL.format(TIMESTAMP, _random_id()) + + +@backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False) +def _delete_sink(sink): + sink.delete() + + +# Runs once for the entire test suite +@pytest.fixture(scope="module") +def cleanup_old_sinks(): + client = logging.Client() + test_sink_name_regex = ( + r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$" + ) + for sink in client.list_sinks(): + match = re.match(test_sink_name_regex, sink.name) + if match: + sink_timestamp = int(match.group(1)) + if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD: + _delete_sink(sink) + + @pytest.fixture -def example_sink(): +def example_sink(cleanup_old_sinks): client = logging.Client() sink = client.sink( - TEST_SINK_NAME_TMPL.format(_random_id()), + _create_sink_name(), filter_=TEST_SINK_FILTER, destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET), ) @@ -48,10 +79,7 @@ def example_sink(): yield sink - try: - sink.delete() - except Exception: - pass + _delete_sink(sink) def test_list(example_sink, capsys): @@ -65,16 +93,13 @@ def eventually_consistent_test(): def test_create(capsys): - sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) + sink_name = _create_sink_name() try: export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink.
finally: - try: - logging.Client().sink(sink_name).delete() - except Exception: - pass + _delete_sink(logging.Client().sink(sink_name)) out, _ = capsys.readouterr() assert sink_name in out From 7a1cff0e7ab5f07c3b1aeabdabf7635d2ee335cf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 17:16:42 -0400 Subject: [PATCH 819/855] fix: Allow protobuf 5.x (#888) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * allow protobuf 5.x * update constraints * See https://github.com/googleapis/gapic-generator-python/issues/2046 * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 655567917 Source-Link: https://github.com/googleapis/googleapis/commit/43aa65e3897557c11d947f3133ddb76e5c4b2a6c Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e38378753074c0f66ff63348d6864929e104d5c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGUzODM3ODc1MzA3NGMwZjY2ZmY2MzM0OGQ2ODY0OTI5ZTEwNGQ1YyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 656040068 Source-Link: https://github.com/googleapis/googleapis/commit/3f4e29a88f2e1f412439e61c48c88f81dec0bbbf Source-Link: https://github.com/googleapis/googleapis-gen/commit/b8feb2109dde7b0938c22c993d002251ac6714dc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjhmZWIyMTA5ZGRlN2IwOTM4YzIyYzk5M2QwMDIyNTFhYzY3MTRkYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Daniel Sanche --- .../config_service_v2/async_client.py | 585 ++-- .../services/config_service_v2/client.py | 300 +- .../services/config_service_v2/pagers.py | 153 +- .../config_service_v2/transports/base.py | 4 +- .../config_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 271 +- .../logging_service_v2/async_client.py | 177 +- .../services/logging_service_v2/client.py | 79 +- .../services/logging_service_v2/pagers.py | 97 +- .../logging_service_v2/transports/base.py | 4 +- .../logging_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 130 +- .../metrics_service_v2/async_client.py | 147 +- .../services/metrics_service_v2/client.py | 79 +- .../services/metrics_service_v2/pagers.py | 41 +- .../metrics_service_v2/transports/base.py | 4 +- .../metrics_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 103 +- packages/google-cloud-logging/pytest.ini | 2 + .../snippet_metadata_google.logging.v2.json | 2 +- packages/google-cloud-logging/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- .../logging_v2/test_config_service_v2.py | 2815 ++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 528 +++- .../logging_v2/test_metrics_service_v2.py | 431 ++- 25 files changed, 5125 insertions(+), 915 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 729a878be035..69fa55a4802b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -215,7 +217,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -227,9 +233,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -348,8 +356,8 @@ async def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -357,7 +365,10 @@ async def sample_list_buckets(): "the individual field arguments should be set." ) - request = logging_config.ListBucketsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -366,11 +377,9 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_buckets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_buckets + ] # Certain fields should be provided within the metadata header; # add these here. @@ -395,6 +404,8 @@ async def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -453,15 +464,16 @@ async def sample_get_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -546,15 +558,16 @@ async def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. @@ -649,15 +662,16 @@ async def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. @@ -742,15 +756,16 @@ async def sample_create_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -830,15 +845,16 @@ async def sample_update_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -908,15 +924,16 @@ async def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -980,15 +997,16 @@ async def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.UndeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1074,8 +1092,8 @@ async def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1083,7 +1101,10 @@ async def sample_list_views(): "the individual field arguments should be set." ) - request = logging_config.ListViewsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1092,11 +1113,9 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_views, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_views + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1121,6 +1140,8 @@ async def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1179,15 +1200,14 @@ async def sample_get_view(): """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1263,15 +1283,16 @@ async def sample_create_view(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1349,15 +1370,16 @@ async def sample_update_view(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1425,15 +1447,16 @@ async def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1522,8 +1545,8 @@ async def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1531,7 +1554,10 @@ async def sample_list_sinks(): "the individual field arguments should be set." 
) - request = logging_config.ListSinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1540,22 +1566,9 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sinks, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sinks + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1580,6 +1593,8 @@ async def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1663,8 +1678,8 @@ async def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1672,7 +1687,10 @@ async def sample_get_sink(): "the individual field arguments should be set." ) - request = logging_config.GetSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1681,22 +1699,7 @@ async def sample_get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1814,8 +1817,8 @@ async def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError( @@ -1823,7 +1826,10 @@ async def sample_create_sink(): "the individual field arguments should be set." 
) - request = logging_config.CreateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1834,11 +1840,9 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_sink, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1979,8 +1983,8 @@ async def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1988,7 +1992,10 @@ async def sample_update_sink(): "the individual field arguments should be set." ) - request = logging_config.UpdateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2001,22 +2008,9 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2103,8 +2097,8 @@ async def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2112,7 +2106,10 @@ async def sample_delete_sink(): "the individual field arguments should be set." ) - request = logging_config.DeleteSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2121,22 +2118,9 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2250,8 +2234,8 @@ async def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2259,7 +2243,10 @@ async def sample_create_link(): "the individual field arguments should be set." ) - request = logging_config.CreateLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2272,11 +2259,9 @@ async def sample_create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2385,8 +2370,8 @@ async def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2394,7 +2379,10 @@ async def sample_delete_link(): "the individual field arguments should be set." ) - request = logging_config.DeleteLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2403,11 +2391,9 @@ async def sample_delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2506,8 +2492,8 @@ async def sample_list_links(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2515,7 +2501,10 @@ async def sample_list_links(): "the individual field arguments should be set." ) - request = logging_config.ListLinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2524,11 +2513,9 @@ async def sample_list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_links, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_links + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2553,6 +2540,8 @@ async def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2623,8 +2612,8 @@ async def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2632,7 +2621,10 @@ async def sample_get_link(): "the individual field arguments should be set." ) - request = logging_config.GetLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2641,11 +2633,7 @@ async def sample_get_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2738,8 +2726,8 @@ async def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2747,7 +2735,10 @@ async def sample_list_exclusions(): "the individual field arguments should be set." ) - request = logging_config.ListExclusionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2756,22 +2747,9 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_exclusions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2796,6 +2774,8 @@ async def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2877,8 +2857,8 @@ async def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2886,7 +2866,10 @@ async def sample_get_exclusion(): "the individual field arguments should be set." ) - request = logging_config.GetExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2895,22 +2878,9 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3025,8 +2995,8 @@ async def sample_create_exclusion(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError( @@ -3034,7 +3004,10 @@ async def sample_create_exclusion(): "the individual field arguments should be set." ) - request = logging_config.CreateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3045,11 +3018,9 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3176,8 +3147,8 @@ async def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3185,7 +3156,10 @@ async def sample_update_exclusion(): "the individual field arguments should be set." ) - request = logging_config.UpdateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3198,11 +3172,9 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3286,8 +3258,8 @@ async def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3295,7 +3267,10 @@ async def sample_delete_exclusion(): "the individual field arguments should be set." 
) - request = logging_config.DeleteExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3304,22 +3279,9 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3414,15 +3376,16 @@ async def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3525,15 +3488,16 @@ async def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3648,8 +3612,8 @@ async def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3657,7 +3621,10 @@ async def sample_get_settings(): "the individual field arguments should be set." 
) - request = logging_config.GetSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3666,11 +3633,9 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3793,8 +3758,8 @@ async def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3802,7 +3767,10 @@ async def sample_update_settings(): "the individual field arguments should be set." ) - request = logging_config.UpdateSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3813,11 +3781,9 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3901,15 +3867,16 @@ async def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - request = logging_config.CopyLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.copy_log_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_log_entries + ] # Validate the universe domain. 
self._client._validate_universe_domain() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 5257f8ddf204..8c210c0cda82 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -644,7 +645,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -656,9 +661,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -767,8 +774,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ConfigServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -855,8 +869,8 @@ def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +878,8 @@ def sample_list_buckets(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
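Per the new signature above, the client constructor now accepts three spellings for transport: a string, a transport instance, or a callable that builds one; the callable receives the same keyword arguments the client would pass to the transport class. A hedged usage sketch (AnonymousCredentials keeps it runnable without ambient credentials; the factory name is illustrative):

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2Client,
    )
    from google.cloud.logging_v2.services.config_service_v2.transports import (
        ConfigServiceV2GrpcTransport,
    )

    def transport_factory(**kwargs):
        # Receives credentials, host, client_info, etc.; a natural hook for
        # inspecting or tweaking transport construction.
        return ConfigServiceV2GrpcTransport(**kwargs)

    client = ConfigServiceV2Client(
        credentials=AnonymousCredentials(),
        transport=transport_factory,  # previously only a string or an instance
    )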
if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -902,6 +914,8 @@ def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -960,10 +974,8 @@ def sample_get_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetBucketRequest): request = logging_config.GetBucketRequest(request) @@ -1054,10 +1066,8 @@ def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1158,10 +1168,8 @@ def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1252,10 +1260,8 @@ def sample_create_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1341,10 +1347,8 @@ def sample_update_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1420,10 +1424,8 @@ def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteBucketRequest): request = logging_config.DeleteBucketRequest(request) @@ -1493,10 +1495,8 @@ def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UndeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UndeleteBucketRequest): request = logging_config.UndeleteBucketRequest(request) @@ -1588,8 +1588,8 @@ def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1597,10 +1597,8 @@ def sample_list_views(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListViewsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1635,6 +1633,8 @@ def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1693,10 +1693,8 @@ def sample_get_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetViewRequest): request = logging_config.GetViewRequest(request) @@ -1778,10 +1776,8 @@ def sample_create_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateViewRequest): request = logging_config.CreateViewRequest(request) @@ -1865,10 +1861,8 @@ def sample_update_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateViewRequest): request = logging_config.UpdateViewRequest(request) @@ -1942,10 +1936,8 @@ def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteViewRequest): request = logging_config.DeleteViewRequest(request) @@ -2040,8 +2032,8 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2049,10 +2041,8 @@ def sample_list_sinks(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListSinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2087,6 +2077,8 @@ def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2170,8 +2162,8 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2179,10 +2171,8 @@ def sample_get_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2310,8 +2300,8 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError( @@ -2319,10 +2309,8 @@ def sample_create_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2475,8 +2463,8 @@ def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2484,10 +2472,8 @@ def sample_update_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSinkRequest): request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2588,8 +2574,8 @@ def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2597,10 +2583,8 @@ def sample_delete_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteSinkRequest): request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2724,8 +2708,8 @@ def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2733,10 +2717,8 @@ def sample_create_link(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateLinkRequest): request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2859,8 +2841,8 @@ def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2868,10 +2850,8 @@ def sample_delete_link(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteLinkRequest): request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2980,8 +2960,8 @@ def sample_list_links(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2989,10 +2969,8 @@ def sample_list_links(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListLinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListLinksRequest): request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3027,6 +3005,8 @@ def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3097,8 +3077,8 @@ def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3106,10 +3086,8 @@ def sample_get_link(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetLinkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetLinkRequest): request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3212,8 +3190,8 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3221,10 +3199,8 @@ def sample_list_exclusions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3259,6 +3235,8 @@ def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3340,8 +3318,8 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3349,10 +3327,8 @@ def sample_get_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3477,8 +3453,8 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError( @@ -3486,10 +3462,8 @@ def sample_create_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3628,8 +3602,8 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3637,10 +3611,8 @@ def sample_update_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3738,8 +3710,8 @@ def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3747,10 +3719,8 @@ def sample_delete_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3855,10 +3825,8 @@ def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetCmekSettingsRequest): request = logging_config.GetCmekSettingsRequest(request) @@ -3967,10 +3935,8 @@ def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateCmekSettingsRequest): request = logging_config.UpdateCmekSettingsRequest(request) @@ -4091,8 +4057,8 @@ def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4100,10 +4066,8 @@ def sample_get_settings(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSettingsRequest): request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4236,8 +4200,8 @@ def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -4245,10 +4209,8 @@ def sample_update_settings(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSettingsRequest): request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4344,10 +4306,8 @@ def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CopyLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.CopyLogEntriesRequest): request = logging_config.CopyLogEntriesRequest(request) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 8a9710005a8e..1a1ba7c25977 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_config @@ -51,6 +64,8 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -113,6 +138,8 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
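The OptionalRetry/OptionalAsyncRetry aliases added at the top of pagers.py use a try/except AttributeError so the module still imports on older google-api-core releases that do not expose the private _MethodDefault sentinel type. The idiom in isolation, taken from the hunk above:

    from typing import Union

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    try:
        # Newer api-core: the DEFAULT sentinel has a nameable type.
        OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    except AttributeError:  # older api-core without the private type
        OptionalRetry = Union[retries.Retry, object, None]  # type: ignore

    # Signatures can then default to the sentinel, as the pagers now do:
    def list_buckets(retry: OptionalRetry = gapic_v1.method.DEFAULT):
        ...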
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: @@ -179,6 +216,8 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -190,12 +229,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +250,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -241,6 +290,8 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -252,12 +303,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +324,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogView]: @@ -307,6 +368,8 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -318,12 +381,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -334,7 +402,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -369,6 +442,8 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -380,12 +455,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -396,7 +476,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: @@ -435,6 +520,8 @@ def __init__( request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -446,12 +533,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -462,7 +554,12 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.Link]: @@ -497,6 +594,8 @@ def __init__( request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -508,12 +607,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -524,7 +628,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.Link]: @@ -563,6 +672,8 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -574,12 +685,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -590,7 +706,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -625,6 +746,8 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -636,12 +759,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -652,7 +780,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e9b3dae141ba..ac03c526de84 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index ccb53fe66b1b..0764afcfd301 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -53,7 +53,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -73,14 +73,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -90,11 +93,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,9 +124,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -162,7 +166,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 41894f1ebca2..e4a8d16f9743 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -68,7 +70,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
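With channel_init = channel or type(self).create_channel, a callable channel becomes a channel factory: it is invoked with the host as the first argument plus the same keyword arguments create_channel would receive, and must return a channel. A hedged sketch of supplying one (an insecure channel is only sensible against a local emulator, and the factory name is illustrative):

    import grpc

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.logging_v2.services.config_service_v2.transports import (
        ConfigServiceV2GrpcTransport,
    )

    def channel_factory(host, **kwargs):
        # kwargs carries credentials, scopes, options, etc.; this toy factory
        # ignores them and returns a plain channel.
        return grpc.insecure_channel(host)

    transport = ConfigServiceV2GrpcTransport(
        host="localhost:8080",
        credentials=AnonymousCredentials(),
        channel=channel_factory,  # a callable, so credentials are NOT ignored
    )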
@@ -98,7 +99,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -118,15 +119,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -136,11 +140,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -167,9 +171,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -207,7 +212,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -1236,6 +1243,248 @@ def copy_log_entries( ) return self._stubs["copy_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_buckets: gapic_v1.method_async.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method_async.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: gapic_v1.method_async.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method_async.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method_async.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method_async.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: gapic_v1.method_async.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method_async.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.list_views: gapic_v1.method_async.wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: gapic_v1.method_async.wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: gapic_v1.method_async.wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: gapic_v1.method_async.wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_view: gapic_v1.method_async.wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: gapic_v1.method_async.wrap_method( + self.list_sinks, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: gapic_v1.method_async.wrap_method( + self.get_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method_async.wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: gapic_v1.method_async.wrap_method( + self.update_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method_async.wrap_method( + self.delete_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_link: gapic_v1.method_async.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method_async.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method_async.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method_async.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method_async.wrap_method( + self.list_exclusions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method_async.wrap_method( + self.get_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: gapic_v1.method_async.wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: gapic_v1.method_async.wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method_async.wrap_method( + self.delete_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method_async.wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method_async.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.get_settings: gapic_v1.method_async.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: gapic_v1.method_async.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method_async.wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 890361b49e28..27e8ca22617a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -40,6 +41,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -199,7 +201,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -211,9 +217,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -323,8 +331,8 @@ async def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError( @@ -332,7 +340,10 @@ async def sample_delete_log(): "the individual field arguments should be set." ) - request = logging.DeleteLogRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,22 +352,9 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
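The coercion that replaces the unconditional copy, distilled into a sketch (helper name illustrative): only non-instances are converted, so a caller's proto object is used as-is.

    from google.cloud.logging_v2.types import logging

    def coerce_request(request):
        # A dict (or None) is converted; a DeleteLogRequest instance is
        # passed through untouched, avoiding a needless proto copy.
        if not isinstance(request, logging.DeleteLogRequest):
            request = logging.DeleteLogRequest(request)
        return request
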
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log + ] # Certain fields should be provided within the metadata header; # add these here. @@ -524,8 +522,8 @@ async def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError( @@ -533,7 +531,10 @@ async def sample_write_log_entries(): "the individual field arguments should be set." ) - request = logging.WriteLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -549,22 +550,9 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.write_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -688,8 +676,8 @@ async def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError( @@ -697,7 +685,10 @@ async def sample_list_log_entries(): "the individual field arguments should be set." ) - request = logging.ListLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -710,22 +701,9 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
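Because ``wrap_method`` only applies its defaults when the caller supplies none, per-call overrides still work against the precomputed wrappers; a hypothetical call (the log name is illustrative):

    import asyncio
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )

    async def main():
        client = LoggingServiceV2AsyncClient()
        # A per-call timeout overrides the default_timeout baked into the
        # wrapper at _prep_wrapped_messages time.
        await client.delete_log(
            log_name="projects/my-project/logs/my-log",  # illustrative name
            timeout=10.0,
        )

    asyncio.run(main())
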
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -744,6 +722,8 @@ async def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -809,26 +789,16 @@ async def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -847,6 +817,8 @@ async def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -923,8 +895,8 @@ async def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -932,7 +904,10 @@ async def sample_list_logs(): "the individual field arguments should be set." ) - request = logging.ListLogsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -941,22 +916,9 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
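The ``retry``/``timeout`` values threaded into the pager above mean an explicit policy now covers every page fetch, not just the first request; hypothetical usage (the project id is illustrative):

    import asyncio
    from google.api_core import retry_async
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2AsyncClient,
    )

    async def main():
        client = LoggingServiceV2AsyncClient()
        pager = await client.list_log_entries(
            resource_names=["projects/my-project"],  # illustrative project
            retry=retry_async.AsyncRetry(deadline=120.0),
            timeout=30.0,
        )
        # Subsequent page fetches reuse the retry and timeout given above.
        async for entry in pager:
            print(entry.log_name)

    asyncio.run(main())
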
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_logs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_logs + ] # Certain fields should be provided within the metadata header; # add these here. @@ -981,6 +943,8 @@ async def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1052,22 +1016,9 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.tail_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.tail_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a9d6e082b3af..2c86aecca89b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -526,7 +527,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -538,9 +543,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
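With ``transport`` now accepting a callable, a factory can stand in for the registry lookup; hypothetical usage (the factory receives the same keyword arguments the client would pass to the transport constructor):

    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )
    from google.cloud.logging_v2.services.logging_service_v2.transports.grpc import (
        LoggingServiceV2GrpcTransport,
    )

    def transport_factory(**kwargs):
        # kwargs include credentials, host, client_info, and so on, exactly
        # as they would be passed to the transport class itself.
        return LoggingServiceV2GrpcTransport(**kwargs)

    client = LoggingServiceV2Client(transport=transport_factory)
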
@@ -649,8 +656,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[LoggingServiceV2Transport], + Callable[..., LoggingServiceV2Transport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LoggingServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -728,8 +743,8 @@ def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError( @@ -737,10 +752,8 @@ def sample_delete_log(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.DeleteLogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the @@ -918,8 +931,8 @@ def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError( @@ -927,10 +940,8 @@ def sample_write_log_entries(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.WriteLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1070,8 +1081,8 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError( @@ -1079,10 +1090,8 @@ def sample_list_log_entries(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1115,6 +1124,8 @@ def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1180,10 +1191,8 @@ def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -1210,6 +1219,8 @@ def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1286,8 +1297,8 @@ def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1295,10 +1306,8 @@ def sample_list_logs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1333,6 +1342,8 @@ def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index f8a63387bfbf..0eece8acc529 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
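Condensed, the pager changes that follow all take the same shape: ``retry`` and ``timeout`` are captured at construction and replayed on every subsequent page request. A sketch, not library code:

    class PagerSketch:
        """Simplified stand-in for the generated pagers."""

        def __init__(self, method, request, response, *,
                     retry=None, timeout=None, metadata=()):
            self._method = method
            self._request = request
            self._response = response
            # Newly captured so later pages honor the caller's settings.
            self._retry = retry
            self._timeout = timeout
            self._metadata = metadata

        @property
        def pages(self):
            yield self._response
            while self._response.next_page_token:
                self._request.page_token = self._response.next_page_token
                self._response = self._method(
                    self._request,
                    retry=self._retry,
                    timeout=self._timeout,
                    metadata=self._metadata,
                )
                yield self._response
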
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -53,6 +66,8 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -64,12 +79,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -80,7 +100,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -115,6 +140,8 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -126,12 +153,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -142,7 +174,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: @@ -181,6 +218,8 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -192,12 +231,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -208,7 +252,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -245,6 +294,8 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -256,12 +307,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -274,7 +330,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__( @@ -315,6 +376,8 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -326,12 +389,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -342,7 +410,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -377,6 +450,8 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -388,12 +463,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -404,7 +484,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[str]: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 5bbd7cc78a06..6f7e1c99d5f4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 8a6a3efd3819..bce7e8ffc3dc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -52,7 +52,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -72,14 +72,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -89,11 +92,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -119,9 +122,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -160,7 +164,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 159a0e2e4975..f03c1fad7251 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -67,7 +69,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -97,7 +98,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -117,15 +118,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -135,11 +139,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -165,9 +169,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
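Hypothetical usage enabled by this hunk: passing the ``create_channel`` classmethod itself (rather than a channel instance) keeps TLS and credential wiring in one place while still allowing a custom factory to be substituted.

    from google.cloud.logging_v2.services.logging_service_v2.transports.grpc_asyncio import (
        LoggingServiceV2GrpcAsyncIOTransport,
    )

    # A callable, not an instance, so credentials are still resolved and the
    # factory is invoked with the same arguments as create_channel.
    transport = LoggingServiceV2GrpcAsyncIOTransport(
        channel=LoggingServiceV2GrpcAsyncIOTransport.create_channel,
    )
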
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -205,7 +210,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -420,6 +427,107 @@ def tail_log_entries( ) return self._stubs["tail_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.delete_log: gapic_v1.method_async.wrap_method( + self.delete_log, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: gapic_v1.method_async.wrap_method( + self.write_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: gapic_v1.method_async.wrap_method( + self.list_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: gapic_v1.method_async.wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: gapic_v1.method_async.wrap_method( + self.list_logs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: gapic_v1.method_async.wrap_method( + self.tail_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 1053158e95ac..19513e12620e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -197,7 +199,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -209,9 +215,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -323,8 +331,8 @@ async def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -332,7 +340,10 @@ async def sample_list_log_metrics(): "the individual field arguments should be set." ) - request = logging_metrics.ListLogMetricsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,22 +352,9 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_metrics, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_metrics + ] # Certain fields should be provided within the metadata header; # add these here. 
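The metadata header mentioned here is the routing header; roughly what the generated code appends (the wrapper function is illustrative):

    from google.api_core import gapic_v1

    def routing_metadata(request, metadata=()):
        # Copies the request's parent into an x-goog-request-params header
        # so the backend can route the call.
        return tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("parent", request.parent),)
            ),
        )
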
@@ -381,6 +379,8 @@ async def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -460,8 +460,8 @@ async def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -469,7 +469,10 @@ async def sample_get_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.GetLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -478,22 +481,9 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -607,8 +597,8 @@ async def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -616,7 +606,10 @@ async def sample_create_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.CreateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -627,11 +620,9 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_log_metric, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -742,8 +733,8 @@ async def sample_update_log_metric(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -751,7 +742,10 @@ async def sample_update_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.UpdateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -762,22 +756,9 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -855,8 +836,8 @@ async def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -864,7 +845,10 @@ async def sample_delete_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.DeleteLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -873,22 +857,9 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9309f5c1779b..5f577decf4e1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -525,7 +526,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -537,9 +542,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -648,8 +655,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[MetricsServiceV2Transport], + Callable[..., MetricsServiceV2Transport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetricsServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -729,8 +744,8 @@ def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -738,10 +753,8 @@ def sample_list_log_metrics(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.ListLogMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
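The mutual-exclusion guard shown above in action; hypothetical usage (the project id is illustrative):

    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )
    from google.cloud.logging_v2.types import ListLogMetricsRequest

    client = MetricsServiceV2Client()
    try:
        # Mixing a request object with a flattened field trips the check
        # and raises ValueError.
        client.list_log_metrics(
            request=ListLogMetricsRequest(parent="projects/my-project"),
            parent="projects/my-project",
        )
    except ValueError as exc:
        print(exc)
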
if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -776,6 +789,8 @@ def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -855,8 +870,8 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +879,8 @@ def sample_get_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.GetLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -991,8 +1004,8 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -1000,10 +1013,8 @@ def sample_create_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.CreateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1126,8 +1137,8 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -1135,10 +1146,8 @@ def sample_update_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.UpdateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1228,8 +1237,8 @@ def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1237,10 +1246,8 @@ def sample_delete_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.DeleteLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 70bad4bea533..6975ae0d9653 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_metrics @@ -51,6 +64,8 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -113,6 +138,8 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f63d896b2572..aeb86e1e9566 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 3c4a2f38fb89..1b16e97017a8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -52,7 +52,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -72,14 +72,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -89,11 +92,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -119,9 +122,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -160,7 +164,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 33f85cc9627c..81c675d85409 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -67,7 +69,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -97,7 +98,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -117,15 +118,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -135,11 +139,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -165,9 +169,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -205,7 +210,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -373,6 +380,80 @@ def delete_log_metric( ) return self._stubs["delete_log_metric"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_log_metrics: gapic_v1.method_async.wrap_method( + self.list_log_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_log_metric: gapic_v1.method_async.wrap_method( + self.get_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_log_metric: gapic_v1.method_async.wrap_method( + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, + ), + self.update_log_metric: gapic_v1.method_async.wrap_method( + self.update_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_log_metric: gapic_v1.method_async.wrap_method( + self.delete_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 5cad3409b005..2d8ce14b8cb5 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -25,3 +25,5 @@ filterwarnings = ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning # Remove warning once https://github.com/protocolbuffers/protobuf/issues/17345 is fixed ignore:.*Please use message_factory.GetMessageClass\(\) instead. SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/2046 is fixed + ignore:coroutine 'AsyncMockMixin._execute_mock_call' was never awaited:RuntimeWarning diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 9d5a375e9d98..b62675ba6439 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 5414f1b087b7..1e214751dd07 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -47,7 +47,7 @@ "opentelemetry-api >= 1.0.0", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index 3aded209e3d8..fa18c36c0260 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -7,7 +7,7 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.0 -protobuf==3.19.5 +protobuf==3.20.2 google-cloud-core==2.0.0 # Lower bound testing for optional dependencies diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index b1c25ba9ea90..72e028529c6a 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -40,6 +40,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( @@ -1169,6 +1170,9 @@ def test_list_buckets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1193,6 +1197,9 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1202,6 +1209,41 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): ) +def test_list_buckets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_buckets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc + request = {} + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_buckets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1225,6 +1267,47 @@ async def test_list_buckets_empty_call_async(): assert args[0] == logging_config.ListBucketsRequest() +@pytest.mark.asyncio +async def test_list_buckets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_buckets + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_buckets + ] = mock_object + + request = {} + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest @@ -1444,13 +1527,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_buckets(request={}) + pager = client.list_buckets(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1655,6 +1742,9 @@ def test_get_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1678,6 +1768,9 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1686,6 +1779,41 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): ) +def test_get_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc + request = {} + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1715,6 +1843,45 @@ async def test_get_bucket_empty_call_async(): assert args[0] == logging_config.GetBucketRequest() +@pytest.mark.asyncio +async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_bucket + ] = mock_object + + request = {} + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest @@ -1874,6 +2041,9 @@ def test_create_bucket_async_empty_call(): with mock.patch.object( type(client.transport.create_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1900,6 +2070,9 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1909,6 +2082,49 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): ) +def test_create_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_bucket_async + ] = mock_rpc + request = {} + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1932,6 +2148,51 @@ async def test_create_bucket_async_empty_call_async(): assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket_async + ] = mock_object + + request = {} + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -2082,6 +2343,9 @@ def test_update_bucket_async_empty_call(): with mock.patch.object( type(client.transport.update_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2107,6 +2371,9 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2115,6 +2382,49 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): ) +def test_update_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_bucket_async + ] = mock_rpc + request = {} + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2138,6 +2448,51 @@ async def test_update_bucket_async_empty_call_async(): assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket_async + ] = mock_object + + request = {} + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -2299,6 +2654,9 @@ def test_create_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2323,6 +2681,9 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2332,6 +2693,41 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): ) +def test_create_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc + request = {} + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2361,6 +2757,47 @@ async def test_create_bucket_empty_call_async(): assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket + ] = mock_object + + request = {} + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -2531,6 +2968,9 @@ def test_update_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2554,6 +2994,9 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2562,6 +3005,41 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): ) +def test_update_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc + request = {} + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2591,6 +3069,47 @@ async def test_update_bucket_empty_call_async(): assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket + ] = mock_object + + request = {} + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -2746,6 +3265,9 @@ def test_delete_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2769,6 +3291,9 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2777,6 +3302,41 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): ) +def test_delete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc + request = {} + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2796,6 +3356,47 @@ async def test_delete_bucket_empty_call_async(): assert args[0] == logging_config.DeleteBucketRequest() +@pytest.mark.asyncio +async def test_delete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_bucket + ] = mock_object + + request = {} + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest @@ -2932,6 +3533,9 @@ def test_undelete_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2955,6 +3559,9 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2963,6 +3570,41 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): ) +def test_undelete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc + request = {} + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_undelete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2982,6 +3624,47 @@ async def test_undelete_bucket_empty_call_async(): assert args[0] == logging_config.UndeleteBucketRequest() +@pytest.mark.asyncio +async def test_undelete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.undelete_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.undelete_bucket + ] = mock_object + + request = {} + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest @@ -3121,6 +3804,9 @@ def test_list_views_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3145,6 +3831,9 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3154,6 +3843,41 @@ def test_list_views_non_empty_request_with_auto_populated_field(): ) +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_views_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3177,6 +3901,45 @@ async def test_list_views_empty_call_async(): assert args[0] == logging_config.ListViewsRequest() +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_object + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest @@ -3396,13 +4159,17 @@ def test_list_views_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_views(request={}) + pager = client.list_views(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3599,6 +4366,9 @@ def test_get_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3622,6 +4392,9 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3630,6 +4403,41 @@ def test_get_view_non_empty_request_with_auto_populated_field(): ) +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3655,6 +4463,45 @@ async def test_get_view_empty_call_async(): assert args[0] == logging_config.GetViewRequest() +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_object + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest @@ -3809,6 +4656,9 @@ def test_create_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3833,6 +4683,9 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
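One assertion above deserves a note: `client._transport.get_view in client._transport._wrapped_methods` works even though every attribute access builds a fresh bound-method object, because bound methods hash and compare by their underlying function plus instance:

```python
class Stub:
    def get_view(self, request):
        return None

stub = Stub()
assert stub.get_view == stub.get_view        # equal by (function, instance)...
assert stub.get_view is not stub.get_view    # ...though distinct objects
wrapped = {stub.get_view: "wrapped"}
assert stub.get_view in wrapped              # so dict membership still holds
```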
with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3842,6 +4695,41 @@ def test_create_view_non_empty_request_with_auto_populated_field(): ) +def test_create_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + request = {} + client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3867,6 +4755,47 @@ async def test_create_view_empty_call_async(): assert args[0] == logging_config.CreateViewRequest() +@pytest.mark.asyncio +async def test_create_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_view + ] = mock_object + + request = {} + await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest @@ -4021,6 +4950,9 @@ def test_update_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4044,6 +4976,9 @@ def test_update_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4052,6 +4987,41 @@ def test_update_view_non_empty_request_with_auto_populated_field(): ) +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4077,6 +5047,47 @@ async def test_update_view_empty_call_async(): assert args[0] == logging_config.UpdateViewRequest() +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_object + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest @@ -4224,6 +5235,9 @@ def test_delete_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
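All of these hunks mock the stub via `mock.patch.object(type(client.transport.update_view), "__call__")` rather than patching the instance. That is deliberate: Python resolves `obj(...)` through `type(obj).__call__`, skipping instance attributes, so only a class-level patch intercepts the call. A toy reproduction:

```python
from unittest import mock

class MultiCallable:
    def __call__(self, request):
        return "real"

stub = MultiCallable()
stub.__call__ = lambda request: "instance"   # never consulted by stub(...)
assert stub({}) == "real"

# Patching the *type*, as the generated tests do, is what takes effect:
with mock.patch.object(type(stub), "__call__") as call:
    call.return_value = "patched"
    assert stub({}) == "patched"
    call.assert_called()
```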
with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4247,6 +5261,9 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4255,23 +5272,99 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): ) -@pytest.mark.asyncio -async def test_delete_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. 
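The "appropriate return value" designated just below is `grpc_helpers_async.FakeUnaryUnaryCall(None)`: the async empty-call tests need the mocked stub to hand back something awaitable, and awaiting the fake call is what these tests rely on to yield the wrapped response (here `None`, matching the Empty-returning `delete_view` RPC). A hedged illustration of that mechanic, assuming the helper behaves as the tests use it:

```python
import asyncio
from google.api_core import grpc_helpers_async

async def main():
    # Awaiting the fake call yields whatever response it wraps.
    fake = grpc_helpers_async.FakeUnaryUnaryCall(None)
    response = await fake
    assert response is None

asyncio.run(main())
```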
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_object + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_view(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio @@ -4413,6 +5506,9 @@ def test_list_sinks_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4437,6 +5533,9 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sinks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4446,6 +5545,41 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): ) +def test_list_sinks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sinks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc + request = {} + client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_sinks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4469,6 +5603,45 @@ async def test_list_sinks_empty_call_async(): assert args[0] == logging_config.ListSinksRequest() +@pytest.mark.asyncio +async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sinks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sinks + ] = mock_object + + request = {} + await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest @@ -4688,13 +5861,17 @@ def test_list_sinks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_sinks(request={}) + pager = client.list_sinks(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4901,6 +6078,9 @@ def test_get_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4924,6 +6104,9 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4932,6 +6115,41 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): ) +def test_get_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc + request = {} + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4962,6 +6180,45 @@ async def test_get_sink_empty_call_async(): assert args[0] == logging_config.GetSinkRequest() +@pytest.mark.asyncio +async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sink + ] = mock_object + + request = {} + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest @@ -5218,6 +6475,9 @@ def test_create_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5241,6 +6501,9 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
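The pager hunks a little above (`list_views`, `list_sinks`) keep asserting that the expected metadata ends with `gapic_v1.routing_header.to_grpc_metadata((("parent", ""),))`. That helper URL-encodes the routing parameters into the `x-goog-request-params` header pair; a quick check of the shape, worth verifying against the installed `google-api-core`:

```python
from google.api_core.gapic_v1 import routing_header

key, value = routing_header.to_grpc_metadata((("parent", ""),))
print(key, repr(value))   # x-goog-request-params 'parent='
```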
with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5249,6 +6512,41 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): ) +def test_create_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc + request = {} + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5279,6 +6577,47 @@ async def test_create_sink_empty_call_async(): assert args[0] == logging_config.CreateSinkRequest() +@pytest.mark.asyncio +async def test_create_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_sink + ] = mock_object + + request = {} + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest @@ -5545,6 +6884,9 @@ def test_update_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5568,6 +6910,9 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5576,6 +6921,41 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): ) +def test_update_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc + request = {} + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5606,6 +6986,47 @@ async def test_update_sink_empty_call_async(): assert args[0] == logging_config.UpdateSinkRequest() +@pytest.mark.asyncio +async def test_update_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_sink + ] = mock_object + + request = {} + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest @@ -5865,6 +7286,9 @@ def test_delete_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5888,6 +7312,9 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5896,6 +7323,41 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): ) +def test_delete_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc + request = {} + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5915,6 +7377,47 @@ async def test_delete_sink_empty_call_async(): assert args[0] == logging_config.DeleteSinkRequest() +@pytest.mark.asyncio +async def test_delete_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_sink + ] = mock_object + + request = {} + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest @@ -6131,6 +7634,9 @@ def test_create_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6155,6 +7661,9 @@ def test_create_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6164,25 +7673,109 @@ def test_create_link_non_empty_request_with_auto_populated_field(): ) -@pytest.mark.asyncio -async def test_create_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) +def test_create_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_link] = mock_rpc + request = {} + client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. 
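For the long-running-operation methods (`create_link`, `delete_link`), the designated return value below is a `FakeUnaryUnaryCall` wrapping an `operations_pb2.Operation`, and the cached-rpc tests gain an extra `wrapper_fn.reset_mock()` after the first RPC: as their comment says, operation methods may build one more cached wrapper lazily on first call, and only calls after that must come from the cache. A sketch of that allowance, with illustrative names:

```python
from unittest import mock

def wrap_method(fn):
    return fn

class LroTransport:
    def __init__(self):
        self._wrapped_methods = {}

    def create_link(self, request):
        return "operations/spam"

    def call(self, request):
        # Lazily wrap on first use (the allowance made for LROs), then
        # serve every later call from the cache.
        if self.create_link not in self._wrapped_methods:
            self._wrapped_methods[self.create_link] = wrap_method(self.create_link)
        return self._wrapped_methods[self.create_link](request)

with mock.patch(__name__ + ".wrap_method", wraps=wrap_method) as wrapper_fn:
    transport = LroTransport()
    transport.call({})            # first call may wrap...
    wrapper_fn.reset_mock()       # ...so the tests reset here
    transport.call({})
    assert wrapper_fn.call_count == 0   # second call reused the cache
```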
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + + +@pytest.mark.asyncio +async def test_create_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_link + ] = mock_object + + request = {} + await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_link(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_link), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio @@ -6427,6 +8020,9 @@ def test_delete_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6450,6 +8046,9 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6458,6 +8057,45 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): ) +def test_delete_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc + request = {} + client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6479,6 +8117,51 @@ async def test_delete_link_empty_call_async(): assert args[0] == logging_config.DeleteLinkRequest() +@pytest.mark.asyncio +async def test_delete_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_link + ] = mock_object + + request = {} + await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_link_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest @@ -6704,6 +8387,9 @@ def test_list_links_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6728,6 +8414,9 @@ def test_list_links_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6737,6 +8426,41 @@ def test_list_links_non_empty_request_with_auto_populated_field(): ) +def test_list_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_links in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_links] = mock_rpc + request = {} + client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6760,6 +8484,45 @@ async def test_list_links_empty_call_async(): assert args[0] == logging_config.ListLinksRequest() +@pytest.mark.asyncio +async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_links + ] = mock_object + + request = {} + await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_links_async( transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest @@ -6979,13 +8742,17 @@ def test_list_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_links(request={}) + pager = client.list_links(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -7182,6 +8949,9 @@ def test_get_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7205,6 +8975,9 @@ def test_get_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7213,6 +8986,41 @@ def test_get_link_non_empty_request_with_auto_populated_field(): ) +def test_get_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_link] = mock_rpc + request = {} + client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
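The `list_*_pager` hunks (`list_views`, `list_sinks`, `list_links` above, and `list_exclusions` further below) all make the same two-part change: rename the local to `expected_metadata`, and assert that an explicit `retry`/`timeout` passed to the list call is stored on the pager as `_retry`/`_timeout`. The point is that subsequent page fetches replay the caller's retry and timeout rather than silently dropping them. A minimal pager sketch of that behavior, with illustrative internals:

```python
from google.api_core import retry as retries

class Pager:
    """Toy pager that forwards retry/timeout to every page fetch."""

    def __init__(self, method, request, retry=None, timeout=None, metadata=()):
        self._method = method
        self._request = request
        self._retry = retry        # what the new assertions inspect
        self._timeout = timeout
        self._metadata = metadata

    def pages(self):
        # Every page, not just the first, reuses the caller's settings.
        yield self._method(self._request, retry=self._retry,
                           timeout=self._timeout, metadata=self._metadata)

retry = retries.Retry()
calls = []
pager = Pager(lambda req, **kw: calls.append(kw), request={}, retry=retry, timeout=5)
list(pager.pages())
assert calls[0]["retry"] is retry and calls[0]["timeout"] == 5
```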
+ assert mock_rpc.call_count == 1 + + client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7238,6 +9046,45 @@ async def test_get_link_empty_call_async(): assert args[0] == logging_config.GetLinkRequest() +@pytest.mark.asyncio +async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_link + ] = mock_object + + request = {} + await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_link_async( transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest @@ -7466,6 +9313,9 @@ def test_list_exclusions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7490,6 +9340,9 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_exclusions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7499,6 +9352,41 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): ) +def test_list_exclusions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_exclusions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc + request = {} + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_exclusions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7522,6 +9410,47 @@ async def test_list_exclusions_empty_call_async(): assert args[0] == logging_config.ListExclusionsRequest() +@pytest.mark.asyncio +async def test_list_exclusions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_exclusions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_exclusions + ] = mock_object + + request = {} + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest @@ -7741,13 +9670,17 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_exclusions(request={}) + pager = client.list_exclusions(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -7946,6 +9879,9 @@ def test_get_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7969,6 +9905,9 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7977,6 +9916,41 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_get_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc + request = {} + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8003,6 +9977,47 @@ async def test_get_exclusion_empty_call_async(): assert args[0] == logging_config.GetExclusionRequest() +@pytest.mark.asyncio +async def test_get_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_exclusion + ] = mock_object + + request = {} + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest @@ -8243,6 +10258,9 @@ def test_create_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8266,6 +10284,9 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8274,6 +10295,43 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_create_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_exclusion + ] = mock_rpc + request = {} + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8300,6 +10358,47 @@ async def test_create_exclusion_empty_call_async(): assert args[0] == logging_config.CreateExclusionRequest() +@pytest.mark.asyncio +async def test_create_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_exclusion + ] = mock_object + + request = {} + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
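+ # A count of 1 here shows the request was dispatched through the AsyncMock
+ # swapped into _wrapped_methods, i.e. the client resolved the method from
+ # the cache instead of wrapping the stub again.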
+ assert mock_object.call_count == 1 + + await client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest @@ -8550,6 +10649,9 @@ def test_update_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8571,14 +10673,54 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): name="name_value", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - client.update_exclusion(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name="name_value", + ) + + +def test_update_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.update_exclusion + ] = mock_rpc + request = {} + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
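+ # An empty dict is enough for the request: mock_rpc short-circuits the
+ # transport, so only request construction and the cache lookup are exercised.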
+ assert mock_rpc.call_count == 1 + + client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8607,6 +10749,47 @@ async def test_update_exclusion_empty_call_async(): assert args[0] == logging_config.UpdateExclusionRequest() +@pytest.mark.asyncio +async def test_update_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_exclusion + ] = mock_object + + request = {} + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest @@ -8858,6 +11041,9 @@ def test_delete_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8881,6 +11067,9 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8889,6 +11078,43 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_delete_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_exclusion + ] = mock_rpc + request = {} + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8908,6 +11134,47 @@ async def test_delete_exclusion_empty_call_async(): assert args[0] == logging_config.DeleteExclusionRequest() +@pytest.mark.asyncio +async def test_delete_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_exclusion + ] = mock_object + + request = {} + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest @@ -9137,6 +11404,9 @@ def test_get_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.get_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9162,6 +11432,9 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9170,6 +11443,43 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): ) +def test_get_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cmek_settings + ] = mock_rpc + request = {} + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9198,6 +11508,47 @@ async def test_get_cmek_settings_empty_call_async(): assert args[0] == logging_config.GetCmekSettingsRequest() +@pytest.mark.asyncio +async def test_get_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cmek_settings + ] = mock_object + + request = {} + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest @@ -9366,6 +11717,9 @@ def test_update_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.update_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9391,6 +11745,9 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9399,6 +11756,45 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): ) +def test_update_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cmek_settings in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cmek_settings + ] = mock_rpc + request = {} + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9427,6 +11823,47 @@ async def test_update_cmek_settings_empty_call_async(): assert args[0] == logging_config.UpdateCmekSettingsRequest() +@pytest.mark.asyncio +async def test_update_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cmek_settings + ] = mock_object + + request = {} + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
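+ # The async client wraps a synchronous core client, which is why the
+ # method cache is reached through client._client._transport above.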
+ assert mock_object.call_count == 1 + + await client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_cmek_settings_async( transport: str = "grpc_asyncio", @@ -9594,6 +12031,9 @@ def test_get_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9617,6 +12057,9 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9625,6 +12068,41 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): ) +def test_get_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + request = {} + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. 
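+ # wrap_method is patched module-wide, so any re-wrapping triggered by
+ # these calls would register on wrapper_fn, which is asserted to stay
+ # at zero below.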
+ assert mock_rpc.call_count == 1 + + client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9652,6 +12130,47 @@ async def test_get_settings_empty_call_async(): assert args[0] == logging_config.GetSettingsRequest() +@pytest.mark.asyncio +async def test_get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_settings + ] = mock_object + + request = {} + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest @@ -9896,6 +12415,9 @@ def test_update_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9919,6 +12441,9 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9927,6 +12452,41 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): ) +def test_update_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + request = {} + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9954,6 +12514,47 @@ async def test_update_settings_empty_call_async(): assert args[0] == logging_config.UpdateSettingsRequest() +@pytest.mark.asyncio +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_settings + ] = mock_object + + request = {} + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest @@ -10197,6 +12798,9 @@ def test_copy_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.copy_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10222,6 +12826,9 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10232,6 +12839,47 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_copy_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.copy_log_entries + ] = mock_rpc + request = {} + client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_copy_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -10253,6 +12901,51 @@ async def test_copy_log_entries_empty_call_async(): assert args[0] == logging_config.CopyLogEntriesRequest() +@pytest.mark.asyncio +async def test_copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_object + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
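+ # copy_log_entries returns a long-running operation; as the comments below
+ # note, the first call may build one extra cached wrapper for the LRO
+ # plumbing, so wrapper_fn is reset again before the second call.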
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_copy_log_entries_async( transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index facbea0fa2d9..b1cae4824cce 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -37,6 +37,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( @@ -1169,6 +1170,9 @@ def test_delete_log_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1192,6 +1196,9 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1200,6 +1207,41 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): ) +def test_delete_log_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc + request = {} + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
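+ # Assigning a concrete string to return_value.name (see the comment above)
+ # prevents a Mock attribute from leaking into code paths that expect str.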
+ assert mock_rpc.call_count == 1 + + client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1219,6 +1261,45 @@ async def test_delete_log_empty_call_async(): assert args[0] == logging.DeleteLogRequest() +@pytest.mark.asyncio +async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log + ] = mock_object + + request = {} + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest @@ -1439,6 +1520,9 @@ def test_write_log_entries_empty_call(): with mock.patch.object( type(client.transport.write_log_entries), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1464,6 +1548,9 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.write_log_entries), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1472,6 +1559,43 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_write_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.write_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.write_log_entries + ] = mock_rpc + request = {} + client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_write_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1495,6 +1619,47 @@ async def test_write_log_entries_empty_call_async(): assert args[0] == logging.WriteLogEntriesRequest() +@pytest.mark.asyncio +async def test_write_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.write_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.write_log_entries + ] = mock_object + + request = {} + await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest @@ -1695,6 +1860,9 @@ def test_list_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1720,6 +1888,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1730,6 +1901,43 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_list_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_entries + ] = mock_rpc + request = {} + client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1753,6 +1961,47 @@ async def test_list_log_entries_empty_call_async(): assert args[0] == logging.ListLogEntriesRequest() +@pytest.mark.asyncio +async def test_list_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_entries + ] = mock_object + + request = {} + await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest @@ -1931,10 +2180,14 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - pager = client.list_log_entries(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_log_entries(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2131,6 +2384,9 @@ def test_list_monitored_resource_descriptors_empty_call(): with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2156,6 +2412,9 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2164,6 +2423,46 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat ) +def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_monitored_resource_descriptors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_monitored_resource_descriptors + ] = mock_rpc + request = {} + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2189,6 +2488,47 @@ async def test_list_monitored_resource_descriptors_empty_call_async(): assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_monitored_resource_descriptors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_monitored_resource_descriptors + ] = mock_object + + request = {} + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async( transport: str = "grpc_asyncio", @@ -2270,10 +2610,16 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - pager = client.list_monitored_resource_descriptors(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_monitored_resource_descriptors( + request={}, retry=retry, timeout=timeout + ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2480,6 +2826,9 @@ def test_list_logs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2504,6 +2853,9 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2513,6 +2865,41 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): ) +def test_list_logs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_logs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc + request = {} + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_logs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2537,6 +2924,45 @@ async def test_list_logs_empty_call_async(): assert args[0] == logging.ListLogsRequest() +@pytest.mark.asyncio +async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_logs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_logs + ] = mock_object + + request = {} + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest @@ -2758,13 +3184,17 @@ def test_list_logs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_logs(request={}) + pager = client.list_logs(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2945,6 +3375,84 @@ def test_tail_log_entries(request_type, transport: str = "grpc"): assert isinstance(message, logging.TailLogEntriesResponse) +def test_tail_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.tail_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.tail_log_entries + ] = mock_rpc + request = [{}] + client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.tail_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.tail_log_entries + ] = mock_object + + request = [{}] + await client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
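+ # tail_log_entries is a bidirectional streaming method, so the request
+ # above is an iterable of request dicts ([{}]) rather than a single mapping.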
+ assert mock_object.call_count == 1 + + await client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index abeaa4c6e0f2..7909609fabd2 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -40,6 +40,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( @@ -1185,6 +1186,9 @@ def test_list_log_metrics_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1209,6 +1213,9 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1218,6 +1225,43 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): ) +def test_list_log_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_metrics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_metrics + ] = mock_rpc + request = {} + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_metrics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1241,6 +1285,47 @@ async def test_list_log_metrics_empty_call_async(): assert args[0] == logging_metrics.ListLogMetricsRequest() +@pytest.mark.asyncio +async def test_list_log_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_metrics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_metrics + ] = mock_object + + request = {} + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest @@ -1460,13 +1545,17 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_log_metrics(request={}) + pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1671,6 +1760,9 @@ def test_get_log_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1694,6 +1786,9 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1702,6 +1797,41 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_get_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc + request = {} + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1731,6 +1861,47 @@ async def test_get_log_metric_empty_call_async(): assert args[0] == logging_metrics.GetLogMetricRequest() +@pytest.mark.asyncio +async def test_get_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_log_metric + ] = mock_object + + request = {} + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest @@ -1987,6 +2158,9 @@ def test_create_log_metric_empty_call(): with mock.patch.object( type(client.transport.create_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2012,6 +2186,9 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2020,6 +2197,43 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_create_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_log_metric + ] = mock_rpc + request = {} + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2051,6 +2265,47 @@ async def test_create_log_metric_empty_call_async(): assert args[0] == logging_metrics.CreateLogMetricRequest() +@pytest.mark.asyncio +async def test_create_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_log_metric + ] = mock_object + + request = {} + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest @@ -2327,6 +2582,9 @@ def test_update_log_metric_empty_call(): with mock.patch.object( type(client.transport.update_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2352,6 +2610,9 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2360,6 +2621,43 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_update_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_log_metric + ] = mock_rpc + request = {} + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
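
One recurring detail worth unpacking: every sync variant assigns mock_rpc.return_value.name = "foo" after constructing the mock, never mock.Mock(name="foo"). That is deliberate. The name keyword in the Mock constructor is reserved for naming the mock itself (it only affects the repr), and an unset .name attribute resolves to a child Mock rather than a string, which would break any code path that reads response.name and expects text, as the compute clients mentioned in the inline comments do. A small demonstration:

    from unittest import mock

    named = mock.Mock(name="foo")            # names the mock, for its repr
    assert "foo" in repr(named)
    assert not isinstance(named.name, str)   # .name is still a child Mock

    rpc = mock.Mock()
    rpc.return_value.name = "foo"            # assign after construction
    assert rpc({}).name == "foo"             # readers of .name get a string
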
+ assert mock_rpc.call_count == 1 + + client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2391,6 +2689,47 @@ async def test_update_log_metric_empty_call_async(): assert args[0] == logging_metrics.UpdateLogMetricRequest() +@pytest.mark.asyncio +async def test_update_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_log_metric + ] = mock_object + + request = {} + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest @@ -2652,6 +2991,9 @@ def test_delete_log_metric_empty_call(): with mock.patch.object( type(client.transport.delete_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2677,6 +3019,9 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2685,6 +3030,43 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_delete_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_log_metric + ] = mock_rpc + request = {} + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2706,6 +3088,47 @@ async def test_delete_log_metric_empty_call_async(): assert args[0] == logging_metrics.DeleteLogMetricRequest() +@pytest.mark.asyncio +async def test_delete_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log_metric + ] = mock_object + + request = {} + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest From 26426119d846b4979fb4756b171633d9b85eebc3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 15:08:09 -0700 Subject: [PATCH 820/855] chore(main): release 3.11.1 (#927) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 19f9217cb316..2ecd919c775a 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.0" + ".": "3.11.1" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 79fa006559e4..fe470282897d 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.1](https://github.com/googleapis/python-logging/compare/v3.11.0...v3.11.1) (2024-08-06) + + +### Bug Fixes 
+ +* Allow protobuf 5.x ([#888](https://github.com/googleapis/python-logging/issues/888)) ([7746e64](https://github.com/googleapis/python-logging/commit/7746e643af29b1008d6e6d6a9958c8337c958dd4)) + ## [3.11.0](https://github.com/googleapis/python-logging/compare/v3.10.0...v3.11.0) (2024-07-15) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 6c2e88f2b9bb..f897ec818e47 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.0" # {x-release-please-version} +__version__ = "3.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 6c2e88f2b9bb..f897ec818e47 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.0" # {x-release-please-version} +__version__ = "3.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..263c808b8fc8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.11.1" }, "snippets": [ { From 6225ec5f1692ee3a09ed47d6c464606060dc9105 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 7 Aug 2024 13:54:27 -0400 Subject: [PATCH 821/855] fix: Fixed type hinting issue with specifying Transport class (#930) * Fixed type hinting issue with specifying Transport class * Switching to typing.Type for compatibility with older Python versions --- .../google/cloud/logging_v2/handlers/handlers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 5b11bfe30483..ea84bb3cc3e5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -18,7 +18,7 @@ import json import logging -from typing import Optional, IO +from typing import Optional, IO, Type from google.cloud.logging_v2.handlers.transports import ( BackgroundThreadTransport, @@ -157,7 +157,7 @@ def __init__( client, *, name: str = DEFAULT_LOGGER_NAME, - transport: Transport = BackgroundThreadTransport, + transport: Type[Transport] = BackgroundThreadTransport, resource: Resource = None, labels: Optional[dict] = None, stream: Optional[IO] = None, From 638c70cc6332d1c1eb68c7db9f7088fbd13c9d61 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 15 Aug 2024 16:23:02 -0400 Subject: [PATCH 822/855] build: 
update constraints files (#935) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build: update constraints files * add constraints for python 3.9 to test protobuf 5.x * update constraints * fix(deps): require google-cloud-audit-log >= 0.1.1 * fix(deps): require opentelemetry-api>=1.9.0 * fix(deps): require google-cloud-appengine-logging>=0.1.3 * fix(deps): require google-cloud-audit-log >= 0.2.4 * Remove constraints for python 3.9 constraints except protobuf 5.x * remove protobuf 5.x from testing/constraints-3.9.txt * test against the pre-release version of google-cloud-audit-log * use python 3.12 for system tests * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-logging/CONTRIBUTING.rst | 4 ++-- packages/google-cloud-logging/noxfile.py | 3 ++- packages/google-cloud-logging/owlbot.py | 11 ++++++++-- packages/google-cloud-logging/setup.py | 6 +++--- .../testing/constraints-3.10.txt | 10 ++++++++++ .../testing/constraints-3.11.txt | 10 ++++++++++ .../testing/constraints-3.12.txt | 10 ++++++++++ .../testing/constraints-3.7.txt | 4 ++++ .../testing/constraints-3.8.txt | 20 +++++++++++++++++-- .../testing/constraints-3.9.txt | 16 +++++++++++++++ 10 files changed, 84 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index f5be18c3dfcb..4d4b79d75a3a 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.8 -- -k + $ nox -s system-3.12 -- -k .. note:: - System tests are only configured to run under Python 3.8. + System tests are only configured to run under Python 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 65e583ec27ec..8a410531a581 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -52,7 +52,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -419,6 +419,7 @@ def prerelease_deps(session, protobuf_implementation): session.install(*constraints_deps) prerel_deps = [ + "google-cloud-audit-log", "protobuf", # dependency of grpc "six", diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index f1a5b697eca0..54e5ab5a6f61 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -67,8 +67,7 @@ def place_before(path, text, *before_text, escape=None): s.move([library], excludes=[ "**/gapic_version.py", "setup.py", - "testing/constraints-3.7.txt", - "testing/constraints-3.8.txt", + "testing/constraints*.txt", "README.rst", "google/cloud/logging/__init__.py", # generated types are hidden from users "google/cloud/logging_v2/__init__.py", @@ -95,6 +94,7 @@ def place_before(path, text, *before_text, escape=None): "google-cloud-testutils", "opentelemetry-sdk" ], + system_test_python_versions=["3.12"], unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) @@ -110,6 +110,13 @@ def place_before(path, text, *before_text, escape=None): "README.rst", # This repo has a customized README ], ) +s.replace("noxfile.py", +"""prerel_deps = \[ + "protobuf",""", +"""prerel_deps = [ + "google-cloud-audit-log", + "protobuf",""", +) # adjust .trampolinerc for environment tests s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()") diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 1e214751dd07..67ae0f70cfa6 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -40,11 +40,11 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", - "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", + "google-cloud-appengine-logging>=0.1.3, <2.0.0dev", + "google-cloud-audit-log >= 0.2.4, < 1.0.0dev", "google-cloud-core >= 2.0.0, <3.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", - "opentelemetry-api >= 1.0.0", + "opentelemetry-api >= 1.9.0", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/google-cloud-logging/testing/constraints-3.10.txt b/packages/google-cloud-logging/testing/constraints-3.10.txt index ed7f9aed2559..981d37ac6a88 100644 --- a/packages/google-cloud-logging/testing/constraints-3.10.txt +++ b/packages/google-cloud-logging/testing/constraints-3.10.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/packages/google-cloud-logging/testing/constraints-3.11.txt b/packages/google-cloud-logging/testing/constraints-3.11.txt index ed7f9aed2559..981d37ac6a88 100644 --- a/packages/google-cloud-logging/testing/constraints-3.11.txt +++ b/packages/google-cloud-logging/testing/constraints-3.11.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/packages/google-cloud-logging/testing/constraints-3.12.txt b/packages/google-cloud-logging/testing/constraints-3.12.txt index ed7f9aed2559..981d37ac6a88 100644 --- a/packages/google-cloud-logging/testing/constraints-3.12.txt +++ b/packages/google-cloud-logging/testing/constraints-3.12.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index fa18c36c0260..d3ab26cf2be1 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -9,6 +9,10 @@ google-auth==2.14.1 proto-plus==1.22.0 protobuf==3.20.2 google-cloud-core==2.0.0 +google-cloud-appengine-logging==0.1.3 +google-cloud-audit-log==0.2.4 +grpc-google-iam-v1==0.12.4 +opentelemetry-api==1.9.0 # Lower bound testing for optional dependencies django==3.2 diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt index 3f30789875d5..443e69ae2e2f 100644 --- a/packages/google-cloud-logging/testing/constraints-3.8.txt +++ b/packages/google-cloud-logging/testing/constraints-3.8.txt @@ -2,5 +2,21 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core==2.14.0 -proto-plus -protobuf +google-auth==2.14.1 +proto-plus==1.22.0 +protobuf==4.21.6 +google-cloud-core==2.0.0 +google-cloud-appengine-logging==0.1.3 +google-cloud-audit-log==0.2.4 +grpc-google-iam-v1==0.12.4 +opentelemetry-api==1.9.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 diff --git a/packages/google-cloud-logging/testing/constraints-3.9.txt b/packages/google-cloud-logging/testing/constraints-3.9.txt index ed7f9aed2559..10c5cba87b8c 100644 --- a/packages/google-cloud-logging/testing/constraints-3.9.txt +++ b/packages/google-cloud-logging/testing/constraints-3.9.txt @@ -2,5 +2,21 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api==1.9.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 From e1b7c9431c17b03dd487d37fb638780c3c02e79b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:16:36 -0400 Subject: [PATCH 823/855] chore(main): release 3.11.2 (#931) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 10 ++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 14 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 2ecd919c775a..0c91a27a2ef0 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.1" + ".": "3.11.2" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index fe470282897d..67352df908b1 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,16 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.2](https://github.com/googleapis/python-logging/compare/v3.11.1...v3.11.2) (2024-08-15) + + +### Bug Fixes + +* **deps:** Require google-cloud-appengine-logging>=0.1.3 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* **deps:** Require google-cloud-audit-log >= 0.2.4 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* **deps:** Require opentelemetry-api>=1.9.0 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* Fixed type hinting issue with specifying Transport class ([#930](https://github.com/googleapis/python-logging/issues/930)) ([e2875d6](https://github.com/googleapis/python-logging/commit/e2875d664c153a4328bd42790dfb7b4ac36a9048)) + ## [3.11.1](https://github.com/googleapis/python-logging/compare/v3.11.0...v3.11.1) (2024-08-06) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index f897ec818e47..d60f7f6c0aae 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
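
The version churn in these release commits is tool-driven: under the release-please convention, any line carrying the {x-release-please-version} trailing comment is treated as a version literal to rewrite, which is how gapic_version.py stays in lockstep with .release-please-manifest.json without hand edits. The annotated line is the whole contract:

    # release-please finds this marker and rewrites only the string literal;
    # the comment itself stays in place for the next release.
    __version__ = "3.11.2"  # {x-release-please-version}
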
# -__version__ = "3.11.1" # {x-release-please-version} +__version__ = "3.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index f897ec818e47..d60f7f6c0aae 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.1" # {x-release-please-version} +__version__ = "3.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 263c808b8fc8..0f640c540b63 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.1" + "version": "3.11.2" }, "snippets": [ { From e6de105a97168f60ef61a715605edb159a43e417 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:39:02 -0400 Subject: [PATCH 824/855] chore(python): update unittest workflow template (#939) Source-Link: https://github.com/googleapis/synthtool/commit/e6f91eb4db419b02af74197905b99fa00a6030c0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.kokoro/docker/docs/Dockerfile | 9 ++++----- .../.kokoro/publish-docs.sh | 20 +++++++++---------- 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index f30cb3775afc..f8bd8149fa87 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e -# created: 2024-07-08T19:25:35.862283192Z + digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 +# created: 2024-09-04T14:50:52.658171431Z diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile index 5205308b334d..e5410e296bd8 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index 38f083f05aa0..233205d580e9 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. 
-python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 16f927260a7d9f2708b50bc1f279d84da7383b0b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:10:48 -0400 Subject: [PATCH 825/855] build(python): release script update (#940) Source-Link: https://github.com/googleapis/synthtool/commit/71a72973dddbc66ea64073b53eda49f0d22e0942 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/.kokoro/release.sh | 2 +- packages/google-cloud-logging/.kokoro/release/common.cfg | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index f8bd8149fa87..597e0c3261ca 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 -# created: 2024-09-04T14:50:52.658171431Z + digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 +# created: 2024-09-16T21:04:09.091105552Z diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh index 8941eaef67e7..782a65bc52ed 100755 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ b/packages/google-cloud-logging/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/python-logging python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg index 4dc3167a532f..1669dffb999d 100644 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ b/packages/google-cloud-logging/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From 22b7b24c9ae4afee12e0d76ea8626bb22db45b47 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 15 Oct 2024 12:32:13 -0400 Subject: [PATCH 826/855] fix: 16-bit hexadecimal formatting for XCTC span IDs (#946) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: 16-bit hexadecimal formatting for XCTC span IDs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * addressed nit * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Fixed test + docstring --------- Co-authored-by: Owl Bot --- .../cloud/logging_v2/handlers/_helpers.py | 24 ++++++++- .../tests/unit/handlers/test__helpers.py | 51 ++++++++++++++----- .../tests/unit/handlers/test_handlers.py | 7 +-- .../unit/handlers/test_structured_log.py | 7 +-- 4 files changed, 67 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py index 98bf0cd2934f..ff5838e054c2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/_helpers.py @@ -174,13 +174,22 @@ def _parse_xcloud_trace(header): Args: header (str): the string extracted from the X_CLOUD_TRACE header Returns: - Tuple[Optional[dict], Optional[str], bool]: + Tuple[Optional[str], Optional[str], bool]: The trace_id, span_id and trace_sampled extracted from the header Each field will be None if not found. """ trace_id = span_id = None trace_sampled = False - # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace_Context format + + # As per the format described at https://cloud.google.com/trace/docs/trace-context#legacy-http-header + # "X-Cloud-Trace-Context: TRACE_ID[/SPAN_ID][;o=OPTIONS]" + # for example: + # "X-Cloud-Trace-Context: 105445aa7843bc8bf206b12000100000/1;o=1" + # + # We expect: + # * trace_id (optional, 128-bit hex string): "105445aa7843bc8bf206b12000100000" + # * span_id (optional, 16-bit hex string): "0000000000000001" (needs to be converted into 16 bit hex string) + # * trace_sampled (optional, bool): true if header: try: regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?" 
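
Before the second hunk below, it helps to spell out the conversion it adds: the legacy X-Cloud-Trace-Context header carries the span ID in decimal, while LogEntry wants a zero-padded 16-character hexadecimal string (the "16-bit" in the commit title is shorthand for this 64-bit value). Out-of-range inputs, zero or anything at or above 2**64, are dropped to None rather than emitted malformed. A sketch of the parsing contract, where parse_xctc is an illustrative name, not the library function itself:

    import re

    def parse_xctc(header):
        # Mirrors the contract of the hunk below for
        # "TRACE_ID[/SPAN_ID][;o=OPTIONS]" headers.
        trace_id = span_id = None
        trace_sampled = False
        match = re.match(r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?", header or "")
        if match:
            trace_id = match.group(1)
            raw_span = match.group(3)
            trace_sampled = match.group(5) == "1"
            try:
                # Decimal on the wire, 16-char (64-bit) hex in LogEntry.
                span_int = int(raw_span)
                span_id = f"{span_int:016x}" if 0 < span_int < 2**64 else None
            except (ValueError, TypeError):
                span_id = None
        return trace_id, span_id, trace_sampled

    assert parse_xctc("105445aa7843bc8bf206b12000100000/1;o=1") == (
        "105445aa7843bc8bf206b12000100000", "0000000000000001", True)
    assert parse_xctc("12345/67890") == ("12345", "0000000000010932", False)
    assert parse_xctc("12345/0") == ("12345", None, False)
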
@@ -188,6 +197,17 @@ def _parse_xcloud_trace(header): trace_id = match.group(1) span_id = match.group(3) trace_sampled = match.group(5) == "1" + + # Convert the span ID to 16-bit hexadecimal instead of decimal + try: + span_id_int = int(span_id) + if span_id_int > 0 and span_id_int < 2**64: + span_id = f"{span_id_int:016x}" + else: + span_id = None + except (ValueError, TypeError): + span_id = None + except IndexError: pass return trace_id, span_id, trace_sampled diff --git a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py index b8c8fc99d33a..d0577cf22d20 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test__helpers.py @@ -25,9 +25,13 @@ _FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" +_FLASK_SPAN_ID_XCTC_DEC = "12345" +_FLASK_SPAN_ID_XCTC_HEX = "3039".zfill(16) _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} _DJANGO_TRACE_ID = "django0id" _DJANGO_SPAN_ID = "span0django" +_DJANGO_SPAN_ID_XCTC_DEC = "54321" +_DJANGO_SPAN_ID_XCTC_HEX = "d431".zfill(16) _DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} @@ -64,8 +68,9 @@ def test_no_context_header(self): def test_xcloud_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID - expected_span_id = _FLASK_SPAN_ID - flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" + input_span_id = _FLASK_SPAN_ID_XCTC_DEC + expected_span_id = _FLASK_SPAN_ID_XCTC_HEX + flask_trace_id = f"{expected_trace_id}/{input_span_id};o=1" app = self.create_app() context = app.test_request_context( @@ -173,9 +178,10 @@ def test_xcloud_header(self): from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" - expected_span_id = _DJANGO_SPAN_ID + input_span_id = _DJANGO_SPAN_ID_XCTC_DEC + expected_span_id = _DJANGO_SPAN_ID_XCTC_HEX expected_trace_id = _DJANGO_TRACE_ID - django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" + django_trace_id = f"{expected_trace_id}/{input_span_id};o=1" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -501,25 +507,40 @@ def test_no_span(self): self.assertEqual(sampled, False) def test_no_trace(self): - header = "/12345" + input_span = "12345" + expected_span = "3039".zfill(16) + header = f"/{input_span}" trace_id, span_id, sampled = self._call_fut(header) self.assertIsNone(trace_id) - self.assertEqual(span_id, "12345") + self.assertEqual(span_id, expected_span) self.assertEqual(sampled, False) def test_with_span(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span}" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span}" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) self.assertEqual(sampled, False) + def test_with_span_decimal_not_in_bounds(self): + input_spans = ["0", "9" * 100] + + for input_span in input_spans: + expected_trace = "12345" + header = f"{expected_trace}/{input_span}" + trace_id, span_id, sampled = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) + def test_with_extra_characters(self): expected_trace = "12345" - expected_span = "67890" - header = 
f"{expected_trace}/{expected_span};abc" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};abc" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) @@ -527,8 +548,9 @@ def test_with_extra_characters(self): def test_with_explicit_no_sampled(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=0" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};o=0" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) @@ -536,8 +558,9 @@ def test_with_explicit_no_sampled(self): def test_with__sampled(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=1" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};o=1" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 535c1f4b121d..14b2e5cba6f8 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -140,7 +140,7 @@ def test_minimal_record(self): self.assertIsNone(record._labels) self.assertEqual(record._labels_str, "{}") - def test_record_with_request(self): + def test_record_with_xctc_request(self): """ test filter adds http request data when available """ @@ -161,8 +161,9 @@ def test_record_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" - expected_span = "456" - combined_trace = f"{expected_trace}/{expected_span};o=1" + input_span = "456" + expected_span = "1c8".zfill(16) + combined_trace = f"{expected_trace}/{input_span};o=1" expected_request = { "requestMethod": "GET", "requestUrl": expected_path, diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py index 920ca15eae34..90875874932e 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_structured_log.py @@ -382,7 +382,7 @@ def test_format_with_arguments(self): result = handler.format(record) self.assertIn(expected_result, result) - def test_format_with_request(self): + def test_format_with_xctc_request(self): import logging import json @@ -393,8 +393,9 @@ def test_format_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" - expected_span = "456" - trace_header = f"{expected_trace}/{expected_span};o=1" + input_span = "456" + expected_span = "1c8".zfill(16) + trace_header = f"{expected_trace}/{input_span};o=1" expected_payload = { "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, From ca3ebfaf71146eb795c749d98aa3837dc85d3ab0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:02:37 -0400 Subject: [PATCH 827/855] chore(main): release 3.11.3 (#948) Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 0c91a27a2ef0..a89b341963dd 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.2" + ".": "3.11.3" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 67352df908b1..202cdafa715e 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.3](https://github.com/googleapis/python-logging/compare/v3.11.2...v3.11.3) (2024-10-15) + + +### Bug Fixes + +* 16-bit hexadecimal formatting for XCTC span IDs ([#946](https://github.com/googleapis/python-logging/issues/946)) ([1f2b190](https://github.com/googleapis/python-logging/commit/1f2b190c0d1a7125d9412c157915d0011cdd4c47)) + ## [3.11.2](https://github.com/googleapis/python-logging/compare/v3.11.1...v3.11.2) (2024-08-15) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index d60f7f6c0aae..69b0cd300297 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.2" # {x-release-please-version} +__version__ = "3.11.3" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index d60f7f6c0aae..69b0cd300297 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.11.2" # {x-release-please-version} +__version__ = "3.11.3" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 0f640c540b63..ca75689b0809 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.2" + "version": "3.11.3" }, "snippets": [ { From 9e01de9ca68b4e61547cf0cdfa3d0ab618205d91 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 17:10:20 -0500 Subject: [PATCH 828/855] chore(python): update dependencies in .kokoro/docker/docs (#954) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): update dependencies in .kokoro/docker/docs Source-Link: https://github.com/googleapis/synthtool/commit/59171c8f83f3522ce186e4d110d27e772da4ba7a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add constraints file for Python 3.13 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove obsolete release configs and script --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/release-trigger.yml | 1 + .../.kokoro/docker/docs/requirements.txt | 56 +- .../.kokoro/docs/common.cfg | 1 + .../google-cloud-logging/.kokoro/release.sh | 29 - .../.kokoro/release/common.cfg | 69 --- .../.kokoro/release/release.cfg | 1 - .../.kokoro/requirements.in | 11 - .../.kokoro/requirements.txt | 537 ------------------ .../.kokoro/samples/python3.13/common.cfg | 60 ++ .../.kokoro/samples/python3.13/continuous.cfg | 6 + .../samples/python3.13/periodic-head.cfg | 11 + .../.kokoro/samples/python3.13/periodic.cfg | 6 + .../.kokoro/samples/python3.13/presubmit.cfg | 6 + .../.kokoro/test-samples-impl.sh | 3 +- .../google-cloud-logging/CONTRIBUTING.rst | 6 +- 
packages/google-cloud-logging/noxfile.py | 18 +- packages/google-cloud-logging/owlbot.py | 2 +- .../samples/snippets/noxfile.py | 2 +- .../testing/constraints-3.13.txt | 16 + 20 files changed, 158 insertions(+), 687 deletions(-) delete mode 100755 packages/google-cloud-logging/.kokoro/release.sh delete mode 100644 packages/google-cloud-logging/.kokoro/release/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/release/release.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/requirements.in delete mode 100644 packages/google-cloud-logging/.kokoro/requirements.txt create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg create mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg create mode 100644 packages/google-cloud-logging/testing/constraints-3.13.txt diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 597e0c3261ca..6301519a9a05 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 -# created: 2024-09-16T21:04:09.091105552Z + digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 +# created: 2024-11-12T12:09:45.821174897Z diff --git a/packages/google-cloud-logging/.github/release-trigger.yml b/packages/google-cloud-logging/.github/release-trigger.yml index d4ca94189e16..d47d146a9bc5 100644 --- a/packages/google-cloud-logging/.github/release-trigger.yml +++ b/packages/google-cloud-logging/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: python-logging diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index 7129c7715594..8bb0764594b1 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -1,42 +1,42 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 # via nox -distlib==0.3.8 \ - 
--hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via nox diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg index 36e4a6540088..3bf9925da058 100644 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ b/packages/google-cloud-logging/.kokoro/docs/common.cfg @@ -65,6 +65,7 @@ before_action { } } + ############################################# # this section merged from .kokoro/common_env_vars.cfg using owlbot.py diff --git a/packages/google-cloud-logging/.kokoro/release.sh b/packages/google-cloud-logging/.kokoro/release.sh deleted file mode 100755 index 782a65bc52ed..000000000000 --- a/packages/google-cloud-logging/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-logging/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") -cd github/python-logging -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-logging/.kokoro/release/common.cfg b/packages/google-cloud-logging/.kokoro/release/common.cfg deleted file mode 100644 index 1669dffb999d..000000000000 --- a/packages/google-cloud-logging/.kokoro/release/common.cfg +++ /dev/null @@ -1,69 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" - } - } -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. 
-action { - define_artifacts { - regex: "github/python-logging/**/*.tar.gz" - strip_prefix: "github/python-logging" - } -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/release/release.cfg b/packages/google-cloud-logging/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/requirements.in b/packages/google-cloud-logging/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0d0a..000000000000 --- a/packages/google-cloud-logging/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/packages/google-cloud-logging/.kokoro/requirements.txt b/packages/google-cloud-logging/.kokoro/requirements.txt deleted file mode 100644 index 9622baf0ba38..000000000000 --- a/packages/google-cloud-logging/.kokoro/requirements.txt +++ /dev/null @@ -1,537 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f - # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 - # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 - # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - 
--hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - 
--hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - 
--hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 - # via virtualenv -docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 - # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - 
--hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - 
--hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 - # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 - # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 - # via requests -importlib-metadata==8.0.0 \ - --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 - # via keyring -jaraco-functools==4.0.1 \ - --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - 
--hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - 
--hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - 
--hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - 
--hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via - # requests - # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 - # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 - # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 - # via -r requirements.in diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 000000000000..4eb8ee8be91b --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,60 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-logging/.kokoro/trampoline_v2.sh" + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000000..7e2973e3b659 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh index 55910c8ba178..53e365bc4e79 100755 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install 
--upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/packages/google-cloud-logging/CONTRIBUTING.rst b/packages/google-cloud-logging/CONTRIBUTING.rst index 4d4b79d75a3a..7bbacd5ca849 100644 --- a/packages/google-cloud-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-logging/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.12 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -227,6 +227,7 @@ We support: - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -234,6 +235,7 @@ We support: .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index 8a410531a581..b75e78ac3f09 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -34,7 +34,15 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -72,7 +80,6 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", "system", @@ -81,6 +88,7 @@ "lint_setup_py", "blacken", "docs", + "docfx", "format", ] @@ -177,7 +185,7 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
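For context on the UNIT_TEST_PYTHON_VERSIONS change above: nox expands one decorated session into a run per interpreter (unit-3.7 through unit-3.13), which is what the `nox -s unit-3.13` invocation in CONTRIBUTING.rst selects. A stripped-down sketch of the pattern, with the session body simplified; the real noxfile layers in constraints files and protobuf parametrization:

    import nox

    UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]

    @nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
    def unit(session):
        # nox registers one session per interpreter in the list,
        # named unit-<version>.
        session.install("pytest", "mock")
        session.install("-e", ".")
        session.run("pytest", "tests/unit")

Appending "3.13" to the list is all it takes for unit-3.13 to exist; the new Kokoro samples configs above do the analogous routing for the snippets noxfile's py-3.13 session.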
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -377,7 +385,7 @@ def docfx(session): ) -@nox.session(python="3.12") +@nox.session(python="3.13") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -385,7 +393,7 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 54e5ab5a6f61..3a0271ca1787 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -261,7 +261,7 @@ def place_before(path, text, *before_text, escape=None): # -------------------------------------------------------------------------- # add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"] +tracked_subdirs = ["continuous", "presubmit", "samples", "docs"] for subdir in tracked_subdirs: for path, subdirs, files in os.walk(f".kokoro/{subdir}"): for name in files: diff --git a/packages/google-cloud-logging/samples/snippets/noxfile.py b/packages/google-cloud-logging/samples/snippets/noxfile.py index 3b7135946fd5..c9a3d1ecbf2a 100644 --- a/packages/google-cloud-logging/samples/snippets/noxfile.py +++ b/packages/google-cloud-logging/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-logging/testing/constraints-3.13.txt b/packages/google-cloud-logging/testing/constraints-3.13.txt new file mode 100644 index 000000000000..981d37ac6a88 --- /dev/null +++ b/packages/google-cloud-logging/testing/constraints-3.13.txt @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
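An aside on how these testing/constraints-*.txt files are consumed: by convention in these repositories each supported interpreter gets one, the oldest pins every dependency at the exact floor declared in setup.py so the unit tests prove the minimums still work, and the newest — like this 3.13 file — lists bare names, which constrain nothing and so exercise the latest releases. A rough sketch of the install step, assuming the repository layout shown here; the helper is illustrative:

    import pathlib
    import subprocess
    import sys

    def install_with_constraints(python_version: str) -> None:
        # Constrain resolution with the per-interpreter file while
        # installing the package under test in editable mode.
        constraints = pathlib.Path("testing") / f"constraints-{python_version}.txt"
        subprocess.run(
            [sys.executable, "-m", "pip", "install", "-e", ".", "-c", str(constraints)],
            check=True,
        )

pip's -c/--constraint flag only bounds versions, it never adds install targets, so a bare name in the file continuing below is a harmless no-op.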
+google-api-core +google-auth +proto-plus +protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask From 13bba01efd27a792ec454f36519b48b951916b5a Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 21 Nov 2024 10:53:28 -0500 Subject: [PATCH 829/855] fix: require proto-plus >= 1.25 for Python 3.13 (#955) + --- packages/google-cloud-logging/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 67ae0f70cfa6..244b30c7cdee 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -47,6 +47,7 @@ "opentelemetry-api >= 1.9.0", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" From 573dbbf85c75ec7e2de154f7c2ef7ee6f86d6e4b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 09:42:44 -0700 Subject: [PATCH 830/855] chore(python): update dependencies in .kokoro/docker/docs (#957) Source-Link: https://github.com/googleapis/synthtool/commit/e808c98e1ab7eec3df2a95a05331619f7001daef Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/requirements.txt | 52 +++++++++++++++---- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 6301519a9a05..26306af66f81 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
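Worth unpacking in the setup.py change for #955 above: the three proto-plus entries are PEP 508 requirements whose environment markers select by interpreter, and every entry whose marker matches applies at once, so on Python 3.13 the effective floor is the strictest one, 1.25.0. A quick way to see the marker logic with the `packaging` library (assuming it is installed; pip vendors the same machinery):

    from packaging.requirements import Requirement

    req = Requirement("proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'")
    # pip keeps a requirement only where its marker evaluates to True.
    print(req.marker.evaluate({"python_version": "3.13"}))  # True
    print(req.marker.evaluate({"python_version": "3.12"}))  # False

On 3.12 this entry drops out and the `python_version>='3.11'` entry's floor of 1.22.2 still governs.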
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 +# created: 2024-12-17T00:59:58.625514486Z diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index 8bb0764594b1..f99a5c4aac7f 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + 
--hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From df2fd66462caf30a75ceb9b3436f543322ffd4f6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 11:24:04 -0500 Subject: [PATCH 831/855] chore(python): exclude .github/workflows/unittest.yml in renovate config (#959) Source-Link: https://github.com/googleapis/synthtool/commit/106d292bd234e5d9977231dcfbc4831e34eba13a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a Co-authored-by: Owl Bot --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 6 +++--- packages/google-cloud-logging/renovate.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 26306af66f81..10cf433a8b00 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
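The --generate-hashes output above is what enables pip's hash-checking mode: every downloaded artifact is hashed and compared against the pinned digests before anything is installed, so a tampered or substituted package fails the build. A minimal sketch of that verification step, assuming a locally downloaded wheel at a hypothetical path:

    import hashlib

    def verify_artifact(path, expected_sha256):
        """Return True iff the file's SHA-256 digest matches the pinned hash."""
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                digest.update(chunk)
        return digest.hexdigest() == expected_sha256

    # hypothetical local wheel, checked against the first tomli==2.2.1 digest above
    ok = verify_artifact(
        "tomli-2.2.1-py3-none-any.whl",
        "023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6",
    )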
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 -# created: 2024-12-17T00:59:58.625514486Z + digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a +# created: 2025-01-09T12:01:16.422459506Z diff --git a/packages/google-cloud-logging/renovate.json b/packages/google-cloud-logging/renovate.json index dde963098619..ff5e5c4c6016 100644 --- a/packages/google-cloud-logging/renovate.json +++ b/packages/google-cloud-logging/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] }, From f00a2cdf37d3dbdaa3028c89efa7b16f6f5498cb Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 22 Jan 2025 11:13:15 -0500 Subject: [PATCH 832/855] fix: Made `write_entries` raise `ValueError` on `ParseError`s (#958) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Log Logger errors internally rather than crashing * Documentation. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Made write_entries raise ValueError on ParseErrors * Finished fixing up merge conflicts * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * linting pt.2 * docstring change * Improved docstring message --------- Co-authored-by: Owl Bot --- .../google/cloud/logging_v2/_gapic.py | 6 ++- .../google/cloud/logging_v2/logger.py | 45 +++++++++++++++++-- .../tests/unit/test__gapic.py | 17 +++++++ 3 files changed, 64 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py index 039a830ce916..379665248b6c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/_gapic.py @@ -30,6 +30,7 @@ from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict +from google.protobuf.json_format import ParseError from google.cloud.logging_v2._helpers import entry_from_resource from google.cloud.logging_v2.sink import Sink @@ -151,7 +152,10 @@ def write_entries( Useful for checking whether the logging API endpoints are working properly before sending valuable data. 
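The hunk below wraps the entry-to-protobuf conversion so that a protobuf ParseError surfaces to callers as a ValueError, with the original exception chained as the cause. A minimal standalone sketch of the same pattern:

    from google.protobuf import struct_pb2
    from google.protobuf.json_format import ParseDict, ParseError

    def to_struct(info):
        """Convert a dict to a protobuf Struct, mapping ParseError to ValueError."""
        try:
            return ParseDict(info, struct_pb2.Struct())
        except ParseError as e:
            # chain the cause so callers can still inspect the protobuf error
            raise ValueError(f"Invalid log entry: {str(e)}") from e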
""" - log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + try: + log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + except ParseError as e: + raise ValueError(f"Invalid log entry: {str(e)}") from e request = WriteLogEntriesRequest( log_name=logger_name, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py index 27553994b12e..eaa8d2d360fb 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/logger.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/logger.py @@ -162,6 +162,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): api_repr = entry.to_api_repr() entries = [api_repr] + if google.cloud.logging_v2._instrumentation_emitted is False: entries = _add_instrumentation(entries, **kw) google.cloud.logging_v2._instrumentation_emitted = True @@ -200,18 +201,38 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a dictionary message + """Logs a dictionary message. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + The message must be able to be serializable to a Protobuf Struct. + It must be a dictionary of strings to one of the following: + + - :class:`str` + - :class:`int` + - :class:`float` + - :class:`bool` + - :class:`list[str|float|int|bool|list|dict|None]` + - :class:`dict[str, str|float|int|bool|list|dict|None]` + + For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value. + If the provided dictionary cannot be serialized into a Protobuf struct, + it will not be logged, and a :class:`ValueError` will be raised. + Args: - info (dict): the log entry information + info (dict[str, str|float|int|bool|list|dict|None]): + the log entry information. client (Optional[~logging_v2.client.Client]): The client to use. If not passed, falls back to the ``client`` stored on the current sink. kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. + + Raises: + ValueError: + if the dictionary message provided cannot be serialized into a Protobuf + struct. """ for field in _STRUCT_EXTRACTABLE_FIELDS: # attempt to copy relevant fields from the payload into the LogEntry body @@ -405,8 +426,22 @@ def log_text(self, text, **kw): def log_struct(self, info, **kw): """Add a struct entry to be logged during :meth:`commit`. + The message must be able to be serializable to a Protobuf Struct. + It must be a dictionary of strings to one of the following: + + - :class:`str` + - :class:`int` + - :class:`float` + - :class:`bool` + - :class:`list[str|float|int|bool|list|dict|None]` + - :class:`dict[str, str|float|int|bool|list|dict|None]` + + For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value. + If the provided dictionary cannot be serialized into a Protobuf struct, + it will not be logged, and a :class:`ValueError` will be raised during :meth:`commit`. + Args: - info (dict): The struct entry, + info (dict[str, str|float|int|bool|list|dict|None]): The struct entry, kw (Optional[dict]): Additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. 
""" @@ -451,6 +486,10 @@ def commit(self, *, client=None, partial_success=True): Whether a batch's valid entries should be written even if some other entry failed due to a permanent error such as INVALID_ARGUMENT or PERMISSION_DENIED. + + Raises: + ValueError: + if one of the messages in the batch cannot be successfully parsed. """ if client is None: client = self.client diff --git a/packages/google-cloud-logging/tests/unit/test__gapic.py b/packages/google-cloud-logging/tests/unit/test__gapic.py index 74ed47b1e30a..58e230129453 100644 --- a/packages/google-cloud-logging/tests/unit/test__gapic.py +++ b/packages/google-cloud-logging/tests/unit/test__gapic.py @@ -17,6 +17,8 @@ import google.auth.credentials import mock +from datetime import datetime + import google.cloud.logging from google.cloud import logging_v2 from google.cloud.logging_v2 import _gapic @@ -173,6 +175,21 @@ def test_write_entries_single(self): assert request.entries[0].resource.type == entry["resource"]["type"] assert request.entries[0].text_payload == "text" + def test_write_entries_parse_error(self): + client = self.make_logging_api() + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.write_log_entries), "__call__" + ) as call: + entry = { + "logName": self.LOG_PATH, + "resource": {"type": "global"}, + "jsonPayload": {"time": datetime.now()}, + } + client.write_entries([entry]) + + call.assert_not_called() + def test_logger_delete(self): client = self.make_logging_api() From 6dc0aa7ea3065875a4c625ec1741017504affd72 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Jan 2025 09:39:37 -0500 Subject: [PATCH 833/855] chore(main): release 3.11.4 (#956) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 9 +++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 13 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index a89b341963dd..5256172d0efb 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.3" + ".": "3.11.4" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 202cdafa715e..18201e5aaa36 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.4](https://github.com/googleapis/python-logging/compare/v3.11.3...v3.11.4) (2025-01-22) + + +### Bug Fixes + +* Made `write_entries` raise `ValueError` on `ParseError`s ([#958](https://github.com/googleapis/python-logging/issues/958)) ([5309478](https://github.com/googleapis/python-logging/commit/5309478c054d0f2b9301817fd835f2098f51dc3a)) +* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) ([7baed8e](https://github.com/googleapis/python-logging/commit/7baed8e968f0bfa6abdbf0715dc43822f2fba8ba)) +* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) 
([002b1fc](https://github.com/googleapis/python-logging/commit/002b1fcb395d77d94d7216560c30015b9aefca81)) + ## [3.11.3](https://github.com/googleapis/python-logging/compare/v3.11.2...v3.11.3) (2024-10-15) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 69b0cd300297..846b83eacf1d 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.3" # {x-release-please-version} +__version__ = "3.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 69b0cd300297..846b83eacf1d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.3" # {x-release-please-version} +__version__ = "3.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index ca75689b0809..530b6ccf5f3f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.3" + "version": "3.11.4" }, "snippets": [ { From da9d793e9efea7e90cb3b926edc8bce1dcac73f8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 11:27:13 -0500 Subject: [PATCH 834/855] chore(python): conditionally load credentials in .kokoro/build.sh (#970) Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-logging/.kokoro/build.sh | 20 +- .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 243 +++++++++++++++++- .../.kokoro/publish-docs.sh | 4 - 5 files changed, 251 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 10cf433a8b00..3f7634f25f8e 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
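The build.sh change in this patch (below) stops assuming that Kokoro credentials and project files are always provisioned, exporting them only when the files actually exist. The same guard, sketched in Python:

    import os

    gfile_dir = os.environ.get("KOKORO_GFILE_DIR", "")
    keyfile = os.path.join(gfile_dir, "service-account.json")

    # only point GOOGLE_APPLICATION_CREDENTIALS at the keyfile when it exists,
    # so builds without provisioned credentials can still run
    if os.path.isfile(keyfile):
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = keyfile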
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a -# created: 2025-01-09T12:01:16.422459506Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh index beab7b9d4334..d41b45aa1dd0 100755 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ b/packages/google-cloud-logging/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-logging" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in index 816817c672a1..586bd07037ae 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt index f99a5c4aac7f..a9360a25b707 100644 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + 
--hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + 
--hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + 
--hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + 
--hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + 
--hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh index 233205d580e9..4ed4aaf1346f 100755 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ b/packages/google-cloud-logging/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs From 669733c968e91ef711e37cfb10a94f56ea6ee669 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 13:29:54 -0500 Subject: [PATCH 835/855] chore: Update gapic-generator-python to v1.23.2 (#933) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.5 PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.19.1 PiperOrigin-RevId: 684571179 Source-Link: https://github.com/googleapis/googleapis/commit/fbdc238931e0a7a95c0f55e0cd3ad9e3de2535c8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3a2cdcfb80c2d0f5ec0cc663c2bab0a9486229d0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2EyY2RjZmI4MGMyZDBmNWVjMGNjNjYzYzJiYWIwYTk0ODYyMjlkMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Configure Ruby clients for google-ads-ad_manager PiperOrigin-RevId: 689139590 Source-Link: https://github.com/googleapis/googleapis/commit/296f2ac1aa9abccb7708b639b7839faa1809087f Source-Link: https://github.com/googleapis/googleapis-gen/commit/26927362e0aa1293258fc23fe3ce83c5c21d5fbb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjY5MjczNjJlMGFhMTI5MzI1OGZjMjNmZTNjZTgzYzVjMjFkNWZiYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove body selector from http rule PiperOrigin-RevId: 693215877 Source-Link: https://github.com/googleapis/googleapis/commit/bb6b53e326ce2db403d18be7158c265e07948920 Source-Link: https://github.com/googleapis/googleapis-gen/commit/db8b5a93484ad44055b2bacc4c7cf87e970fe0ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGI4YjVhOTM0ODRhZDQ0MDU1YjJiYWNjNGM3Y2Y4N2U5NzBmZTBlZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add support for opt-in debug logging fix: Fix typing issue with gRPC metadata when key ends in -bin chore: Update gapic-generator-python to v1.21.0 PiperOrigin-RevId: 705285820 Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.23.2 PiperOrigin-RevId: 732281673 Source-Link: 
https://github.com/googleapis/googleapis/commit/2f37e0ad56637325b24f8603284ccb6f05796f9a Source-Link: https://github.com/googleapis/googleapis-gen/commit/016b7538ba5a798f2ae423d4ccd7f82b06cdf6d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE2Yjc1MzhiYTVhNzk4ZjJhZTQyM2Q0Y2NkN2Y4MmIwNmNkZjZkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../config_service_v2/async_client.py | 425 +- .../services/config_service_v2/client.py | 538 ++- .../services/config_service_v2/pagers.py | 80 +- .../config_service_v2/transports/README.rst | 9 + .../config_service_v2/transports/base.py | 15 + .../config_service_v2/transports/grpc.py | 164 +- .../transports/grpc_asyncio.py | 253 +- .../logging_service_v2/async_client.py | 147 +- .../services/logging_service_v2/client.py | 260 +- .../services/logging_service_v2/pagers.py | 48 +- .../logging_service_v2/transports/README.rst | 9 + .../logging_service_v2/transports/base.py | 15 + .../logging_service_v2/transports/grpc.py | 108 +- .../transports/grpc_asyncio.py | 147 +- .../metrics_service_v2/async_client.py | 144 +- .../services/metrics_service_v2/client.py | 257 +- .../services/metrics_service_v2/pagers.py | 16 +- .../metrics_service_v2/transports/README.rst | 9 + .../metrics_service_v2/transports/base.py | 15 + .../metrics_service_v2/transports/grpc.py | 106 +- .../transports/grpc_asyncio.py | 143 +- .../snippet_metadata_google.logging.v2.json | 174 +- .../logging_v2/test_config_service_v2.py | 4139 +++++++++-------- .../logging_v2/test_logging_service_v2.py | 799 ++-- .../logging_v2/test_metrics_service_v2.py | 834 ++-- 25 files changed, 5190 insertions(+), 3664 deletions(-) create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst create mode 100644 packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 69fa55a4802b..a2a4126d922b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
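Among the generator changes in this patch is opt-in debug logging: the generated clients emit structured DEBUG records through the standard library's logging module, gated on _LOGGER.isEnabledFor(DEBUG) as the hunks below show. One way an application could opt in, given that each client logs under its module path (google.cloud.logging_v2.services....):

    import logging

    # route records somewhere visible, then enable DEBUG for the client's
    # logger hierarchy; child loggers such as ...config_service_v2.async_client
    # inherit the level from this parent
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("google.cloud.logging_v2").setLevel(logging.DEBUG)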
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -56,6 +56,15 @@ from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -209,9 +218,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client) - ) + get_transport_class = ConfigServiceV2Client.get_transport_class def __init__( self, @@ -281,6 +288,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + }, + ) + async def list_buckets( self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, @@ -288,7 +317,7 @@ async def list_buckets( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. @@ -343,8 +372,10 @@ async def sample_list_buckets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: @@ -358,7 +389,10 @@ async def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
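The widened metadata annotations above (Sequence[Tuple[str, Union[str, bytes]]]) encode a gRPC rule: custom metadata values are strings, except that keys ending in "-bin" carry raw bytes, which gRPC base64-encodes on the wire. For example (the "-bin" key here is hypothetical):

    # gRPC custom metadata: plain keys take str values, "*-bin" keys take bytes
    metadata = (
        ("x-goog-request-params", "parent=projects/my-project"),  # str value
        ("x-debug-trace-bin", b"\x00\x01\x02"),                   # bytes value
    )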
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -418,7 +452,7 @@ async def get_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -454,8 +488,10 @@ async def sample_get_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -501,7 +537,7 @@ async def create_bucket_async( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -545,8 +581,10 @@ async def sample_create_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -603,7 +641,7 @@ async def update_bucket_async( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. @@ -649,8 +687,10 @@ async def sample_update_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -707,7 +747,7 @@ async def create_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -746,8 +786,10 @@ async def sample_create_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -793,7 +835,7 @@ async def update_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. @@ -835,8 +877,10 @@ async def sample_update_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -882,7 +926,7 @@ async def delete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -920,8 +964,10 @@ async def sample_delete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -958,7 +1004,7 @@ async def undelete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. 
A bucket that has been deleted can be undeleted within the grace period of 7 @@ -993,8 +1039,10 @@ async def sample_undelete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1032,7 +1080,7 @@ async def list_views( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. @@ -1079,8 +1127,10 @@ async def sample_list_views(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: @@ -1094,7 +1144,10 @@ async def sample_list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1154,7 +1207,7 @@ async def get_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -1190,8 +1243,10 @@ async def sample_get_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogView: @@ -1235,7 +1290,7 @@ async def create_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1273,8 +1328,10 @@ async def sample_create_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1320,7 +1377,7 @@ async def update_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new @@ -1360,8 +1417,10 @@ async def sample_update_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1407,7 +1466,7 @@ async def delete_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1443,8 +1502,10 @@ async def sample_delete_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
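As the create_view documentation above notes, a bucket may hold at most 30 views. A minimal async sketch of adding one, assuming hypothetical resource names and a view filter restricted to a single log ID:

    import asyncio

    from google.cloud import logging_v2

    async def main():
        client = logging_v2.ConfigServiceV2AsyncClient()
        view = await client.create_view(
            request=logging_v2.types.CreateViewRequest(
                parent="projects/my-project/locations/global/buckets/my-bucket",
                view_id="stdout-only",
                view=logging_v2.types.LogView(filter='LOG_ID("stdout")'),
            )
        )
        print(view.name)

    asyncio.run(main())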
# - Use the request object if provided (there's no risk of modifying the input as @@ -1482,7 +1543,7 @@ async def list_sinks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. @@ -1533,8 +1594,10 @@ async def sample_list_sinks(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: @@ -1547,7 +1610,10 @@ async def sample_list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1608,7 +1674,7 @@ async def get_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1661,8 +1727,10 @@ async def sample_get_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1680,7 +1748,10 @@ async def sample_get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1731,7 +1802,7 @@ async def create_sink( sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. 
The export of newly-ingested log entries begins @@ -1800,8 +1871,10 @@ async def sample_create_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1819,7 +1892,10 @@ async def sample_create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1873,7 +1949,7 @@ async def update_sink( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -1966,8 +2042,10 @@ async def sample_update_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1985,7 +2063,10 @@ async def sample_update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2041,7 +2122,7 @@ async def delete_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2093,13 +2174,18 @@ async def sample_delete_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
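For the flattened create_sink signature shown above, a minimal sketch that routes matching entries to a Cloud Storage bucket (all resource names are hypothetical; the synchronous client updated later in this patch has the same shape):

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    sink = logging_v2.types.LogSink(
        name="my-sink",
        destination="storage.googleapis.com/my-export-bucket",
        filter="severity>=WARNING",
    )
    created = client.create_sink(parent="projects/my-project", sink=sink)
    print(created.writer_identity)  # service account that performs the export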
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2150,7 +2236,7 @@ async def create_link( link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2221,8 +2307,10 @@ async def sample_create_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2236,7 +2324,10 @@ async def sample_create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2298,7 +2389,7 @@ async def delete_link( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2350,8 +2441,10 @@ async def sample_delete_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -2372,7 +2465,10 @@ async def sample_delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2430,7 +2526,7 @@ async def list_links( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksAsyncPager: r"""Lists links. @@ -2479,8 +2575,10 @@ async def sample_list_links(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: @@ -2494,7 +2592,10 @@ async def sample_list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2555,7 +2656,7 @@ async def get_link( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. @@ -2602,8 +2703,10 @@ async def sample_get_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -2614,7 +2717,10 @@ async def sample_get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
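The recurring has_flattened_params rewrite (applied again just below) changes what counts as "provided": any([...]) treats falsy-but-set values such as an empty string as absent, while the new check treats only None as absent. An illustrative comparison, not library code:

    parent = ""  # falsy, but explicitly passed by the caller

    old_check = any([parent])  # False: the empty string looks unset
    new_check = len([p for p in [parent] if p is not None]) > 0  # True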
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2662,7 +2768,7 @@ async def list_exclusions( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2714,8 +2820,10 @@ async def sample_list_exclusions(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: @@ -2728,7 +2836,10 @@ async def sample_list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2789,7 +2900,7 @@ async def get_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2842,8 +2953,10 @@ async def sample_get_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2859,7 +2972,10 @@ async def sample_get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2910,7 +3026,7 @@ async def create_exclusion( exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -2980,8 +3096,10 @@ async def sample_create_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2997,7 +3115,10 @@ async def sample_create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3051,7 +3172,7 @@ async def update_exclusion( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3132,8 +3253,10 @@ async def sample_update_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3149,7 +3272,10 @@ async def sample_update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3203,7 +3329,7 @@ async def delete_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3254,13 +3380,18 @@ async def sample_delete_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3306,7 +3437,7 @@ async def get_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3356,8 +3487,10 @@ async def sample_get_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3413,7 +3546,7 @@ async def update_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -3468,8 +3601,10 @@ async def sample_update_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3526,7 +3661,7 @@ async def get_settings( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3601,8 +3736,10 @@ async def sample_get_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3614,7 +3751,10 @@ async def sample_get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3665,7 +3805,7 @@ async def update_settings( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3747,8 +3887,10 @@ async def sample_update_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3760,7 +3902,10 @@ async def sample_update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
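A minimal async sketch of the update_settings call whose argument check changes just below. The request form is used here because, per that check, the request argument and the flattened settings/update_mask keywords are mutually exclusive; all names are hypothetical:

    import asyncio

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    async def main():
        client = logging_v2.ConfigServiceV2AsyncClient()
        updated = await client.update_settings(
            request=logging_v2.types.UpdateSettingsRequest(
                name="organizations/123456789/settings",
                settings=logging_v2.types.Settings(disable_default_sink=True),
                update_mask=field_mask_pb2.FieldMask(
                    paths=["disable_default_sink"]
                ),
            )
        )
        print(updated)

    asyncio.run(main())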
- has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3811,7 +3956,7 @@ async def copy_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3853,8 +3998,10 @@ async def sample_copy_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -3906,7 +4053,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -3917,8 +4064,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -3931,11 +4080,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -3963,7 +4108,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3974,8 +4119,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3988,11 +4135,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -4020,7 +4163,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -4035,8 +4178,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -4048,11 +4193,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 8c210c0cda82..653a350aec71 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers @@ -576,52 +588,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConfigServiceV2Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -729,6 +734,10 @@ def __init__( # Initialize the universe domain validation. 
self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -777,7 +786,7 @@ def __init__( transport_init: Union[ Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] ] = ( - type(self).get_transport_class(transport) + ConfigServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigServiceV2Transport], transport) ) @@ -794,6 +803,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + }, + ) + def list_buckets( self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, @@ -801,7 +833,7 @@ def list_buckets( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsPager: r"""Lists log buckets. @@ -856,8 +888,10 @@ def sample_list_buckets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: @@ -871,7 +905,10 @@ def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -928,7 +965,7 @@ def get_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -964,8 +1001,10 @@ def sample_get_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
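With the __init__ additions above, constructing a synchronous client emits a structured DEBUG record describing the service, universe domain, and credential type. One way to surface it with the standard library, assuming a google-api-core recent enough to ship client_logging:

    import logging

    logging.basicConfig(level=logging.DEBUG)

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()  # logs "Created client ..." at DEBUG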
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1009,7 +1048,7 @@ def create_bucket_async( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -1053,8 +1092,10 @@ def sample_create_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1109,7 +1150,7 @@ def update_bucket_async( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Updates a log bucket asynchronously. @@ -1155,8 +1196,10 @@ def sample_update_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1211,7 +1254,7 @@ def create_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -1250,8 +1293,10 @@ def sample_create_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
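The new _add_cred_info_for_auth_errors hook added above appends a JSON description of the active credential to 401/403/404 errors when google-auth >= 2.35.0 provides get_cred_info. A hedged sketch of reading it (the bucket name is hypothetical):

    from google.api_core import exceptions as core_exceptions
    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    try:
        client.get_bucket(
            request={"name": "projects/my-project/locations/global/buckets/missing"}
        )
    except core_exceptions.GoogleAPICallError as exc:
        # For 401/403/404 responses the final details entry may now be a JSON
        # blob describing the credential that made the call.
        print(exc.code, exc.details)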
Returns: google.cloud.logging_v2.types.LogBucket: @@ -1295,7 +1340,7 @@ def update_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. @@ -1337,8 +1382,10 @@ def sample_update_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1382,7 +1429,7 @@ def delete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -1420,8 +1467,10 @@ def sample_delete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1456,7 +1505,7 @@ def undelete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -1491,8 +1540,10 @@ def sample_undelete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1528,7 +1579,7 @@ def list_views( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. @@ -1575,8 +1626,10 @@ def sample_list_views(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: @@ -1590,7 +1643,10 @@ def sample_list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1647,7 +1703,7 @@ def get_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -1683,8 +1739,10 @@ def sample_get_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1728,7 +1786,7 @@ def create_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1766,8 +1824,10 @@ def sample_create_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1811,7 +1871,7 @@ def update_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new @@ -1851,8 +1911,10 @@ def sample_update_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1896,7 +1958,7 @@ def delete_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1932,8 +1994,10 @@ def sample_delete_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1969,7 +2033,7 @@ def list_sinks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. @@ -2020,8 +2084,10 @@ def sample_list_sinks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: @@ -2034,7 +2100,10 @@ def sample_list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2092,7 +2161,7 @@ def get_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -2145,8 +2214,10 @@ def sample_get_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2164,7 +2235,10 @@ def sample_get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2214,7 +2288,7 @@ def create_sink( sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -2283,8 +2357,10 @@ def sample_create_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2302,7 +2378,10 @@ def sample_create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2353,7 +2432,7 @@ def update_sink( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -2446,8 +2525,10 @@ def sample_update_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2465,7 +2546,10 @@ def sample_update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2518,7 +2602,7 @@ def delete_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2570,13 +2654,18 @@ def sample_delete_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2624,7 +2713,7 @@ def create_link( link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2695,8 +2784,10 @@ def sample_create_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2710,7 +2801,10 @@ def sample_create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
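The docstring change repeated throughout these hunks widens metadata values from `str` to `Union[str, bytes]`: gRPC reserves the `-bin` key suffix for binary metadata, and values under such keys must be `bytes`. A minimal usage sketch, with hypothetical header names and project id (the client class and the flattened `parent` parameter of `list_sinks` come from the hunks above):

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()
    pager = client.list_sinks(
        parent="projects/my-project",  # hypothetical project
        metadata=[
            ("x-custom-note", "plain string value"),  # ordinary key -> str value
            ("x-custom-blob-bin", b"\x00\x01\x02"),   # `-bin` key -> bytes value
        ],
    )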
- has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2769,7 +2863,7 @@ def delete_link( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2821,8 +2915,10 @@ def sample_delete_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2843,7 +2939,10 @@ def sample_delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2898,7 +2997,7 @@ def list_links( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksPager: r"""Lists links. @@ -2947,8 +3046,10 @@ def sample_list_links(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: @@ -2962,7 +3063,10 @@ def sample_list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
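The recurring `has_flattened_params` rewrite is more than a style change: `any([...])` tests truthiness, so an explicitly supplied but falsy value such as `""` or `0` slipped past the old guard, while the new `is not None` test counts it as a flattened parameter. In plain Python:

    parent = ""  # explicitly provided, but falsy

    any([parent])                                    # False: old check misses it
    len([p for p in [parent] if p is not None]) > 0  # True: new check catches it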
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3020,7 +3124,7 @@ def get_link( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. @@ -3067,8 +3171,10 @@ def sample_get_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -3079,7 +3185,10 @@ def sample_get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3126,7 +3235,7 @@ def list_exclusions( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3178,8 +3287,10 @@ def sample_list_exclusions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: @@ -3192,7 +3303,10 @@ def sample_list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3250,7 +3364,7 @@ def get_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -3303,8 +3417,10 @@ def sample_get_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3320,7 +3436,10 @@ def sample_get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3368,7 +3487,7 @@ def create_exclusion( exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -3438,8 +3557,10 @@ def sample_create_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3455,7 +3576,10 @@ def sample_create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3506,7 +3630,7 @@ def update_exclusion( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3587,8 +3711,10 @@ def sample_update_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3604,7 +3730,10 @@ def sample_update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3655,7 +3784,7 @@ def delete_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3706,13 +3835,18 @@ def sample_delete_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3755,7 +3889,7 @@ def get_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3805,8 +3939,10 @@ def sample_get_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3860,7 +3996,7 @@ def update_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -3915,8 +4051,10 @@ def sample_update_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3971,7 +4109,7 @@ def get_settings( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -4046,8 +4184,10 @@ def sample_get_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -4059,7 +4199,10 @@ def sample_get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4107,7 +4250,7 @@ def update_settings( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -4189,8 +4332,10 @@ def sample_update_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -4202,7 +4347,10 @@ def sample_update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4250,7 +4398,7 @@ def copy_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4292,8 +4440,10 @@ def sample_copy_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -4356,7 +4506,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -4367,8 +4517,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -4381,11 +4533,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -4396,16 +4544,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4413,7 +4565,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -4424,8 +4576,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -4438,11 +4592,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -4453,16 +4603,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -4470,7 +4624,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -4485,8 +4639,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -4498,11 +4654,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 1a1ba7c25977..f151a7bf665e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -66,7 +66,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -80,8 +80,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -140,7 +142,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -154,8 +156,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -218,7 +222,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -232,8 +236,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListViewsRequest(request) @@ -292,7 +298,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -306,8 +312,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListViewsRequest(request) @@ -370,7 +378,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -384,8 +392,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -444,7 +454,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -458,8 +468,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -522,7 +534,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -536,8 +548,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -596,7 +610,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -610,8 +624,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -674,7 +690,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -688,8 +704,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) @@ -748,7 +766,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -762,8 +780,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst new file mode 100644 index 000000000000..4ea84879601d --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ConfigServiceV2Transport` is the ABC for all transports. +- public child `ConfigServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ConfigServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseConfigServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ConfigServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index ac03c526de84..fcd20e25a325 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -374,6 +374,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 0764afcfd301..f83ac9b6af7c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
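The base.py hunk above registers cancel_operation, get_operation, and list_operations in `_prep_wrapped_messages`; that is what lets the client methods earlier in this diff swap their ad-hoc `gapic_v1.method.wrap_method` calls for a lookup in `self._transport._wrapped_methods`, and wrap the send in a try/except that annotates auth errors before re-raising. A rough sketch of what `wrap_method` contributes, assuming google-api-core is installed and using a placeholder callable:

    from google.api_core import gapic_v1

    def list_operations(request, *, retry=None, timeout=None, metadata=()):
        """Placeholder standing in for the transport-level callable."""

    wrapped = gapic_v1.method.wrap_method(
        list_operations,
        default_timeout=None,  # mirrors the diff
        client_info=gapic_v1.client_info.ClientInfo(),
    )
    # The wrapped callable applies default retry/timeout settings and
    # attaches the x-goog-api-client header derived from client_info.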
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +25,92 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): """gRPC backend transport for ConfigServiceV2. @@ -184,7 +265,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -248,7 +334,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -274,7 +362,7 @@ def list_buckets( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_buckets" not in self._stubs: - self._stubs["list_buckets"] = self.grpc_channel.unary_unary( + self._stubs["list_buckets"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -300,7 +388,7 @@ def get_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_bucket" not in self._stubs: - self._stubs["get_bucket"] = self.grpc_channel.unary_unary( + self._stubs["get_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -329,7 +417,7 @@ def create_bucket_async( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_bucket_async" not in self._stubs: - self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -361,7 +449,7 @@ def update_bucket_async( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_bucket_async" not in self._stubs: - self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -389,7 +477,7 @@ def create_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_bucket" not in self._stubs: - self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -421,7 +509,7 @@ def update_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
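The `_LoggingClientInterceptor` added above only emits when two conditions hold: `google.api_core.client_logging` is importable (`CLIENT_LOGGING_SUPPORTED`) and the module logger is enabled for DEBUG. A sketch of turning the logs on from application code; the logger name follows from `_LOGGER = std_logging.getLogger(__name__)` and this module's path, and `basicConfig` is just one way to attach a handler:

    import logging

    logging.basicConfig(level=logging.INFO)  # ensure a handler exists
    logging.getLogger(
        "google.cloud.logging_v2.services.config_service_v2.transports.grpc"
    ).setLevel(logging.DEBUG)
    # Subsequent ConfigServiceV2 calls over gRPC now log request/response
    # payloads and metadata at DEBUG via the interceptor.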
if "update_bucket" not in self._stubs: - self._stubs["update_bucket"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -452,7 +540,7 @@ def delete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_bucket" not in self._stubs: - self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -480,7 +568,7 @@ def undelete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "undelete_bucket" not in self._stubs: - self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -506,7 +594,7 @@ def list_views( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_views" not in self._stubs: - self._stubs["list_views"] = self.grpc_channel.unary_unary( + self._stubs["list_views"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -532,7 +620,7 @@ def get_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_view" not in self._stubs: - self._stubs["get_view"] = self.grpc_channel.unary_unary( + self._stubs["get_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -559,7 +647,7 @@ def create_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_view" not in self._stubs: - self._stubs["create_view"] = self.grpc_channel.unary_unary( + self._stubs["create_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -589,7 +677,7 @@ def update_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_view" not in self._stubs: - self._stubs["update_view"] = self.grpc_channel.unary_unary( + self._stubs["update_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -618,7 +706,7 @@ def delete_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_view" not in self._stubs: - self._stubs["delete_view"] = self.grpc_channel.unary_unary( + self._stubs["delete_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -644,7 +732,7 @@ def list_sinks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_sinks" not in self._stubs: - self._stubs["list_sinks"] = self.grpc_channel.unary_unary( + self._stubs["list_sinks"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -670,7 +758,7 @@ def get_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_sink" not in self._stubs: - self._stubs["get_sink"] = self.grpc_channel.unary_unary( + self._stubs["get_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -700,7 +788,7 @@ def create_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_sink" not in self._stubs: - self._stubs["create_sink"] = self.grpc_channel.unary_unary( + self._stubs["create_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -731,7 +819,7 @@ def update_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_sink" not in self._stubs: - self._stubs["update_sink"] = self.grpc_channel.unary_unary( + self._stubs["update_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -758,7 +846,7 @@ def delete_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_sink" not in self._stubs: - self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + self._stubs["delete_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -787,7 +875,7 @@ def create_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_link" not in self._stubs: - self._stubs["create_link"] = self.grpc_channel.unary_unary( + self._stubs["create_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -814,7 +902,7 @@ def delete_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_link" not in self._stubs: - self._stubs["delete_link"] = self.grpc_channel.unary_unary( + self._stubs["delete_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -840,7 +928,7 @@ def list_links( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_links" not in self._stubs: - self._stubs["list_links"] = self.grpc_channel.unary_unary( + self._stubs["list_links"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -866,7 +954,7 @@ def get_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_link" not in self._stubs: - self._stubs["get_link"] = self.grpc_channel.unary_unary( + self._stubs["get_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -895,7 +983,7 @@ def list_exclusions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_exclusions" not in self._stubs: - self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -921,7 +1009,7 @@ def get_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_exclusion" not in self._stubs: - self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -949,7 +1037,7 @@ def create_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_exclusion" not in self._stubs: - self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -976,7 +1064,7 @@ def update_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_exclusion" not in self._stubs: - self._stubs["update_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -1002,7 +1090,7 @@ def delete_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_exclusion" not in self._stubs: - self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1037,7 +1125,7 @@ def get_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_cmek_settings" not in self._stubs: - self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1079,7 +1167,7 @@ def update_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_cmek_settings" not in self._stubs: - self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1115,7 +1203,7 @@ def get_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_settings" not in self._stubs: - self._stubs["get_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1158,7 +1246,7 @@ def update_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_settings" not in self._stubs: - self._stubs["update_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1185,7 +1273,7 @@ def copy_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "copy_log_entries" not in self._stubs: - self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1193,7 +1281,7 @@ def copy_log_entries( return self._stubs["copy_log_entries"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -1205,7 +1293,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1222,7 +1310,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1241,7 +1329,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index e4a8d16f9743..5047ae67f45a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config @@ -33,6 +40,82 @@ from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert the gRPC response metadata into a plain dict for logging + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): """gRPC AsyncIO backend transport for ConfigServiceV2. @@ -230,7 +313,13 @@ def __init__( ], ) - # Wrap messages.
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -253,7 +342,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -281,7 +370,7 @@ def list_buckets( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_buckets" not in self._stubs: - self._stubs["list_buckets"] = self.grpc_channel.unary_unary( + self._stubs["list_buckets"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -309,7 +398,7 @@ def get_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_bucket" not in self._stubs: - self._stubs["get_bucket"] = self.grpc_channel.unary_unary( + self._stubs["get_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -340,7 +429,7 @@ def create_bucket_async( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_bucket_async" not in self._stubs: - self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -374,7 +463,7 @@ def update_bucket_async( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_bucket_async" not in self._stubs: - self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -404,7 +493,7 @@ def create_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_bucket" not in self._stubs: - self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -438,7 +527,7 @@ def update_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
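The interceptor attached above has to render three kinds of request/response objects as JSON-ish text: proto-plus messages (which expose a to_json classmethod), raw generated protobuf messages (handled by json_format.MessageToJson), and anything else (pickled as a last resort). That dispatch, pulled out as a standalone sketch; the helper name _serialize_for_log is mine, not the patch's:

import pickle

import proto  # type: ignore
import google.protobuf.message
from google.protobuf.json_format import MessageToJson

def _serialize_for_log(msg) -> str:
    # proto-plus wrapper types carry their own JSON codec.
    if isinstance(msg, proto.Message):
        return type(msg).to_json(msg)
    # Plain generated protobuf messages go through json_format.
    if isinstance(msg, google.protobuf.message.Message):
        return MessageToJson(msg)
    # Fallback: a repr built from the pickled bytes, mirroring the patch.
    return f"{type(msg).__name__}: {pickle.dumps(msg)}"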
if "update_bucket" not in self._stubs: - self._stubs["update_bucket"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -469,7 +558,7 @@ def delete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_bucket" not in self._stubs: - self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -497,7 +586,7 @@ def undelete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "undelete_bucket" not in self._stubs: - self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -525,7 +614,7 @@ def list_views( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_views" not in self._stubs: - self._stubs["list_views"] = self.grpc_channel.unary_unary( + self._stubs["list_views"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -551,7 +640,7 @@ def get_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_view" not in self._stubs: - self._stubs["get_view"] = self.grpc_channel.unary_unary( + self._stubs["get_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -580,7 +669,7 @@ def create_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_view" not in self._stubs: - self._stubs["create_view"] = self.grpc_channel.unary_unary( + self._stubs["create_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -612,7 +701,7 @@ def update_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_view" not in self._stubs: - self._stubs["update_view"] = self.grpc_channel.unary_unary( + self._stubs["update_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -641,7 +730,7 @@ def delete_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_view" not in self._stubs: - self._stubs["delete_view"] = self.grpc_channel.unary_unary( + self._stubs["delete_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -669,7 +758,7 @@ def list_sinks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_sinks" not in self._stubs: - self._stubs["list_sinks"] = self.grpc_channel.unary_unary( + self._stubs["list_sinks"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -695,7 +784,7 @@ def get_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_sink" not in self._stubs: - self._stubs["get_sink"] = self.grpc_channel.unary_unary( + self._stubs["get_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -727,7 +816,7 @@ def create_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_sink" not in self._stubs: - self._stubs["create_sink"] = self.grpc_channel.unary_unary( + self._stubs["create_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -760,7 +849,7 @@ def update_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_sink" not in self._stubs: - self._stubs["update_sink"] = self.grpc_channel.unary_unary( + self._stubs["update_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -787,7 +876,7 @@ def delete_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_sink" not in self._stubs: - self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + self._stubs["delete_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -818,7 +907,7 @@ def create_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_link" not in self._stubs: - self._stubs["create_link"] = self.grpc_channel.unary_unary( + self._stubs["create_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -847,7 +936,7 @@ def delete_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_link" not in self._stubs: - self._stubs["delete_link"] = self.grpc_channel.unary_unary( + self._stubs["delete_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -875,7 +964,7 @@ def list_links( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_links" not in self._stubs: - self._stubs["list_links"] = self.grpc_channel.unary_unary( + self._stubs["list_links"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -901,7 +990,7 @@ def get_link( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_link" not in self._stubs: - self._stubs["get_link"] = self.grpc_channel.unary_unary( + self._stubs["get_link"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -931,7 +1020,7 @@ def list_exclusions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_exclusions" not in self._stubs: - self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -959,7 +1048,7 @@ def get_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_exclusion" not in self._stubs: - self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -989,7 +1078,7 @@ def create_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_exclusion" not in self._stubs: - self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -1018,7 +1107,7 @@ def update_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_exclusion" not in self._stubs: - self._stubs["update_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -1044,7 +1133,7 @@ def delete_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_exclusion" not in self._stubs: - self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1081,7 +1170,7 @@ def get_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_cmek_settings" not in self._stubs: - self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1124,7 +1213,7 @@ def update_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_cmek_settings" not in self._stubs: - self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1162,7 +1251,7 @@ def get_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_settings" not in self._stubs: - self._stubs["get_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1207,7 +1296,7 @@ def update_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_settings" not in self._stubs: - self._stubs["update_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1236,7 +1325,7 @@ def copy_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "copy_log_entries" not in self._stubs: - self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1246,72 +1335,72 @@ def copy_log_entries( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_buckets: gapic_v1.method_async.wrap_method( + self.list_buckets: self._wrap_method( self.list_buckets, default_timeout=None, client_info=client_info, ), - self.get_bucket: gapic_v1.method_async.wrap_method( + self.get_bucket: self._wrap_method( self.get_bucket, default_timeout=None, client_info=client_info, ), - self.create_bucket_async: gapic_v1.method_async.wrap_method( + self.create_bucket_async: self._wrap_method( self.create_bucket_async, default_timeout=None, client_info=client_info, ), - self.update_bucket_async: gapic_v1.method_async.wrap_method( + self.update_bucket_async: self._wrap_method( self.update_bucket_async, default_timeout=None, client_info=client_info, ), - self.create_bucket: gapic_v1.method_async.wrap_method( + self.create_bucket: self._wrap_method( self.create_bucket, default_timeout=None, client_info=client_info, ), - self.update_bucket: gapic_v1.method_async.wrap_method( + self.update_bucket: self._wrap_method( self.update_bucket, default_timeout=None, client_info=client_info, ), - self.delete_bucket: gapic_v1.method_async.wrap_method( + self.delete_bucket: self._wrap_method( self.delete_bucket, default_timeout=None, client_info=client_info, ), - self.undelete_bucket: gapic_v1.method_async.wrap_method( + self.undelete_bucket: self._wrap_method( self.undelete_bucket, default_timeout=None, client_info=client_info, ), - self.list_views: gapic_v1.method_async.wrap_method( + self.list_views: self._wrap_method( self.list_views, default_timeout=None, client_info=client_info, ), - self.get_view: gapic_v1.method_async.wrap_method( + self.get_view: self._wrap_method( self.get_view, default_timeout=None, client_info=client_info, ), - self.create_view: gapic_v1.method_async.wrap_method( + self.create_view: self._wrap_method( self.create_view, default_timeout=None, client_info=client_info, ), - self.update_view: gapic_v1.method_async.wrap_method( + self.update_view: self._wrap_method( self.update_view, default_timeout=None, client_info=client_info, ), - self.delete_view: gapic_v1.method_async.wrap_method( + self.delete_view: self._wrap_method( self.delete_view, default_timeout=None, client_info=client_info, ), - self.list_sinks: gapic_v1.method_async.wrap_method( + self.list_sinks: self._wrap_method( self.list_sinks, default_retry=retries.AsyncRetry( initial=0.1, @@ -1327,7 +1416,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_sink: gapic_v1.method_async.wrap_method( + self.get_sink: self._wrap_method( self.get_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1343,12 +1432,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_sink: gapic_v1.method_async.wrap_method( + self.create_sink: self._wrap_method( self.create_sink, default_timeout=120.0, client_info=client_info, ), - self.update_sink: gapic_v1.method_async.wrap_method( + self.update_sink: self._wrap_method( 
self.update_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1364,7 +1453,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_sink: gapic_v1.method_async.wrap_method( + self.delete_sink: self._wrap_method( self.delete_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1380,27 +1469,27 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_link: gapic_v1.method_async.wrap_method( + self.create_link: self._wrap_method( self.create_link, default_timeout=None, client_info=client_info, ), - self.delete_link: gapic_v1.method_async.wrap_method( + self.delete_link: self._wrap_method( self.delete_link, default_timeout=None, client_info=client_info, ), - self.list_links: gapic_v1.method_async.wrap_method( + self.list_links: self._wrap_method( self.list_links, default_timeout=None, client_info=client_info, ), - self.get_link: gapic_v1.method_async.wrap_method( + self.get_link: self._wrap_method( self.get_link, default_timeout=None, client_info=client_info, ), - self.list_exclusions: gapic_v1.method_async.wrap_method( + self.list_exclusions: self._wrap_method( self.list_exclusions, default_retry=retries.AsyncRetry( initial=0.1, @@ -1416,7 +1505,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_exclusion: gapic_v1.method_async.wrap_method( + self.get_exclusion: self._wrap_method( self.get_exclusion, default_retry=retries.AsyncRetry( initial=0.1, @@ -1432,17 +1521,17 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_exclusion: gapic_v1.method_async.wrap_method( + self.create_exclusion: self._wrap_method( self.create_exclusion, default_timeout=120.0, client_info=client_info, ), - self.update_exclusion: gapic_v1.method_async.wrap_method( + self.update_exclusion: self._wrap_method( self.update_exclusion, default_timeout=120.0, client_info=client_info, ), - self.delete_exclusion: gapic_v1.method_async.wrap_method( + self.delete_exclusion: self._wrap_method( self.delete_exclusion, default_retry=retries.AsyncRetry( initial=0.1, @@ -1458,35 +1547,59 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_cmek_settings: gapic_v1.method_async.wrap_method( + self.get_cmek_settings: self._wrap_method( self.get_cmek_settings, default_timeout=None, client_info=client_info, ), - self.update_cmek_settings: gapic_v1.method_async.wrap_method( + self.update_cmek_settings: self._wrap_method( self.update_cmek_settings, default_timeout=None, client_info=client_info, ), - self.get_settings: gapic_v1.method_async.wrap_method( + self.get_settings: self._wrap_method( self.get_settings, default_timeout=None, client_info=client_info, ), - self.update_settings: gapic_v1.method_async.wrap_method( + self.update_settings: self._wrap_method( self.update_settings, default_timeout=None, client_info=client_info, ), - self.copy_log_entries: gapic_v1.method_async.wrap_method( + self.copy_log_entries: self._wrap_method( self.copy_log_entries, default_timeout=None, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + 
client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" @property def cancel_operation( @@ -1498,7 +1611,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1515,7 +1628,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1534,7 +1647,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 27e8ca22617a..7832dda52f56 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
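Two details in the _prep_wrapped_messages rewrite above are easy to miss. First, _wrap_method probes once, at construction time, whether the installed google-api-core accepts a kind argument on wrap_method, so the transport stays compatible with older releases without a try/except on every wrap. Second, the retryable methods are wrapped with AsyncRetry policies. A sketch of both, where wrap_fn stands in for gapic_v1.method_async.wrap_method and the predicate's exception set is illustrative rather than copied from the truncated hunks above:

import inspect

from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries

def make_wrap_method(wrap_fn, kind: str):
    # Probe the signature once instead of catching TypeError per call.
    supports_kind = "kind" in inspect.signature(wrap_fn).parameters

    def _wrap_method(func, *args, **kwargs):
        if supports_kind:
            kwargs["kind"] = kind
        return wrap_fn(func, *args, **kwargs)

    return _wrap_method

# An AsyncRetry policy in the shape used above: exponential backoff starting
# at 0.1s, capped at 60s per sleep, growing by 1.3x, within a 60s budget.
example_retry = retries.AsyncRetry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.ServiceUnavailable,  # illustrative; see the hunks for the real set
    ),
    timeout=60.0,
)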
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -56,6 +56,15 @@ from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -193,9 +202,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client) - ) + get_transport_class = LoggingServiceV2Client.get_transport_class def __init__( self, @@ -265,6 +272,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + }, + ) + async def delete_log( self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, @@ -272,7 +301,7 @@ async def delete_log( log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -327,13 +356,18 @@ async def sample_delete_log(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
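All of this instrumentation is inert unless DEBUG logging is enabled for the client's logger hierarchy, and client_logging.initialize_logging() is only called when google.api_core.client_logging can be imported. A sketch of how an application might turn the structured logs on; the GOOGLE_SDK_PYTHON_LOGGING_SCOPE variable is what google-api-core's client_logging consults, but treat that name as an assumption if you are pinned to an older release:

import logging
import os

# Option A: let google-api-core set up scoped handlers itself. This must be
# set before the client is constructed, since initialize_logging() reads it.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.logging_v2"

# Option B: configure the standard logging tree directly.
logging.basicConfig(level=logging.INFO)
logging.getLogger("google.cloud.logging_v2").setLevel(logging.DEBUG)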
- has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -383,7 +417,7 @@ async def write_log_entries( entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -514,8 +548,10 @@ async def sample_write_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -524,7 +560,10 @@ async def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -577,7 +616,7 @@ async def list_log_entries( order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -664,8 +703,10 @@ async def sample_list_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: @@ -678,7 +719,10 @@ async def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
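The replacement for any([...]) here is a behavioral fix, not a style change: any() treats explicitly passed falsy values such as "" or 0 as if they were absent, while the new check only treats None as "not provided". A two-assertion illustration:

flattened_params = [""]  # caller explicitly passed an empty log_name

assert not any(flattened_params)  # old check: "" is falsy, so it is missed
assert len([p for p in flattened_params if p is not None]) > 0  # new check: "" counts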
- has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -738,7 +782,7 @@ async def list_monitored_resource_descriptors( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -776,8 +820,10 @@ async def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -832,7 +878,7 @@ async def list_logs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -882,8 +928,10 @@ async def sample_list_logs(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: @@ -897,7 +945,10 @@ async def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -957,7 +1008,7 @@ def tail_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. 
Until the stream is terminated, it will continue reading @@ -1006,8 +1057,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -1040,7 +1093,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1051,8 +1104,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1065,11 +1120,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1097,7 +1148,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1108,8 +1159,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1122,11 +1175,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
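The widened annotation, Sequence[Tuple[str, Union[str, bytes]]], which recurs throughout these hunks, mirrors gRPC's own metadata rule: values are strings, except that keys ending in -bin carry raw bytes (which gRPC base64-encodes on the wire). A hypothetical call passing both kinds; the key names are invented for illustration:

from google.cloud import logging_v2

client = logging_v2.LoggingServiceV2Client()  # assumes default credentials
client.delete_log(
    log_name="projects/my-project/logs/my-log",
    metadata=(
        ("x-example-context", "abc123"),       # ordinary string-valued entry
        ("x-example-token-bin", b"\x01\x02"),  # bytes value; key ends in -bin
    ),
)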
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1154,7 +1203,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1169,8 +1218,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1182,11 +1233,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 2c86aecca89b..4624c1af883a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -50,6 +53,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -458,52 +470,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. 
Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info to. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LoggingServiceV2Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -611,6 +616,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Set up logging.
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -660,7 +669,7 @@ def __init__( Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport], ] = ( - type(self).get_transport_class(transport) + LoggingServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LoggingServiceV2Transport], transport) ) @@ -677,6 +686,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + }, + ) + def delete_log( self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, @@ -684,7 +716,7 @@ def delete_log( log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -739,13 +771,18 @@ def sample_delete_log(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -792,7 +829,7 @@ def write_log_entries( entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -923,8 +960,10 @@ def sample_write_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -933,7 +972,10 @@ def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -982,7 +1024,7 @@ def list_log_entries( order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -1069,8 +1111,10 @@ def sample_list_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: @@ -1083,7 +1127,10 @@ def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1140,7 +1187,7 @@ def list_monitored_resource_descriptors( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1178,8 +1225,10 @@ def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: @@ -1234,7 +1283,7 @@ def list_logs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -1284,8 +1333,10 @@ def sample_list_logs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: @@ -1299,7 +1350,10 @@ def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1356,7 +1410,7 @@ def tail_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -1405,8 +1459,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -1450,7 +1506,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1461,8 +1517,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1475,11 +1533,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1490,16 +1544,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1507,7 +1565,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1518,8 +1576,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1532,11 +1592,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1547,16 +1603,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1564,7 +1624,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1579,8 +1639,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1592,11 +1654,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 0eece8acc529..12a5268732aa 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -68,7 +68,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -82,8 +82,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -142,7 +144,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -156,8 +158,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -220,7 +224,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -234,8 +238,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -296,7 +302,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -310,8 +316,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -378,7 +386,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -392,8 +400,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogsRequest(request) @@ -452,7 +462,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -466,8 +476,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
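These pager constructors are internal; callers receive the pagers from the list methods and simply iterate. A sketch of both shapes, assuming `client` and `async_client` are already-constructed `LoggingServiceV2Client` and `LoggingServiceV2AsyncClient` instances and the project name is a placeholder:

    # Sync pager: iteration fetches further pages on demand.
    for entry in client.list_log_entries(resource_names=["projects/my-project"]):
        print(entry.log_name)

    # Async pager: same surface with `async for`.
    async def dump_entries():
        pager = await async_client.list_log_entries(
            resource_names=["projects/my-project"]
        )
        async for entry in pager:
            print(entry.log_name)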
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogsRequest(request) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst new file mode 100644 index 000000000000..897a4c7bfaec --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`LoggingServiceV2Transport` is the ABC for all transports. +- public child `LoggingServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `LoggingServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseLoggingServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `LoggingServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 6f7e1c99d5f4..406a2b87865e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -233,6 +233,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index bce7e8ffc3dc..49bea46d6c1d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
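All of the signatures in this patch widen `metadata` from `Sequence[Tuple[str, str]]` because gRPC reserves metadata keys ending in `-bin` for binary values; every other key must carry text. The header names below are made up for illustration:

    metadata = (
        ("x-example-text", "hello"),             # ordinary key: str value
        ("x-example-payload-bin", b"\x00\x01"),  # `-bin` key: bytes value
    )
    # Accepted by any of the per-call methods above, e.g.:
    # client.list_logs(parent="projects/my-project", metadata=metadata)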
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +24,92 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): """gRPC backend transport for LoggingServiceV2. @@ -182,7 +263,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -258,7 +344,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log" not in self._stubs: - self._stubs["delete_log"] = self.grpc_channel.unary_unary( + self._stubs["delete_log"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -290,7 +376,7 @@ def write_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_log_entries" not in self._stubs: - self._stubs["write_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -319,7 +405,7 @@ def list_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_entries" not in self._stubs: - self._stubs["list_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -352,7 +438,7 @@ def list_monitored_resource_descriptors( if "list_monitored_resource_descriptors" not in self._stubs: self._stubs[ "list_monitored_resource_descriptors" - ] = self.grpc_channel.unary_unary( + ] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -380,7 +466,7 @@ def list_logs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_logs" not in self._stubs: - self._stubs["list_logs"] = self.grpc_channel.unary_unary( + self._stubs["list_logs"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -408,7 +494,7 @@ def tail_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "tail_log_entries" not in self._stubs: - self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -416,7 +502,7 @@ def tail_log_entries( return self._stubs["tail_log_entries"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -428,7 +514,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
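The interceptor above is a no-op unless DEBUG logging is enabled for this module. Assuming the `client_logging.initialize_logging()` hook reads google-api-core's `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable (true for recent google-api-core releases, but worth verifying against yours), either route below should surface the request/response records:

    import logging
    import os

    # Route 1: let client_logging configure handlers from the environment;
    # must be set before the client is constructed.
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.logging_v2"

    # Route 2: plain stdlib configuration; the interceptor only checks
    # isEnabledFor(DEBUG) on this module's logger.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger(
        "google.cloud.logging_v2.services.logging_service_v2.transports.grpc"
    ).setLevel(logging.DEBUG)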
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -445,7 +531,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -464,7 +550,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index f03c1fad7251..4e3a2af1ac40 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging @@ -32,6 +39,82 @@ from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): """gRPC AsyncIO backend transport for LoggingServiceV2. @@ -228,7 +311,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -264,7 +353,7 @@ def delete_log( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log" not in self._stubs: - self._stubs["delete_log"] = self.grpc_channel.unary_unary( + self._stubs["delete_log"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -298,7 +387,7 @@ def write_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_log_entries" not in self._stubs: - self._stubs["write_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -329,7 +418,7 @@ def list_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_entries" not in self._stubs: - self._stubs["list_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -362,7 +451,7 @@ def list_monitored_resource_descriptors( if "list_monitored_resource_descriptors" not in self._stubs: self._stubs[ "list_monitored_resource_descriptors" - ] = self.grpc_channel.unary_unary( + ] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -390,7 +479,7 @@ def list_logs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_logs" not in self._stubs: - self._stubs["list_logs"] = self.grpc_channel.unary_unary( + self._stubs["list_logs"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -420,7 +509,7 @@ def tail_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
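There is no asyncio counterpart of `grpc.intercept_channel`, which is why the patch appends to the existing channel's private `_unary_unary_interceptors` list. When you control channel creation yourself, the public route is the `interceptors` argument:

    import grpc

    class PassThroughAioInterceptor(grpc.aio.UnaryUnaryClientInterceptor):
        async def intercept_unary_unary(self, continuation, client_call_details, request):
            # Awaiting the continuation yields the in-flight call object,
            # mirroring the pattern in _LoggingClientAIOInterceptor above.
            return await continuation(client_call_details, request)

    async def make_channel():
        # Interceptors supplied at construction time, no private attributes needed.
        return grpc.aio.insecure_channel(
            "localhost:50051",  # placeholder target
            interceptors=[PassThroughAioInterceptor()],
        )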
if "tail_log_entries" not in self._stubs: - self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -430,7 +519,7 @@ def tail_log_entries( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.delete_log: gapic_v1.method_async.wrap_method( + self.delete_log: self._wrap_method( self.delete_log, default_retry=retries.AsyncRetry( initial=0.1, @@ -446,7 +535,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.write_log_entries: gapic_v1.method_async.wrap_method( + self.write_log_entries: self._wrap_method( self.write_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -462,7 +551,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_log_entries: gapic_v1.method_async.wrap_method( + self.list_log_entries: self._wrap_method( self.list_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -478,7 +567,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_monitored_resource_descriptors: gapic_v1.method_async.wrap_method( + self.list_monitored_resource_descriptors: self._wrap_method( self.list_monitored_resource_descriptors, default_retry=retries.AsyncRetry( initial=0.1, @@ -494,7 +583,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_logs: gapic_v1.method_async.wrap_method( + self.list_logs: self._wrap_method( self.list_logs, default_retry=retries.AsyncRetry( initial=0.1, @@ -510,7 +599,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.tail_log_entries: gapic_v1.method_async.wrap_method( + self.tail_log_entries: self._wrap_method( self.tail_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -526,10 +615,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" @property def cancel_operation( @@ -541,7 +654,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -558,7 +671,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -577,7 +690,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 19513e12620e..2ecfe397b676 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -54,6 +54,15 @@ from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -191,9 +200,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client) - ) + get_transport_class = MetricsServiceV2Client.get_transport_class def __init__( self, @@ -263,6 +270,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + }, + ) + async def list_log_metrics( self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, @@ -270,7 +299,7 @@ async def list_log_metrics( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -318,8 +347,10 @@ async def sample_list_log_metrics(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: @@ -333,7 +364,10 @@ async def sample_list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
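The "Created client" records above rely on the stdlib `extra` mechanism: each key becomes an attribute on the emitted LogRecord, where a filter, handler, or structured formatter can read it. A small self-contained demonstration:

    import logging

    class ServiceNameFilter(logging.Filter):
        def filter(self, record):
            # Keys passed via `extra` are plain attributes on the record.
            print("serviceName =", getattr(record, "serviceName", "<missing>"))
            return True

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("demo")
    logger.addFilter(ServiceNameFilter())
    logger.debug(
        "Created client `demo.Client`.",
        extra={"serviceName": "demo.v1.Demo", "credentialsType": None},
    )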
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -394,7 +428,7 @@ async def get_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -440,8 +474,10 @@ async def sample_get_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -462,7 +498,10 @@ async def sample_get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -515,7 +554,7 @@ async def create_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -577,8 +616,10 @@ async def sample_create_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -599,7 +640,10 @@ async def sample_create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -652,7 +696,7 @@ async def update_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -713,8 +757,10 @@ async def sample_update_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -735,7 +781,10 @@ async def sample_update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -789,7 +838,7 @@ async def delete_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -832,13 +881,18 @@ async def sample_delete_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -886,7 +940,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -897,8 +951,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -911,11 +967,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -943,7 +995,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -954,8 +1006,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -968,11 +1022,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. 
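The recurring "Certain fields should be provided within the metadata header" comments refer to the request-routing convention: selected request fields are mirrored into an `x-goog-request-params` metadata entry so the backend can route the call before parsing the body. api_core ships a helper for this; the field value below is a placeholder, and the exact percent-encoding shown is from memory:

    from google.api_core.gapic_v1 import routing_header

    entry = routing_header.to_grpc_metadata((("name", "operations/op-123"),))
    print(entry)  # expected: ('x-goog-request-params', 'name=operations%2Fop-123')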
@@ -1000,7 +1050,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1015,8 +1065,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1028,11 +1080,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 5f577decf4e1..3e4421f023e8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -457,52 +469,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. 
""" - default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MetricsServiceV2Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -610,6 +615,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -659,7 +668,7 @@ def __init__( Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport], ] = ( - type(self).get_transport_class(transport) + MetricsServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsServiceV2Transport], transport) ) @@ -676,6 +685,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + }, + ) + def list_log_metrics( self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, @@ -683,7 +715,7 @@ def list_log_metrics( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -731,8 +763,10 @@ def sample_list_log_metrics(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: @@ -746,7 +780,10 @@ def sample_list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -804,7 +841,7 @@ def get_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -850,8 +887,10 @@ def sample_get_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
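`_add_cred_info_for_auth_errors` (added above) only touches 401/403/404 responses, and only when the credentials object exposes `get_cred_info` (google-auth >= 2.35.0). From the caller's side the enrichment shows up in the exception's details; a hedged sketch, with a placeholder metric name and ambient credentials assumed:

    from google.api_core import exceptions as core_exceptions
    from google.cloud.logging_v2.services.metrics_service_v2 import (
        MetricsServiceV2Client,
    )

    client = MetricsServiceV2Client()
    try:
        client.get_log_metric(metric_name="projects/my-project/metrics/missing")
    except core_exceptions.NotFound as e:  # 404 is one of the augmented codes
        # With a new-enough google-auth, details may now include a JSON
        # description of the credential that made the call.
        print(e.details)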
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -872,7 +911,10 @@ def sample_get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -922,7 +964,7 @@ def create_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -984,8 +1026,10 @@ def sample_create_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -1006,7 +1050,10 @@ def sample_create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1056,7 +1103,7 @@ def update_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -1117,8 +1164,10 @@ def sample_update_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
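The widened `metadata` annotation follows the standard gRPC convention the new docstring text describes: keys ending in `-bin` carry `bytes`, everything else carries `str`. A small checker sketch (the header names below are illustrative, not from the patch):

```python
# The '-bin' suffix is gRPC's convention for binary metadata values.
from typing import Sequence, Tuple, Union

def check_metadata(md: Sequence[Tuple[str, Union[str, bytes]]]) -> None:
    for key, value in md:
        if key.endswith("-bin"):
            assert isinstance(value, bytes), f"{key} must carry bytes"
        else:
            assert isinstance(value, str), f"{key} must carry str"

check_metadata([
    ("x-goog-request-params", "metric_name=projects/p/metrics/m"),
    ("my-trace-context-bin", b"\x00\x01"),  # hypothetical binary header
])
print("metadata ok")
```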
Returns: google.cloud.logging_v2.types.LogMetric: @@ -1139,7 +1188,10 @@ def sample_update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1190,7 +1242,7 @@ def delete_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -1233,13 +1285,18 @@ def sample_delete_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1297,7 +1354,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1308,8 +1365,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1322,11 +1381,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1337,16 +1392,20 @@ def list_operations( # Validate the universe domain. 
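`list_operations` above now fetches its wrapped callable from `self._transport._wrapped_methods` instead of re-running `gapic_v1.method.wrap_method` on every request. A reduced sketch of that wrap-once/look-up-per-call pattern, with `wrap_method` stood in by a trivial decorator:

```python
# wrap_method is a trivial stand-in for gapic_v1.method.wrap_method.
import functools

def wrap_method(func, default_timeout=None):
    @functools.wraps(func)
    def wrapper(*args, timeout=default_timeout, **kwargs):
        # A real wrapper would apply retry/timeout policy here.
        return func(*args, **kwargs)
    return wrapper

class Transport:
    def list_operations(self):
        return "ops-response"

    def _prep_wrapped_messages(self):
        # Built once at transport construction time...
        self._wrapped_methods = {
            self.list_operations: wrap_method(self.list_operations),
        }

transport = Transport()
transport._prep_wrapped_messages()
# ...and looked up per call, instead of re-wrapping each time:
rpc = transport._wrapped_methods[transport.list_operations]
print(rpc())  # ops-response
```

Keying the dict by the bound method works because bound methods compare and hash by their underlying function and instance.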
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1354,7 +1413,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1365,8 +1424,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1379,11 +1440,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1394,16 +1451,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1411,7 +1472,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1426,8 +1487,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1439,11 +1502,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
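Both operations calls above gain the same try/except shape: send the RPC, and on a `GoogleAPICallError` enrich the exception with credential info before re-raising it unchanged. The control flow in isolation, with stand-in types (`ApiError` loosely mirrors `GoogleAPICallError`):

```python
class ApiError(Exception):
    def __init__(self, code):
        self.code = code
        self._details = []

def add_cred_info(error):
    if error.code in (401, 403, 404):
        error._details.append('{"principal": "service-account@example.com"}')

def send(rpc):
    try:
        return rpc()
    except ApiError as e:
        add_cred_info(e)  # enrich, then surface the same exception
        raise e

def failing_rpc():
    raise ApiError(403)

try:
    send(failing_rpc)
except ApiError as e:
    print(e.code, e._details)  # 403 ['{"principal": ...}']
```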
- rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 6975ae0d9653..5963fea31b66 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -66,7 +66,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -80,8 +80,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) @@ -140,7 +142,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -154,8 +156,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst new file mode 100644 index 000000000000..00dffa25f329 --- /dev/null +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MetricsServiceV2Transport` is the ABC for all transports. +- public child `MetricsServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MetricsServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMetricsServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `MetricsServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index aeb86e1e9566..e4fc7b1e44e9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -206,6 +206,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 1b16e97017a8..0ea3179c8e75 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
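The new README.rst above documents the transport class tree in prose; rendered as a skeleton (class names from the README, bodies elided), it looks roughly like:

```python
import abc

class MetricsServiceV2Transport(abc.ABC):
    """ABC shared by all transports."""

class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
    """Sync gRPC transport (grpc.py)."""

class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
    """Async gRPC transport (grpc_asyncio.py)."""

class _BaseMetricsServiceV2RestTransport(MetricsServiceV2Transport):
    """Base REST transport with inner _BaseMETHOD classes (rest_base.py)."""

class MetricsServiceV2RestTransport(_BaseMetricsServiceV2RestTransport):
    """Sync REST transport; METHOD classes extend _BaseMETHOD (rest.py)."""

print([cls.__name__ for cls in MetricsServiceV2Transport.__subclasses__()])
```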
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +24,92 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): """gRPC backend transport for MetricsServiceV2. @@ -182,7 +263,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -258,7 +344,7 @@ def list_log_metrics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_metrics" not in self._stubs: - self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary( + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -284,7 +370,7 @@ def get_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_log_metric" not in self._stubs: - self._stubs["get_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -310,7 +396,7 @@ def create_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_log_metric" not in self._stubs: - self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -336,7 +422,7 @@ def update_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_log_metric" not in self._stubs: - self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -362,7 +448,7 @@ def delete_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log_metric" not in self._stubs: - self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -370,7 +456,7 @@ def delete_log_metric( return self._stubs["delete_log_metric"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( @@ -382,7 +468,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -399,7 +485,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
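`_LoggingClientInterceptor` above is a `grpc.UnaryUnaryClientInterceptor` wired in once via `grpc.intercept_channel`, after which every stub is created on `self._logged_channel`. A minimal standalone version of that wiring (requires `grpcio`; no RPC is issued, so no server is needed):

```python
import logging
import grpc

_LOGGER = logging.getLogger(__name__)

class LoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        if _LOGGER.isEnabledFor(logging.DEBUG):
            _LOGGER.debug("Sending request for %s", client_call_details.method)
        response = continuation(client_call_details, request)
        if _LOGGER.isEnabledFor(logging.DEBUG):
            _LOGGER.debug("Received response for %s", client_call_details.method)
        return response

channel = grpc.insecure_channel("localhost:50051")
logged_channel = grpc.intercept_channel(channel, LoggingInterceptor())
print(type(logged_channel).__name__)  # an interceptor-wrapped channel
```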
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -418,7 +504,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 81c675d85409..68335a322f80 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics @@ -32,6 +39,82 @@ from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": 
str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): """gRPC AsyncIO backend transport for MetricsServiceV2. @@ -228,7 +311,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -263,7 +352,7 @@ def list_log_metrics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_metrics" not in self._stubs: - self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary( + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -291,7 +380,7 @@ def get_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_log_metric" not in self._stubs: - self._stubs["get_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -319,7 +408,7 @@ def create_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_log_metric" not in self._stubs: - self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -347,7 +436,7 @@ def update_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
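The async transport above appends its interceptor to the channel's `_unary_unary_interceptors` and feature-detects whether the installed api_core's `wrap_method` accepts a `kind` keyword before passing one. The `inspect.signature` probe on its own, against two stand-in signatures:

```python
import inspect

def wrap_method_old(func, default_timeout=None):  # api_core without `kind`
    return func

def wrap_method_new(func, default_timeout=None, kind=None):  # newer api_core
    return func

for wrap in (wrap_method_old, wrap_method_new):
    wrap_with_kind = "kind" in inspect.signature(wrap).parameters
    kwargs = {"kind": "grpc_asyncio"} if wrap_with_kind else {}
    print(wrap.__name__, wrap_with_kind, wrap(len, **kwargs) is len)
```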
if "update_log_metric" not in self._stubs: - self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -373,7 +462,7 @@ def delete_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log_metric" not in self._stubs: - self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -383,7 +472,7 @@ def delete_log_metric( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_log_metrics: gapic_v1.method_async.wrap_method( + self.list_log_metrics: self._wrap_method( self.list_log_metrics, default_retry=retries.AsyncRetry( initial=0.1, @@ -399,7 +488,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_log_metric: gapic_v1.method_async.wrap_method( + self.get_log_metric: self._wrap_method( self.get_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -415,12 +504,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_log_metric: gapic_v1.method_async.wrap_method( + self.create_log_metric: self._wrap_method( self.create_log_metric, default_timeout=60.0, client_info=client_info, ), - self.update_log_metric: gapic_v1.method_async.wrap_method( + self.update_log_metric: self._wrap_method( self.update_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -436,7 +525,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_log_metric: gapic_v1.method_async.wrap_method( + self.delete_log_metric: self._wrap_method( self.delete_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -452,10 +541,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" @property def cancel_operation( @@ -467,7 +580,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -484,7 +597,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -503,7 +616,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 530b6ccf5f3f..50c444f70b85 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.4" + "version": "0.1.0" }, "snippets": [ { @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -196,7 +196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -272,7 +272,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -349,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -510,7 +510,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -594,7 +594,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -683,7 +683,7 @@ }, { "name": "metadata", - 
"type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -771,7 +771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -856,7 +856,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -940,7 +940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -1017,7 +1017,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1093,7 +1093,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1170,7 +1170,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1243,7 +1243,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1321,7 +1321,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1398,7 +1398,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1476,7 +1476,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1556,7 +1556,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1637,7 +1637,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1714,7 +1714,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1788,7 +1788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_view" @@ -1861,7 +1861,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_view" @@ -1935,7 +1935,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2011,7 +2011,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2088,7 +2088,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2164,7 +2164,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2245,7 +2245,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2325,7 +2325,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2406,7 +2406,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2486,7 +2486,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2567,7 +2567,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2647,7 +2647,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2728,7 +2728,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2808,7 +2808,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2885,7 +2885,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -2961,7 +2961,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -3042,7 +3042,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", @@ -3122,7 +3122,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", @@ -3203,7 +3203,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", @@ -3283,7 +3283,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", @@ -3364,7 +3364,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", @@ -3444,7 +3444,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", @@ -3525,7 +3525,7 @@ }, { 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", @@ -3605,7 +3605,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", @@ -3686,7 +3686,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", @@ -3766,7 +3766,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", @@ -3843,7 +3843,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3916,7 +3916,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3990,7 +3990,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -4066,7 +4066,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -4143,7 +4143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4219,7 +4219,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4296,7 +4296,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4372,7 +4372,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4461,7 +4461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4549,7 +4549,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4634,7 +4634,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4718,7 +4718,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4807,7 +4807,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4895,7 +4895,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4972,7 +4972,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5048,7 +5048,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5129,7 +5129,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5206,7 +5206,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5292,7 +5292,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", @@ -5380,7 +5380,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", @@ -5461,7 +5461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", @@ -5541,7 +5541,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", @@ -5618,7 +5618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", @@ -5694,7 +5694,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", @@ -5771,7 +5771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5847,7 +5847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5940,7 +5940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6032,7 +6032,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6117,7 +6117,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6201,7 +6201,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6282,7 +6282,7 @@ }, 
{ "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6359,7 +6359,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6437,7 +6437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6517,7 +6517,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6598,7 +6598,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", @@ -6678,7 +6678,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", @@ -6763,7 +6763,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6847,7 +6847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 72e028529c6a..e29dc2a27ef0 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -24,12 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -58,10 +66,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -302,82 +332,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConfigServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
-        )
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = ConfigServiceV2Client(credentials=cred)
+    client._transport._credentials = cred

-    # Test the case when there is a universe mismatch from the client.
-    #
-    # TODO: Make this test unconditional once the minimum supported version of
-    # google-api-core becomes 2.15.0 or higher.
-    api_core_major, api_core_minor = [
-        int(part) for part in api_core_version.__version__.split(".")[0:2]
-    ]
-    if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
-        client = client_class(
-            client_options={"universe_domain": "bar.com"},
-            transport=transport_class(
-                credentials=ga_credentials.AnonymousCredentials(),
-            ),
-        )
-        with pytest.raises(ValueError) as excinfo:
-            client._validate_universe_domain()
-        assert (
-            str(excinfo.value)
-            == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
-        )
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code

-    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
-    with pytest.raises(ValueError):
-        client._compare_universes("foo.bar", None)
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []


 @pytest.mark.parametrize(
@@ -1160,25 +1154,6 @@ def test_list_buckets(request_type, transport: str = "grpc"):
     assert response.next_page_token == "next_page_token_value"


-def test_list_buckets_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.list_buckets()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListBucketsRequest()
-
-
 def test_list_buckets_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -1244,29 +1219,6 @@ def test_list_buckets_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_list_buckets_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.ListBucketsResponse(
-                next_page_token="next_page_token_value",
-            )
-        )
-        response = await client.list_buckets()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListBucketsRequest()
-
-
 @pytest.mark.asyncio
 async def test_list_buckets_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -1275,7 +1227,7 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -1290,22 +1242,23 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.list_buckets
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.list_buckets(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.list_buckets(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -1313,7 +1266,7 @@ async def test_list_buckets_async(
     transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -1379,7 +1332,7 @@ def test_list_buckets_field_headers():

 @pytest.mark.asyncio
 async def test_list_buckets_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1449,7 +1402,7 @@ def test_list_buckets_flattened_error():

 @pytest.mark.asyncio
 async def test_list_buckets_flattened_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1478,7 +1431,7 @@ async def test_list_buckets_flattened_async():

 @pytest.mark.asyncio
 async def test_list_buckets_flattened_error_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -1588,7 +1541,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"):

 @pytest.mark.asyncio
 async def test_list_buckets_async_pager():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1638,7 +1591,7 @@ async def test_list_buckets_async_pager():

 @pytest.mark.asyncio
 async def test_list_buckets_async_pages():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1732,25 +1685,6 @@ def test_get_bucket(request_type, transport: str = "grpc"):
     assert response.restricted_fields == ["restricted_fields_value"]


-def test_get_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.get_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetBucketRequest()
-
-
 def test_get_bucket_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -1814,42 +1748,13 @@ def test_get_bucket_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_get_bucket_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogBucket(
-                name="name_value",
-                description="description_value",
-                retention_days=1512,
-                locked=True,
-                lifecycle_state=logging_config.LifecycleState.ACTIVE,
-                analytics_enabled=True,
-                restricted_fields=["restricted_fields_value"],
-            )
-        )
-        response = await client.get_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -1864,22 +1769,23 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.get_bucket
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.get_bucket(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.get_bucket(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -1887,7 +1793,7 @@ async def test_get_bucket_async(
     transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -1965,7 +1871,7 @@ def test_get_bucket_field_headers():

 @pytest.mark.asyncio
 async def test_get_bucket_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2029,27 +1935,6 @@ def test_create_bucket_async(request_type, transport: str = "grpc"):
     assert isinstance(response, future.Future)


-def test_create_bucket_async_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_bucket_async), "__call__"
-    ) as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.create_bucket_async()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateBucketRequest()
-
-
 def test_create_bucket_async_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -2114,8 +1999,9 @@ def test_create_bucket_async_use_cached_wrapped_rpc():
         # Establish that the underlying gRPC stub method was called.
         assert mock_rpc.call_count == 1

-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
         wrapper_fn.reset_mock()

         client.create_bucket_async(request)
@@ -2125,29 +2011,6 @@ def test_create_bucket_async_use_cached_wrapped_rpc():
         assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_create_bucket_async_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_bucket_async), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
-        )
-        response = await client.create_bucket_async()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_create_bucket_async_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -2156,7 +2019,7 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -2171,26 +2034,28 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.create_bucket_async
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.create_bucket_async(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
         wrapper_fn.reset_mock()

         await client.create_bucket_async(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -2198,7 +2063,7 @@ async def test_create_bucket_async_async(
     transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -2265,7 +2130,7 @@ def test_create_bucket_async_field_headers():

 @pytest.mark.asyncio
 async def test_create_bucket_async_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2331,27 +2196,6 @@ def test_update_bucket_async(request_type, transport: str = "grpc"):
     assert isinstance(response, future.Future)


-def test_update_bucket_async_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.update_bucket_async), "__call__"
-    ) as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.update_bucket_async()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-
 def test_update_bucket_async_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -2414,8 +2258,9 @@ def test_update_bucket_async_use_cached_wrapped_rpc():
         # Establish that the underlying gRPC stub method was called.
         assert mock_rpc.call_count == 1

-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
         wrapper_fn.reset_mock()

         client.update_bucket_async(request)
@@ -2425,29 +2270,6 @@ def test_update_bucket_async_use_cached_wrapped_rpc():
         assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_update_bucket_async_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.update_bucket_async), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
-        )
-        response = await client.update_bucket_async()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_update_bucket_async_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -2456,7 +2278,7 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -2471,26 +2293,28 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.update_bucket_async
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.update_bucket_async(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
         wrapper_fn.reset_mock()

         await client.update_bucket_async(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -2498,7 +2322,7 @@ async def test_update_bucket_async_async(
     transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -2565,7 +2389,7 @@ def test_update_bucket_async_field_headers():

 @pytest.mark.asyncio
 async def test_update_bucket_async_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2644,25 +2468,6 @@ def test_create_bucket(request_type, transport: str = "grpc"):
     assert response.restricted_fields == ["restricted_fields_value"]


-def test_create_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.create_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateBucketRequest()
-
-
 def test_create_bucket_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -2728,35 +2533,6 @@ def test_create_bucket_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_create_bucket_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogBucket(
-                name="name_value",
-                description="description_value",
-                retention_days=1512,
-                locked=True,
-                lifecycle_state=logging_config.LifecycleState.ACTIVE,
-                analytics_enabled=True,
-                restricted_fields=["restricted_fields_value"],
-            )
-        )
-        response = await client.create_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_create_bucket_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -2765,7 +2541,7 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -2780,22 +2556,23 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.create_bucket
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.create_bucket(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.create_bucket(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -2803,7 +2580,7 @@ async def test_create_bucket_async(
     transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -2881,7 +2658,7 @@ def test_create_bucket_field_headers():

 @pytest.mark.asyncio
 async def test_create_bucket_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2958,25 +2735,6 @@ def test_update_bucket(request_type, transport: str = "grpc"):
     assert response.restricted_fields == ["restricted_fields_value"]


-def test_update_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.update_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-
 def test_update_bucket_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -3040,35 +2798,6 @@ def test_update_bucket_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_update_bucket_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogBucket(
-                name="name_value",
-                description="description_value",
-                retention_days=1512,
-                locked=True,
-                lifecycle_state=logging_config.LifecycleState.ACTIVE,
-                analytics_enabled=True,
-                restricted_fields=["restricted_fields_value"],
-            )
-        )
-        response = await client.update_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_update_bucket_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -3077,7 +2806,7 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -3092,22 +2821,23 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.update_bucket
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.update_bucket(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.update_bucket(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -3115,7 +2845,7 @@ async def test_update_bucket_async(
     transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -3193,7 +2923,7 @@ def test_update_bucket_field_headers():

 @pytest.mark.asyncio
 async def test_update_bucket_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3255,25 +2985,6 @@ def test_delete_bucket(request_type, transport: str = "grpc"):
     assert response is None


-def test_delete_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.delete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteBucketRequest()
-
-
 def test_delete_bucket_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -3337,25 +3048,6 @@ def test_delete_bucket_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_delete_bucket_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_delete_bucket_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -3364,7 +3056,7 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -3379,22 +3071,23 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.delete_bucket
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.delete_bucket(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.delete_bucket(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -3402,7 +3095,7 @@ async def test_delete_bucket_async(
     transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -3463,7 +3156,7 @@ def test_delete_bucket_field_headers():

 @pytest.mark.asyncio
 async def test_delete_bucket_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3523,25 +3216,6 @@ def test_undelete_bucket(request_type, transport: str = "grpc"):
     assert response is None


-def test_undelete_bucket_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.undelete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UndeleteBucketRequest()
-
-
 def test_undelete_bucket_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -3605,25 +3279,6 @@ def test_undelete_bucket_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_undelete_bucket_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.undelete_bucket()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UndeleteBucketRequest()
-
-
 @pytest.mark.asyncio
 async def test_undelete_bucket_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -3632,7 +3287,7 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -3647,22 +3302,23 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.undelete_bucket
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.undelete_bucket(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.undelete_bucket(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -3670,7 +3326,7 @@ async def test_undelete_bucket_async(
     transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -3731,7 +3387,7 @@ def test_undelete_bucket_field_headers():

 @pytest.mark.asyncio
 async def test_undelete_bucket_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3794,25 +3450,6 @@ def test_list_views(request_type, transport: str = "grpc"):
     assert response.next_page_token == "next_page_token_value"


-def test_list_views_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.list_views()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListViewsRequest()
-
-
 def test_list_views_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -3878,36 +3515,13 @@ def test_list_views_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_list_views_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.ListViewsResponse(
-                next_page_token="next_page_token_value",
-            )
-        )
-        response = await client.list_views()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.ListViewsRequest()
-
-
 @pytest.mark.asyncio
 async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -3922,22 +3536,23 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.list_views
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.list_views(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.list_views(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -3945,7 +3560,7 @@ async def test_list_views_async(
     transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -4011,7 +3626,7 @@ def test_list_views_field_headers():

 @pytest.mark.asyncio
 async def test_list_views_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4081,7 +3696,7 @@ def test_list_views_flattened_error():

 @pytest.mark.asyncio
 async def test_list_views_flattened_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4110,7 +3725,7 @@ async def test_list_views_flattened_async():

 @pytest.mark.asyncio
 async def test_list_views_flattened_error_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -4220,7 +3835,7 @@ def test_list_views_pages(transport_name: str = "grpc"):

 @pytest.mark.asyncio
 async def test_list_views_async_pager():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4270,7 +3885,7 @@ async def test_list_views_async_pager():

 @pytest.mark.asyncio
 async def test_list_views_async_pages():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4356,25 +3971,6 @@ def test_get_view(request_type, transport: str = "grpc"):
     assert response.filter == "filter_value"


-def test_get_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_view), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.get_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetViewRequest()
-
-
 def test_get_view_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -4438,38 +4034,13 @@ def test_get_view_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_get_view_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_view), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogView(
-                name="name_value",
-                description="description_value",
-                filter="filter_value",
-            )
-        )
-        response = await client.get_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.GetViewRequest()
-
-
 @pytest.mark.asyncio
 async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -4484,22 +4055,23 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.get_view
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.get_view(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.get_view(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -4507,7 +4079,7 @@ async def test_get_view_async(
     transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -4577,7 +4149,7 @@ def test_get_view_field_headers():

 @pytest.mark.asyncio
 async def test_get_view_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4646,25 +4218,6 @@ def test_create_view(request_type, transport: str = "grpc"):
     assert response.filter == "filter_value"


-def test_create_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.create_view), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.create_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateViewRequest()
-
-
 def test_create_view_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -4730,31 +4283,6 @@ def test_create_view_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_create_view_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.create_view), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogView(
-                name="name_value",
-                description="description_value",
-                filter="filter_value",
-            )
-        )
-        response = await client.create_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.CreateViewRequest()
-
-
 @pytest.mark.asyncio
 async def test_create_view_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -4763,7 +4291,7 @@ async def test_create_view_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -4778,22 +4306,23 @@ async def test_create_view_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.create_view
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.create_view(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.create_view(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -4801,7 +4330,7 @@ async def test_create_view_async(
     transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -4871,7 +4400,7 @@ def test_create_view_field_headers():

 @pytest.mark.asyncio
 async def test_create_view_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4940,25 +4469,6 @@ def test_update_view(request_type, transport: str = "grpc"):
     assert response.filter == "filter_value"


-def test_update_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.update_view), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.update_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateViewRequest()
-
-
 def test_update_view_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -5022,31 +4532,6 @@ def test_update_view_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_update_view_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.update_view), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            logging_config.LogView(
-                name="name_value",
-                description="description_value",
-                filter="filter_value",
-            )
-        )
-        response = await client.update_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.UpdateViewRequest()
-
-
 @pytest.mark.asyncio
 async def test_update_view_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -5055,7 +4540,7 @@ async def test_update_view_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -5070,22 +4555,23 @@ async def test_update_view_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.update_view
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.update_view(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.update_view(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -5093,7 +4579,7 @@ async def test_update_view_async(
     transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -5163,7 +4649,7 @@ def test_update_view_field_headers():

 @pytest.mark.asyncio
 async def test_update_view_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5225,25 +4711,6 @@ def test_delete_view(request_type, transport: str = "grpc"):
     assert response is None


-def test_delete_view_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
-        )
-        client.delete_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteViewRequest()
-
-
 def test_delete_view_non_empty_request_with_auto_populated_field():
     # This test is a coverage failsafe to make sure that UUID4 fields are
     # automatically populated, according to AIP-4235, with non-empty requests.
@@ -5307,25 +4774,6 @@ def test_delete_view_use_cached_wrapped_rpc():
     assert mock_rpc.call_count == 2


-@pytest.mark.asyncio
-async def test_delete_view_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_view()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == logging_config.DeleteViewRequest()
-
-
 @pytest.mark.asyncio
 async def test_delete_view_async_use_cached_wrapped_rpc(
     transport: str = "grpc_asyncio",
@@ -5334,7 +4782,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc(
     # instead of constructing them on each call
     with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
         client = ConfigServiceV2AsyncClient(
-            credentials=ga_credentials.AnonymousCredentials(),
+            credentials=async_anonymous_credentials(),
             transport=transport,
         )

@@ -5349,22 +4797,23 @@ async def test_delete_view_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        mock_object = mock.AsyncMock()
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
         client._client._transport._wrapped_methods[
             client._client._transport.delete_view
-        ] = mock_object
+        ] = mock_rpc

         request = {}
         await client.delete_view(request)

         # Establish that the underlying gRPC stub method was called.
-        assert mock_object.call_count == 1
+        assert mock_rpc.call_count == 1

         await client.delete_view(request)

         # Establish that a new wrapper was not created for this call
         assert wrapper_fn.call_count == 0
-        assert mock_object.call_count == 2
+        assert mock_rpc.call_count == 2


 @pytest.mark.asyncio
@@ -5372,7 +4821,7 @@ async def test_delete_view_async(
     transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest
 ):
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
         transport=transport,
     )

@@ -5433,7 +4882,7 @@ def test_delete_view_field_headers():

 @pytest.mark.asyncio
 async def test_delete_view_field_headers_async():
     client = ConfigServiceV2AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
+        credentials=async_anonymous_credentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5496,25 +4945,6 @@ def test_list_sinks(request_type, transport: str = "grpc"):
     assert response.next_page_token == "next_page_token_value"


-def test_list_sinks_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = ConfigServiceV2Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
-        call.return_value.name = (
-            "foo"  # operation_request.operation in compute client(s) expect a string.
- ) - client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - def test_list_sinks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5580,36 +5010,13 @@ def test_list_sinks_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_sinks_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - @pytest.mark.asyncio async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5624,22 +5031,23 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sinks - ] = mock_object + ] = mock_rpc request = {} await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sinks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5647,7 +5055,7 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5713,7 +5121,7 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5783,7 +5191,7 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
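async_anonymous_credentials() is not shown in these hunks; it is defined elsewhere in the test module. One plausible shape for such a helper, purely illustrative and assuming the async credential types shipped in newer google-auth releases:

import google.auth.credentials as ga_credentials

try:  # async credentials are only available in newer google-auth releases
    from google.auth.aio import credentials as ga_credentials_async
    HAS_GOOGLE_AUTH_AIO = True
except ImportError:  # pragma: NO COVER
    HAS_GOOGLE_AUTH_AIO = False


def async_anonymous_credentials():
    # Hypothetical sketch: prefer the async anonymous credentials when
    # available, otherwise fall back to the sync variant.
    if HAS_GOOGLE_AUTH_AIO:
        return ga_credentials_async.AnonymousCredentials()
    return ga_credentials.AnonymousCredentials()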
@@ -5812,7 +5220,7 @@ async def test_list_sinks_flattened_async(): @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5922,7 +5330,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5972,7 +5380,7 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6068,25 +5476,6 @@ def test_get_sink(request_type, transport: str = "grpc"): assert response.include_children is True -def test_get_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - def test_get_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6150,43 +5539,13 @@ def test_get_sink_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - ) - ) - response = await client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - @pytest.mark.asyncio async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6201,22 +5560,23 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_sink - ] = mock_object + ] = mock_rpc request = {} await client.get_sink(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6224,7 +5584,7 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6304,7 +5664,7 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6374,7 +5734,7 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6403,7 +5763,7 @@ async def test_get_sink_flattened_async(): @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6465,25 +5825,6 @@ def test_create_sink(request_type, transport: str = "grpc"): assert response.include_children is True -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - def test_create_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6547,36 +5888,6 @@ def test_create_sink_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - ) - ) - response = await client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - @pytest.mark.asyncio async def test_create_sink_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6585,7 +5896,7 @@ async def test_create_sink_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6600,22 +5911,23 @@ async def test_create_sink_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_sink - ] = mock_object + ] = mock_rpc request = {} await client.create_sink(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6623,7 +5935,7 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6703,7 +6015,7 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6778,7 +6090,7 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6811,7 +6123,7 @@ async def test_create_sink_flattened_async(): @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6874,25 +6186,6 @@ def test_update_sink(request_type, transport: str = "grpc"): assert response.include_children is True -def test_update_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - def test_update_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6956,36 +6249,6 @@ def test_update_sink_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - ) - ) - response = await client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - @pytest.mark.asyncio async def test_update_sink_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6994,7 +6257,7 @@ async def test_update_sink_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7009,22 +6272,23 @@ async def test_update_sink_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_sink - ] = mock_object + ] = mock_rpc request = {} await client.update_sink(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7032,7 +6296,7 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7112,7 +6376,7 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7192,7 +6456,7 @@ def test_update_sink_flattened_error(): @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7229,7 +6493,7 @@ async def test_update_sink_flattened_async(): @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7276,25 +6540,6 @@ def test_delete_sink(request_type, transport: str = "grpc"): assert response is None -def test_delete_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - def test_delete_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7358,25 +6603,6 @@ def test_delete_sink_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - @pytest.mark.asyncio async def test_delete_sink_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -7385,7 +6611,7 @@ async def test_delete_sink_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7400,22 +6626,23 @@ async def test_delete_sink_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_sink - ] = mock_object + ] = mock_rpc request = {} await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7423,7 +6650,7 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7484,7 +6711,7 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7552,7 +6779,7 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7579,7 +6806,7 @@ async def test_delete_sink_flattened_async(): @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7624,25 +6851,6 @@ def test_create_link(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_create_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_link), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - def test_create_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7701,8 +6909,9 @@ def test_create_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_link(request) @@ -7712,27 +6921,6 @@ def test_create_link_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_link), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - @pytest.mark.asyncio async def test_create_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -7741,7 +6929,7 @@ async def test_create_link_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7756,26 +6944,28 @@ async def test_create_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_link - ] = mock_object + ] = mock_rpc request = {} await client.create_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7783,7 +6973,7 @@ async def test_create_link_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7846,7 +7036,7 @@ def test_create_link_field_headers(): @pytest.mark.asyncio async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7926,7 +7116,7 @@ def test_create_link_flattened_error(): @pytest.mark.asyncio async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
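The reworded comment in the create_link/delete_link hunks distinguishes LRO methods from plain RPCs: the first call additionally builds a cached client._transport.operations_client through wrapper_fn, so wrapper_fn.reset_mock() followed by a zero call-count proves the cache is reused. A stripped-down sketch of that lazy cache, with illustrative names:

from unittest import mock

class _Transport:
    def __init__(self, wrap_fn):
        self._wrap = wrap_fn
        self._operations_client = None  # built lazily, then cached

    def operations_client(self):
        if self._operations_client is None:
            self._operations_client = self._wrap("operations")
        return self._operations_client

wrapper_fn = mock.Mock(side_effect=lambda name: object())
transport = _Transport(wrapper_fn)

transport.operations_client()  # first LRO call builds the wrapper
assert wrapper_fn.call_count == 1

wrapper_fn.reset_mock()
transport.operations_client()  # served from the cache
assert wrapper_fn.call_count == 0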
@@ -7963,7 +7153,7 @@ async def test_create_link_flattened_async(): @pytest.mark.asyncio async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8010,25 +7200,6 @@ def test_delete_link(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_delete_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_link), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - def test_delete_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8085,8 +7256,9 @@ def test_delete_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_link(request) @@ -8096,27 +7268,6 @@ def test_delete_link_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_link), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - @pytest.mark.asyncio async def test_delete_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -8125,7 +7276,7 @@ async def test_delete_link_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8140,26 +7291,28 @@ async def test_delete_link_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_link - ] = mock_object + ] = mock_rpc request = {} await client.delete_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8167,7 +7320,7 @@ async def test_delete_link_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8230,7 +7383,7 @@ def test_delete_link_field_headers(): @pytest.mark.asyncio async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8300,7 +7453,7 @@ def test_delete_link_flattened_error(): @pytest.mark.asyncio async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8329,7 +7482,7 @@ async def test_delete_link_flattened_async(): @pytest.mark.asyncio async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8377,25 +7530,6 @@ def test_list_links(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_links_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_links), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - def test_list_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8461,36 +7595,13 @@ def test_list_links_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_links_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_links), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListLinksResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - @pytest.mark.asyncio async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8505,22 +7616,23 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_links - ] = mock_object + ] = mock_rpc request = {} await client.list_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8528,7 +7640,7 @@ async def test_list_links_async( transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8594,7 +7706,7 @@ def test_list_links_field_headers(): @pytest.mark.asyncio async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8664,7 +7776,7 @@ def test_list_links_flattened_error(): @pytest.mark.asyncio async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8693,7 +7805,7 @@ async def test_list_links_flattened_async(): @pytest.mark.asyncio async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8803,7 +7915,7 @@ def test_list_links_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8853,7 +7965,7 @@ async def test_list_links_async_pager(): @pytest.mark.asyncio async def test_list_links_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8939,25 +8051,6 @@ def test_get_link(request_type, transport: str = "grpc"): assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_link), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - def test_get_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
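The *_non_empty_request_with_auto_populated_field tests that replace the deleted empty-call tests target AIP-4235: a UUID4-style request field left unset by the caller is filled in automatically before the RPC is sent. A self-contained sketch of the idea, with a hypothetical field name:

import uuid

def populate_request_id(request: dict) -> dict:
    # Auto-populate only when the caller left the field unset.
    if not request.get("request_id"):
        request["request_id"] = str(uuid.uuid4())
    return request

req = populate_request_id({"parent": "parent_value"})
assert uuid.UUID(req["request_id"]).version == 4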
@@ -9021,38 +8114,13 @@ def test_get_link_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_link), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Link( - name="name_value", - description="description_value", - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - ) - response = await client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - @pytest.mark.asyncio async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9067,22 +8135,23 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_link - ] = mock_object + ] = mock_rpc request = {} await client.get_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9090,7 +8159,7 @@ async def test_get_link_async( transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9160,7 +8229,7 @@ def test_get_link_field_headers(): @pytest.mark.asyncio async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9228,7 +8297,7 @@ def test_get_link_flattened_error(): @pytest.mark.asyncio async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
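Every use_cached_wrapped_rpc test in this file asserts the same contract: _prep_wrapped_messages wraps each RPC once at client construction, and each call looks the wrapper up in _wrapped_methods instead of re-wrapping. A minimal model of that contract, with simplified names:

from unittest import mock

def wrap_method(func):
    # Stand-in for gapic_v1.method wrapping (retry/timeout defaults).
    def wrapped(request):
        return func(request)
    return wrapped

class Client:
    def __init__(self, stubs):
        # Wrapped once at construction, never per call.
        self._wrapped_methods = {n: wrap_method(f) for n, f in stubs.items()}

    def get_link(self, request):
        return self._wrapped_methods["get_link"](request)

client = Client({"get_link": lambda request: "link"})
mock_rpc = mock.Mock()
client._wrapped_methods["get_link"] = mock_rpc  # as in the tests above

client.get_link({})
client.get_link({})
assert mock_rpc.call_count == 2  # both calls hit the cached entry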
@@ -9255,7 +8324,7 @@ async def test_get_link_flattened_async(): @pytest.mark.asyncio async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9303,25 +8372,6 @@ def test_list_exclusions(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_exclusions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - def test_list_exclusions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9387,29 +8437,6 @@ def test_list_exclusions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_exclusions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - @pytest.mark.asyncio async def test_list_exclusions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -9418,7 +8445,7 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9433,22 +8460,23 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_exclusions - ] = mock_object + ] = mock_rpc request = {} await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_exclusions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9456,7 +8484,7 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9522,7 +8550,7 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9592,7 +8620,7 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9621,7 +8649,7 @@ async def test_list_exclusions_flattened_async(): @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9731,7 +8759,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9781,7 +8809,7 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9869,25 +8897,6 @@ def test_get_exclusion(request_type, transport: str = "grpc"): assert response.disabled is True -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - def test_get_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
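The *_async_pager tests consume responses with async for; conceptually the pager flattens successive response pages into one asynchronous stream of items. A toy model of that shape (not the real pagers module):

import asyncio

class ToyAsyncPager:
    def __init__(self, pages):
        self._pages = pages  # each page is a list of items

    def __aiter__(self):
        async def items():
            for page in self._pages:
                for item in page:
                    yield item
        return items()

async def main():
    pager = ToyAsyncPager([["a", "b"], ["c"]])
    collected = [item async for item in pager]
    assert collected == ["a", "b", "c"]

asyncio.run(main())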
@@ -9951,32 +8960,6 @@ def test_get_exclusion_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - ) - response = await client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - @pytest.mark.asyncio async def test_get_exclusion_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -9985,7 +8968,7 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10000,22 +8983,23 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_exclusion - ] = mock_object + ] = mock_rpc request = {} await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10023,7 +9007,7 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10095,7 +9079,7 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10165,7 +9149,7 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
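The *_flattened_error variants pin down the calling convention shared by all of these methods: pass either a request object or flattened field arguments, never both. Sketched with a hypothetical signature and error message:

def get_exclusion(request=None, *, name=None):
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of the "
            "individual field arguments should be set."
        )
    return request if request is not None else {"name": name}

assert get_exclusion(name="name_value") == {"name": "name_value"}
try:
    get_exclusion(request={"name": "n"}, name="n")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError for mixed arguments")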
@@ -10194,7 +9178,7 @@ async def test_get_exclusion_flattened_async(): @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10248,46 +9232,27 @@ def test_create_exclusion(request_type, transport: str = "grpc"): assert response.disabled is True -def test_create_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateExclusionRequest( + parent="parent_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - -def test_create_exclusion_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = logging_config.CreateExclusionRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_exclusion(request=request) + client.create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest( @@ -10332,32 +9297,6 @@ def test_create_exclusion_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - ) - response = await client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - @pytest.mark.asyncio async def test_create_exclusion_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -10366,7 +9305,7 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10381,22 +9320,23 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_exclusion - ] = mock_object + ] = mock_rpc request = {} await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10404,7 +9344,7 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10476,7 +9416,7 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10551,7 +9491,7 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10584,7 +9524,7 @@ async def test_create_exclusion_flattened_async(): @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10639,25 +9579,6 @@ def test_update_exclusion(request_type, transport: str = "grpc"): assert response.disabled is True -def test_update_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - def test_update_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10723,32 +9644,6 @@ def test_update_exclusion_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - ) - response = await client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - @pytest.mark.asyncio async def test_update_exclusion_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -10757,7 +9652,7 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10772,22 +9667,23 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_exclusion - ] = mock_object + ] = mock_rpc request = {} await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10795,7 +9691,7 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10867,7 +9763,7 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10947,7 +9843,7 @@ def test_update_exclusion_flattened_error(): @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10984,7 +9880,7 @@ async def test_update_exclusion_flattened_async(): @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -11031,25 +9927,6 @@ def test_delete_exclusion(request_type, transport: str = "grpc"): assert response is None -def test_delete_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11115,25 +9992,6 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - @pytest.mark.asyncio async def test_delete_exclusion_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -11142,7 +10000,7 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11157,22 +10015,23 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_exclusion - ] = mock_object + ] = mock_rpc request = {} await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11180,7 +10039,7 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11241,7 +10100,7 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11309,7 +10168,7 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -11336,7 +10195,7 @@ async def test_delete_exclusion_flattened_async(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -11392,27 +10251,6 @@ def test_get_cmek_settings(request_type, transport: str = "grpc"): assert response.service_account_id == "service_account_id_value" -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11480,34 +10318,6 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_key_version_name="kms_key_version_name_value", - service_account_id="service_account_id_value", - ) - ) - response = await client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - @pytest.mark.asyncio async def test_get_cmek_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -11516,7 +10326,7 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11531,22 +10341,23 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_cmek_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cmek_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11554,7 +10365,7 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11630,7 +10441,7 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11705,27 +10516,6 @@ def test_update_cmek_settings(request_type, transport: str = "grpc"): assert response.service_account_id == "service_account_id_value" -def test_update_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11795,34 +10585,6 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_key_version_name="kms_key_version_name_value", - service_account_id="service_account_id_value", - ) - ) - response = await client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - @pytest.mark.asyncio async def test_update_cmek_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -11831,7 +10593,7 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11846,22 +10608,23 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_cmek_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cmek_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11870,7 +10633,7 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11946,7 +10709,7 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12021,25 +10784,6 @@ def test_get_settings(request_type, transport: str = "grpc"): assert response.disable_default_sink is True -def test_get_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -12104,43 +10848,16 @@ def test_get_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, - ) - ) - response = await client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - -@pytest.mark.asyncio -async def test_get_settings_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +async def test_get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -12153,22 +10870,23 @@ async def test_get_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_settings - ] = mock_object + ] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12176,7 +10894,7 @@ async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12250,7 +10968,7 @@ def test_get_settings_field_headers(): @pytest.mark.asyncio async def test_get_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12320,7 +11038,7 @@ def test_get_settings_flattened_error(): @pytest.mark.asyncio async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -12349,7 +11067,7 @@ async def test_get_settings_flattened_async(): @pytest.mark.asyncio async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -12405,25 +11123,6 @@ def test_update_settings(request_type, transport: str = "grpc"): assert response.disable_default_sink is True -def test_update_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -12487,33 +11186,6 @@ def test_update_settings_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, - ) - ) - response = await client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - @pytest.mark.asyncio async def test_update_settings_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -12522,7 +11194,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12537,22 +11209,23 @@ async def test_update_settings_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_settings - ] = mock_object + ] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -12560,7 +11233,7 @@ async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12634,7 +11307,7 @@ def test_update_settings_field_headers(): @pytest.mark.asyncio async def test_update_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12709,7 +11382,7 @@ def test_update_settings_flattened_error(): @pytest.mark.asyncio async def test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -12742,7 +11415,7 @@ async def test_update_settings_flattened_async(): @pytest.mark.asyncio async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -12788,25 +11461,6 @@ def test_copy_log_entries(request_type, transport: str = "grpc"): assert isinstance(response, future.Future) -def test_copy_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.copy_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() - - def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -12869,221 +11523,1810 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_rpc + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.copy_log_entries(request) + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ConfigServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_buckets_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = logging_config.ListBucketsResponse() + client.list_buckets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_bucket_async(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + client.delete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + client.undelete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_views_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.get_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sinks_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.get_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.create_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.update_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value = None + client.delete_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_links_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = logging_config.Link() + client.get_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_exclusions_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = None + client.delete_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.update_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_log_entries_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.copy_log_entries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_buckets_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_buckets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + await client.get_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + await client.create_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + await client.update_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
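+        # delete_bucket maps to google.protobuf.Empty, so the awaited fake
+        # call resolves to None.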
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_views_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_views(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + await client.get_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. 
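+        # FakeUnaryUnaryCall makes the mocked stub awaitable, mirroring a
+        # real unary-unary gRPC call that resolves to a LogView.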
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + await client.create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + await client.update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sinks_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_sinks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
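+    # Patching the transport's bound stub keeps the client-side request
+    # coercion in play while skipping the network.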
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + await client.get_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + await client.create_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + await client.update_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_links_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_links(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. 
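+        # Only the outgoing request is asserted on below; the Link payload
+        # is just a plausible placeholder.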
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + await client.get_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_exclusions_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + await client.get_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + await client.create_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
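+# The async variant must be awaited, hence the pytest.mark.asyncio marker.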
+@pytest.mark.asyncio +async def test_update_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + await client.update_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteExclusionRequest() - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +async def test_get_cmek_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) ) - response = await client.copy_log_entries() + await client.get_cmek_settings(request=None) + + # Establish that the underlying stub method was called. 
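+    # A request=None call should still serialize to a default-constructed
+    # GetCmekSettingsRequest, asserted just below.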
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.GetCmekSettingsRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_copy_log_entries_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_cmek_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Ensure method has been cached - assert ( - client._client._transport.copy_log_entries - in client._client._transport._wrapped_methods + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) ) + await client.update_cmek_settings(request=None) - # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[ - client._client._transport.copy_log_entries - ] = mock_object - - request = {} - await client.copy_log_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.copy_log_entries(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateCmekSettingsRequest() - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async( - transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest -): +async def test_get_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Mock the actual call, and fake the request. 
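+    # get_settings shares the Settings resource with update_settings; this
+    # covers the read path.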
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) ) - response = await client.copy_log_entries(request) + await client.get_settings(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - request = logging_config.CopyLogEntriesRequest() - assert args[0] == request + request_msg = logging_config.GetSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async_from_dict(): - await test_copy_log_entries_async(request_type=dict) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +async def test_update_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
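+        # The faked Settings payload mirrors the read test above; only the
+        # RPC and the asserted request type differ.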
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) ) + await client.update_settings(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConfigServiceV2Client(transport=transport) - assert client.transport is transport - + assert args[0] == request_msg -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.copy_log_entries(request=None) + # Establish that the underlying stub method was called. 
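+    # Even for this operation-backed method, the empty-call check inspects
+    # only the outgoing CopyLogEntriesRequest, not the returned Operation.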
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + assert args[0] == request_msg def test_transport_grpc_default(): @@ -13858,20 +14101,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -13899,7 +14128,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -13952,7 +14181,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13997,7 +14226,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -14038,7 +14267,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -14093,7 +14322,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -14140,7 +14369,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -14183,7 +14412,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -14238,7 +14467,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -14285,7 +14514,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -14301,21 +14530,29 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index b1cae4824cce..0ae4344deb27 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -24,12 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -59,10 +67,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -304,82 +334,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -1160,25 +1154,6 @@ def test_delete_log(request_type, transport: str = "grpc"): assert response is None -def test_delete_log_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_log), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - def test_delete_log_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1242,32 +1217,13 @@ def test_delete_log_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_log), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1282,22 +1238,23 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_log - ] = mock_object + ] = mock_rpc request = {} await client.delete_log(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_log(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1305,7 +1262,7 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1366,7 +1323,7 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1434,7 +1391,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1461,7 +1418,7 @@ async def test_delete_log_flattened_async(): @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1508,27 +1465,6 @@ def test_write_log_entries(request_type, transport: str = "grpc"): assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - def test_write_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1596,29 +1532,6 @@ def test_write_log_entries_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_write_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.WriteLogEntriesResponse() - ) - response = await client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - @pytest.mark.asyncio async def test_write_log_entries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1627,7 +1540,7 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1642,22 +1555,23 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.write_log_entries - ] = mock_object + ] = mock_rpc request = {} await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1665,7 +1579,7 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1756,7 +1670,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1799,7 +1713,7 @@ async def test_write_log_entries_flattened_async(): @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1850,25 +1764,6 @@ def test_list_log_entries(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - def test_list_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1938,29 +1833,6 @@ def test_list_log_entries_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.ListLogEntriesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - @pytest.mark.asyncio async def test_list_log_entries_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1969,7 +1841,7 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1984,22 +1856,23 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_log_entries - ] = mock_object + ] = mock_rpc request = {} await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2007,7 +1880,7 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2092,7 +1965,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2129,7 +2002,7 @@ async def test_list_log_entries_flattened_async(): @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2238,7 +2111,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2288,7 +2161,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
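# The *_async_use_cached_wrapped_rpc tests above (and their sync
# counterparts) all pin down one invariant: _prep_wrapped_messages builds
# each wrapped RPC once at client construction, and subsequent calls reuse
# the cached wrapper instead of re-wrapping. The essence of the check,
# factored into a hypothetical helper (a sketch, not part of the patch):
from unittest import mock

async def assert_async_rpc_wrapper_is_cached(client, method_name):
    transport = client._client._transport
    # Swap the cached wrapped function for a mock whose calls we can count.
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = mock.Mock()
    transport._wrapped_methods[getattr(transport, method_name)] = mock_rpc

    await getattr(client, method_name)({})
    assert mock_rpc.call_count == 1

    # A second call must hit the same cached wrapper, not a fresh one.
    await getattr(client, method_name)({})
    assert mock_rpc.call_count == 2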
@@ -2372,27 +2245,6 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = "grp assert response.next_page_token == "next_page_token_value" -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2463,31 +2315,6 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.ListMonitoredResourceDescriptorsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2496,7 +2323,7 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2511,22 +2338,23 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_monitored_resource_descriptors - ] = mock_object + ] = mock_rpc request = {} await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_monitored_resource_descriptors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2535,7 +2363,7 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2675,7 +2503,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2730,7 +2558,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2816,25 +2644,6 @@ def test_list_logs(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_logs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_logs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - def test_list_logs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2900,37 +2709,13 @@ def test_list_logs_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_logs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_logs), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.ListLogsResponse( - log_names=["log_names_value"], - next_page_token="next_page_token_value", - ) - ) - response = await client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2945,22 +2730,23 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_logs - ] = mock_object + ] = mock_rpc request = {} await client.list_logs(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_logs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2968,7 +2754,7 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3036,7 +2822,7 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3106,7 +2892,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3135,7 +2921,7 @@ async def test_list_logs_flattened_async(): @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3245,7 +3031,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3295,7 +3081,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
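# The *_field_headers tests touched above (bodies elided by the diff
# context) verify implicit routing per AIP-4222: path fields set on the
# request must be mirrored into the x-goog-request-params metadata header
# on the stub call. A sketch of that assertion for list_logs, assuming the
# logging import used throughout this module; values are illustrative:
from unittest import mock

def assert_routing_header_sent(client):
    request = logging.ListLogsRequest()
    request.parent = "parent_value"

    with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
        call.return_value = logging.ListLogsResponse()
        client.list_logs(request)

    # The routing header rides along as gRPC metadata.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent_value") in kw["metadata"]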
@@ -3420,7 +3206,7 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3435,22 +3221,23 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.tail_log_entries - ] = mock_object + ] = mock_rpc request = [{}] await client.tail_log_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tail_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3458,7 +3245,7 @@ async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3582,17 +3369,275 @@ def test_transport_adc(transport_class): adc.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = LoggingServiceV2Client.get_transport_class(transport_name)( +def test_transport_kind_grpc(): + transport = LoggingServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value = None + client.delete_log(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_write_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + call.return_value = logging.WriteLogEntriesResponse() + client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value = logging.ListLogEntriesResponse() + client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_empty_call_grpc(): + client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + call.return_value = logging.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_logs_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = logging.ListLogsResponse() + client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request=None) + + # Establish that the underlying stub method was called. 
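# Every *_empty_call_grpc test added above is the same four-step pattern
# with a different method and message pair: patch the stub, fake its return
# value, invoke the client method with request=None, and assert the stub
# received the default (empty) request. Factored into one hypothetical
# helper for clarity (a sketch, not part of the patch):
from unittest import mock

def assert_empty_call_sends_default_request(client, method_name,
                                            request_cls, fake_response):
    with mock.patch.object(
        type(getattr(client.transport, method_name)), "__call__"
    ) as call:
        call.return_value = fake_response
        getattr(client, method_name)(request=None)

        # The client must have built and sent the default request message.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_cls()

# e.g. assert_empty_call_sends_default_request(
#          client, "list_logs", logging.ListLogsRequest,
#          logging.ListLogsResponse())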
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_write_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) + await client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_logs_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + # Designate an appropriate return value for the call. 
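# The grpc_asyncio variants above differ from the sync pattern in exactly
# two ways: the client method is awaited, and the faked response is wrapped
# in grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub returns an
# awaitable, as a real async RPC would. The async form of the same sketch:
from unittest import mock
from google.api_core import grpc_helpers_async

async def assert_empty_call_sends_default_request_async(
    client, method_name, request_cls, fake_response
):
    with mock.patch.object(
        type(getattr(client.transport, method_name)), "__call__"
    ) as call:
        # FakeUnaryUnaryCall makes the mock awaitable, like a real async RPC.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(fake_response)
        await getattr(client, method_name)(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_cls()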
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -4155,20 +4200,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4196,7 +4227,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4249,7 +4280,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4294,7 +4325,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -4335,7 +4366,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4390,7 +4421,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4437,7 +4468,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -4480,7 +4511,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4535,7 +4566,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4582,7 +4613,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -4598,21 +4629,29 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7909609fabd2..b63f3b750d87 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -24,12 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore @@ -57,10 +65,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -302,82 +332,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -1176,25 +1170,6 @@ def test_list_log_metrics(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_log_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
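# The parametrized tests above fully pin down the contract of
# _add_cred_info_for_auth_errors: only auth-related errors (401/403/404)
# are annotated, and only when the credentials expose get_cred_info()
# returning something truthy; the info is appended to error.details as a
# JSON string. That contract restated as a plain function (a sketch, not
# necessarily the client's actual implementation):
import json

_AUTH_ERROR_CODES = (401, 403, 404)

def add_cred_info_for_auth_errors(error, credentials):
    if error.code not in _AUTH_ERROR_CODES:
        return  # e.g. a 500 is left untouched
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return  # credentials that cannot describe themselves
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))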
@@ -1262,29 +1237,6 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_metrics_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_metrics.ListLogMetricsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - @pytest.mark.asyncio async def test_list_log_metrics_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1293,7 +1245,7 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1308,22 +1260,23 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_log_metrics - ] = mock_object + ] = mock_rpc request = {} await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_log_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1331,7 +1284,7 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1397,7 +1350,7 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1467,7 +1420,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
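# The *_flattened_error tests referenced above enforce the GAPIC calling
# convention: a method accepts EITHER a populated request object OR
# flattened keyword fields, never both at once. The shape of that check,
# sketched for get_log_metric with illustrative values:
import pytest

def assert_request_and_flattened_conflict(client):
    with pytest.raises(ValueError):
        client.get_log_metric(
            logging_metrics.GetLogMetricRequest(),  # request object...
            metric_name="metric_name_value",        # ...plus a flattened field
        )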
@@ -1496,7 +1449,7 @@ async def test_list_log_metrics_flattened_async(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1606,7 +1559,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1656,7 +1609,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1750,25 +1703,6 @@ def test_get_log_metric(request_type, transport: str = "grpc"): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - def test_get_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1832,35 +1766,6 @@ def test_get_log_metric_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_metrics.LogMetric( - name="name_value", - description="description_value", - filter="filter_value", - bucket_name="bucket_name_value", - disabled=True, - value_extractor="value_extractor_value", - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - ) - response = await client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - @pytest.mark.asyncio async def test_get_log_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1869,7 +1774,7 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1884,22 +1789,23 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_log_metric - ] = mock_object + ] = mock_rpc request = {} await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1907,7 +1813,7 @@ async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1985,7 +1891,7 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2055,7 +1961,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2084,7 +1990,7 @@ async def test_get_log_metric_flattened_async(): @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2146,27 +2052,6 @@ def test_create_log_metric(request_type, transport: str = "grpc"): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - def test_create_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2234,37 +2119,6 @@ def test_create_log_metric_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_metrics.LogMetric( - name="name_value", - description="description_value", - filter="filter_value", - bucket_name="bucket_name_value", - disabled=True, - value_extractor="value_extractor_value", - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - ) - response = await client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - @pytest.mark.asyncio async def test_create_log_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2273,7 +2127,7 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2288,22 +2142,23 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_log_metric - ] = mock_object + ] = mock_rpc request = {} await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2311,7 +2166,7 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2393,7 +2248,7 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2472,7 +2327,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2507,7 +2362,7 @@ async def test_create_log_metric_flattened_async(): @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2570,27 +2425,6 @@ def test_update_log_metric(request_type, transport: str = "grpc"): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - def test_update_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2658,37 +2492,6 @@ def test_update_log_metric_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_metrics.LogMetric( - name="name_value", - description="description_value", - filter="filter_value", - bucket_name="bucket_name_value", - disabled=True, - value_extractor="value_extractor_value", - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - ) - response = await client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - @pytest.mark.asyncio async def test_update_log_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2697,7 +2500,7 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2712,22 +2515,23 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_log_metric - ] = mock_object + ] = mock_rpc request = {} await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2735,7 +2539,7 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2817,7 +2621,7 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2896,7 +2700,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2931,7 +2735,7 @@ async def test_update_log_metric_flattened_async(): @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2979,27 +2783,6 @@ def test_delete_log_metric(request_type, transport: str = "grpc"): assert response is None -def test_delete_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - def test_delete_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3067,27 +2850,6 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - @pytest.mark.asyncio async def test_delete_log_metric_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3096,7 +2858,7 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3111,22 +2873,23 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_log_metric - ] = mock_object + ] = mock_rpc request = {} await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3134,7 +2897,7 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3199,7 +2962,7 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3271,7 +3034,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3300,7 +3063,7 @@ async def test_delete_log_metric_flattened_async(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3403,17 +3166,298 @@ def test_transport_adc(transport_class): adc.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = MetricsServiceV2Client.get_transport_class(transport_name)( +def test_transport_kind_grpc(): + transport = MetricsServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_metrics_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client.list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + client.create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value = None + client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -3975,20 +4019,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4016,7 +4046,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4069,7 +4099,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4114,7 +4144,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -4155,7 +4185,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4210,7 +4240,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4257,7 +4287,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -4300,7 +4330,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4355,7 +4385,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4402,7 +4432,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -4418,21 +4448,29 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): From d43503c13998409136994be30916b76886d9ea79 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 4 Mar 2025 16:37:13 -0500 Subject: [PATCH 836/855] docs: Added documentation on log_level and excluded_loggers params in setup_logging (#971) * docs: Added documentation on log_level and excluded_loggers params in setup_logging * Added product prefix to new region tags. --- .../docs/std-lib-integration.rst | 15 +++++++++++++++ .../google/cloud/logging_v2/client.py | 3 ++- .../google/cloud/logging_v2/handlers/handlers.py | 3 ++- .../samples/snippets/usage_guide.py | 8 ++++---- 4 files changed, 23 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/docs/std-lib-integration.rst b/packages/google-cloud-logging/docs/std-lib-integration.rst index 8a016b8e5101..cf00c37ae140 100644 --- a/packages/google-cloud-logging/docs/std-lib-integration.rst +++ b/packages/google-cloud-logging/docs/std-lib-integration.rst @@ -16,6 +16,21 @@ call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~ :end-before: [END logging_handler_setup] :dedent: 4 + +You can also set the logging level threshold of the logging handler created by :meth:`~google.cloud.logging_v2.client.Client.setup_logging`, +as well as set loggers excluded from the logger that is created: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_setup_logging] + :end-before: [END logging_setup_logging] + :dedent: 4 + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_setup_logging_excludes] + :end-before: [END logging_setup_logging_excludes] + :dedent: 4 + + This :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configurations for the environment your code is running on. For more information, see the `Google Cloud Logging documentation `_. 
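For reference, a minimal sketch of the two configurations the new documentation describes, mirroring the usage_guide.py snippets changed later in this patch; it assumes a default google.cloud.logging Client with application-default credentials:

    import logging

    import google.cloud.logging

    client = google.cloud.logging.Client()

    # Attach the Cloud Logging handler to the root logger, reporting at
    # INFO level and above (the default threshold).
    client.setup_logging(log_level=logging.INFO)

    # Same, but leave the "werkzeug" logger unattached so its records are
    # not forwarded to Cloud Logging.
    client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",))
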
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 94c1e6ca7260..1b5beeb245d6 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -400,7 +400,8 @@ def setup_logging( loggers, will report to Cloud Logging. Args: - log_level (Optional[int]): Python logging log level. Defaults to + log_level (Optional[int]): The logging level threshold of the attached logger, + as set by the :meth:`logging.Logger.setLevel` method. Defaults to :const:`logging.INFO`. excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler to. This will always include the diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index ea84bb3cc3e5..e71f673f7e3f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -296,7 +296,8 @@ def setup_logging( excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler to. This will always include the loggers in the path of the logging client itself. - log_level (Optional[int]): Python logging log level. Defaults to + log_level (Optional[int]): The logging level threshold of the attached logger, + as set by the :meth:`logging.Logger.setLevel` method. Defaults to :const:`logging.INFO`. """ all_excluded_loggers = set(excluded_loggers + _INTERNAL_LOGGERS) diff --git a/packages/google-cloud-logging/samples/snippets/usage_guide.py b/packages/google-cloud-logging/samples/snippets/usage_guide.py index ef8847ba5949..6dee33798b14 100644 --- a/packages/google-cloud-logging/samples/snippets/usage_guide.py +++ b/packages/google-cloud-logging/samples/snippets/usage_guide.py @@ -475,13 +475,13 @@ def using_extras(client): def setup_logging(client): import logging - # [START setup_logging] + # [START logging_setup_logging] client.setup_logging(log_level=logging.INFO) - # [END setup_logging] + # [END logging_setup_logging] - # [START setup_logging_excludes] + # [START logging_setup_logging_excludes] client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",)) - # [END setup_logging_excludes] + # [END logging_setup_logging_excludes] @snippet From 9cd4a085869ce86440cfb1c932ae81db007fc716 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 10:33:14 -0400 Subject: [PATCH 837/855] chore: remove unused files (#976) * chore: remove unused files * remove post processing for .kokoro/docs * add new line --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 89 ------ .../.kokoro/docker/docs/requirements.in | 2 - .../.kokoro/docker/docs/requirements.txt | 297 ------------------ .../.kokoro/docs/common.cfg | 86 ----- .../.kokoro/docs/docs-presubmit.cfg | 28 -- .../.kokoro/docs/docs.cfg | 1 - .../.kokoro/publish-docs.sh | 58 ---- packages/google-cloud-logging/owlbot.py | 2 +- 9 files changed, 3 insertions(+), 564 deletions(-) delete mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile delete mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/requirements.in delete mode 100644 packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt delete mode 100644 packages/google-cloud-logging/.kokoro/docs/common.cfg delete mode 100644 
packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/docs/docs.cfg delete mode 100755 packages/google-cloud-logging/.kokoro/publish-docs.sh diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index 3f7634f25f8e..c631e1f7d7e9 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296bd8..000000000000 --- a/packages/google-cloud-logging/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037ae..000000000000 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b707..000000000000 --- a/packages/google-cloud-logging/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - 
--hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - 
--hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - 
--hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - 
--hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 
- # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # 
via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/packages/google-cloud-logging/.kokoro/docs/common.cfg b/packages/google-cloud-logging/.kokoro/docs/common.cfg deleted file mode 100644 index 3bf9925da058..000000000000 --- a/packages/google-cloud-logging/.kokoro/docs/common.cfg +++ /dev/null @@ -1,86 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 3d5288befd4a..000000000000 --- a/packages/google-cloud-logging/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/build.sh" -} - -# Only run this nox session. 
-env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/packages/google-cloud-logging/.kokoro/docs/docs.cfg b/packages/google-cloud-logging/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/publish-docs.sh b/packages/google-cloud-logging/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf1346f..000000000000 --- a/packages/google-cloud-logging/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. 
-python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 3a0271ca1787..7dd3385e0c90 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -261,7 +261,7 @@ def place_before(path, text, *before_text, escape=None): # -------------------------------------------------------------------------- # add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "samples", "docs"] +tracked_subdirs = ["continuous", "presubmit", "samples"] for subdir in tracked_subdirs: for path, subdirs, files in os.walk(f".kokoro/{subdir}"): for name in files: From b16e6b4fd89eb751ee4e29e838c94b8554062aa6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 12:57:22 -0400 Subject: [PATCH 838/855] fix: Allow protobuf 6.x (#977) Co-authored-by: ohmayr --- packages/google-cloud-logging/setup.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 244b30c7cdee..6011b09a92fd 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -36,19 +36,19 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-cloud-appengine-logging>=0.1.3, <2.0.0dev", - "google-cloud-audit-log >= 0.2.4, < 1.0.0dev", - "google-cloud-core >= 2.0.0, <3.0.0dev", - "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-appengine-logging>=0.1.3, <2.0.0", + "google-cloud-audit-log >= 0.2.4, < 1.0.0", + "google-cloud-core >= 2.0.0, <3.0.0", + "grpc-google-iam-v1 >=0.12.4, <1.0.0", "opentelemetry-api >= 1.9.0", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" From d92ea560705c5262dcfb3ea79429a35498304b6e Mon Sep 17 
00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 11 Mar 2025 02:40:22 -0400 Subject: [PATCH 839/855] docs: update README to break infinite redirect loop (#972) googleapis.dev link has been deprecated and redirected. Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- packages/google-cloud-logging/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-logging/README.rst b/packages/google-cloud-logging/README.rst index 84dd1e77fd90..d9549ed7dbdc 100644 --- a/packages/google-cloud-logging/README.rst +++ b/packages/google-cloud-logging/README.rst @@ -14,7 +14,7 @@ Logging configuration. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ .. _Cloud Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googleapis.dev/python/logging/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/logging/latest/summary_overview .. _Product Documentation: https://cloud.google.com/logging/docs .. _Setting Up Cloud Logging for Python: https://cloud.google.com/logging/docs/setup/python .. _Python's standard logging library: https://docs.python.org/2/library/logging.html From 3beb8cce42d755b97ad68084e17161c53361aacc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:52:55 -0400 Subject: [PATCH 840/855] chore: Update gapic-generator-python to 1.23.6 (#982) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/logging_v2/services/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- 
.../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../google/cloud/logging_v2/types/__init__.py | 2 +- .../google/cloud/logging_v2/types/log_entry.py | 2 +- .../google/cloud/logging_v2/types/logging.py | 2 +- .../google/cloud/logging_v2/types/logging_config.py | 2 +- .../google/cloud/logging_v2/types/logging_metrics.py | 2 +- ...ing_v2_generated_config_service_v2_copy_log_entries_async.py | 2 +- ...ging_v2_generated_config_service_v2_copy_log_entries_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_create_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_create_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_create_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_create_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_delete_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_delete_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ...ng_v2_generated_config_service_v2_get_cmek_settings_async.py | 2 +- ...ing_v2_generated_config_service_v2_get_cmek_settings_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_get_exclusion_async.py | 2 +- ...logging_v2_generated_config_service_v2_get_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_link_sync.py | 2 +- ...logging_v2_generated_config_service_v2_get_settings_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ging_v2_generated_config_service_v2_list_exclusions_async.py | 2 
+- ...gging_v2_generated_config_service_v2_list_exclusions_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_sync.py | 2 +- ...ging_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_update_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_update_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- ...v2_generated_config_service_v2_update_cmek_settings_async.py | 2 +- ..._v2_generated_config_service_v2_update_cmek_settings_sync.py | 2 +- ...ing_v2_generated_config_service_v2_update_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_update_exclusion_sync.py | 2 +- ...ging_v2_generated_config_service_v2_update_settings_async.py | 2 +- ...gging_v2_generated_config_service_v2_update_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ging_service_v2_list_monitored_resource_descriptors_async.py | 2 +- ...gging_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...g_v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ...ng_v2_generated_logging_service_v2_write_log_entries_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_create_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_create_log_metric_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_delete_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_delete_log_metric_sync.py | 2 +- ...ging_v2_generated_metrics_service_v2_get_log_metric_async.py | 2 +- ...gging_v2_generated_metrics_service_v2_get_log_metric_sync.py | 2 +- ...ng_v2_generated_metrics_service_v2_list_log_metrics_async.py | 2 +- ...ing_v2_generated_metrics_service_v2_list_log_metrics_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_update_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_update_log_metric_sync.py | 2 +- packages/google-cloud-logging/tests/__init__.py | 2 +- packages/google-cloud-logging/tests/unit/__init__.py | 2 +- packages/google-cloud-logging/tests/unit/gapic/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- 
.../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- 123 files changed, 123 insertions(+), 123 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index a56e06a1d6ac..187d00d52bde 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index a2a4126d922b..5d904b5b959a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 653a350aec71..39ee70613356 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index f151a7bf665e..62906815e22d 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index eb6d2764a9e0..6f8979ef819f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index fcd20e25a325..ffc8480599da 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index f83ac9b6af7c..8870e119c55a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 5047ae67f45a..2686f80e37d3 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 2f8ce7deee61..41c0dc4fab22 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 7832dda52f56..beb1891b5fbc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 4624c1af883a..bdd8783d375b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 12a5268732aa..f19ad6304089 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 668b54522433..48f0b711cd4c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 406a2b87865e..7a08a140a28e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 49bea46d6c1d..87d164a0d627 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 4e3a2af1ac40..f73ac1150491 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index d95456f174ec..41a1ef4a6ab2 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 2ecfe397b676..01ddfce894ec 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 3e4421f023e8..6e7f0b056ed0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 5963fea31b66..75fc998a2a64 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 820b7f2a2c4a..4975feb99409 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index e4fc7b1e44e9..0dd83b37be0b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0ea3179c8e75..57461b906b2f 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 68335a322f80..01aa05771d09 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py index a7e5bed5f224..efea7930762a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index df4901dc2886..e52f3085d128 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 325ec1ded454..5b46a15e1720 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 0e106779009c..6ed09222a535 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py index b437ba8cca64..dd90dd3cb6e4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index ebb0fb6e8327..f4a59244df81 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index 81e324800269..896bfe77d023 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 946976965e26..25292de9706b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 27530c87e7db..95c692aac24d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index fbca2158fb6e..d1da5bbe7eec 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 8d2cd4568caa..395188ef855d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index adeda1dbc4a3..10cb193bea2b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index e68bd72bd27b..a52541ea41c5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index ae99ac5dcdd2..8e4558480203 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index c385ec07fdb8..e1f9483944f6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 54d9ae63f6c5..e83d648a1451 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index 216f5aff4ecc..aa694125d3ef 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 505cf48d6431..f40f1f6d5fe8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 5984c5a59f7f..d4f174cf5204 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index d5be4998e62a..d56884bf396e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index 2746db10658d..0b4b38bf4491 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index ed33724d94fa..1004cf6e8362 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 706281a237f1..e3c3f0be054b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index e19a7a781251..30407d37ee0e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index 18a34126e074..8ddbbcaf3612 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index f00e20418f79..17ed0b302c69 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index 78f486498b04..bc82c3aa0be7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index bf1af9401568..ca77b619c46c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 3ef94f7a79d5..f6cd1237aea8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 4b36ba8f3266..fd2b2a966032 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index c13a534f204d..728615d94e34 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 90e7db19f00f..eb661ea337eb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 71459b5b6e09..cc0b8e49a30d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 0bf125892cec..c3a10df02df4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index afd01364f46a..0009b048b4bc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index 0fb41ff507ca..2f1e4647f098 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 0650a0dbced0..8c8d9ddc1f0f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index f40cd796fba6..41abc13acba7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index 2c35d7ed7b47..93bb910a5bca 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index b5a1f32ad51a..bdc293b5b2a8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 27fecef31d66..a4b12474db8c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 576d0f67fc53..4474f6c28fb6 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index affd70728d9e..ce568088909e 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 7ea5d3bd0306..d4aebfa953ec 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 1f78f496894e..7fd1e53dee02 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 231c07081921..199b152b9f72 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index e27b3be4f94a..7eafeb00e465 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index 3c8fdf4fd5cf..a8eee844ca3f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index 2dbd4b41bcd4..d6fde0bb70a8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index 2e73bbab99c7..33e10deb2698 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 971da2b3d3dd..162119bc1f7d 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 0324db46320a..43e8352592c7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 7fc0350e0165..dbddce32c0f9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index eba1b485dffb..7650a542ff98 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index a4f4ee0686a1..c320d118e26c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index fd9c7c9cc357..4566ea26a1ad 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index ea9cf9dee5e8..f127c99042b4 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 148fdc440dff..550aed7fba5b 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 1093d553f46e..bfa58357d203 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index e8ef2a1a574c..22e799e9d892 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 71ce93d619e9..766376e0e227 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index bfdee5a1d333..a19fbea8cea7 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index e90b2f0e3556..3ba3a8f24a80 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index ac1601fb83e3..84a3dd245640 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 110f14903883..3754cd2e7757 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index d71cd2e3d7b3..f2c54208bf04 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index 27884e87287d..6238f9a10d98 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 25eed782e618..9ba5fc19f589 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index b72847a7706e..97a2ebe4d7d8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 8122bbb6f960..89495ff0be58 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index c77abc3bc883..24e4cd924bc8 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 2a3b80a7c64e..e5226e985187 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 4b51c26537b6..3fb660f90254 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 3b1d6b8d76a4..4e3153fd6e05 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 54ee4886d33f..fa44c8cc7c81 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 26ff77764849..0545206073f9 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index e83497a80167..cd404c1e17d5 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 904bd1400fcd..81eb975b99cb 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 9f11c2f2dcd2..5d91ceb7ee3f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 0ee78a31cb84..300dd78198fc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 136677f0b41c..7b16177ce634 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index c0fbe4247dbf..8ea8a849bbce 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index 122a776d5448..f67758eae2cc 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index 64c85005273b..68a5e6c8d7f0 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index f1be9234d569..462f8bc32e0f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 530611d0c1d2..9eeea97fcf89 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index adfab558f7e5..cf60c346de0f 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 0ee2265c28e3..f7bc654c8c81 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index fa9a650c26bf..4ae9a795e99c 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index dc0a60d7c609..2c3669bca0ee 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 8baebc548d56..2fed9c39e114 100644 --- a/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/google-cloud-logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/__init__.py b/packages/google-cloud-logging/tests/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-logging/tests/__init__.py +++ b/packages/google-cloud-logging/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/__init__.py b/packages/google-cloud-logging/tests/unit/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-logging/tests/unit/__init__.py +++ b/packages/google-cloud-logging/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index e29dc2a27ef0..73a8f5d32330 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0ae4344deb27..ef3833740586 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index b63f3b750d87..7c59a09f1e52 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 90b6fdae616e8b42c250772d814dc2731b8e077a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 26 Mar 2025 13:12:59 -0400 Subject: [PATCH 841/855] fix(deps): require google-cloud-audit-log >= 0.3.1 (#979) * fix(deps): require google-cloud-audit-log >= 0.3.1 * update constraints-3.8.txt * remove dev --- packages/google-cloud-logging/setup.py | 2 +- packages/google-cloud-logging/testing/constraints-3.7.txt | 2 +- packages/google-cloud-logging/testing/constraints-3.8.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/setup.py b/packages/google-cloud-logging/setup.py index 6011b09a92fd..c80db0467b8b 100644 --- a/packages/google-cloud-logging/setup.py +++ b/packages/google-cloud-logging/setup.py @@ -41,7 +41,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "google-cloud-appengine-logging>=0.1.3, <2.0.0", - "google-cloud-audit-log >= 0.2.4, < 1.0.0", + "google-cloud-audit-log >= 0.3.1, < 1.0.0", "google-cloud-core >= 2.0.0, <3.0.0", "grpc-google-iam-v1 >=0.12.4, <1.0.0", "opentelemetry-api >= 1.9.0", diff --git a/packages/google-cloud-logging/testing/constraints-3.7.txt b/packages/google-cloud-logging/testing/constraints-3.7.txt index d3ab26cf2be1..113004889e39 100644 --- a/packages/google-cloud-logging/testing/constraints-3.7.txt +++ b/packages/google-cloud-logging/testing/constraints-3.7.txt @@ -10,7 +10,7 @@ proto-plus==1.22.0 protobuf==3.20.2 google-cloud-core==2.0.0 google-cloud-appengine-logging==0.1.3 -google-cloud-audit-log==0.2.4 +google-cloud-audit-log==0.3.1 grpc-google-iam-v1==0.12.4 opentelemetry-api==1.9.0 diff --git a/packages/google-cloud-logging/testing/constraints-3.8.txt b/packages/google-cloud-logging/testing/constraints-3.8.txt index 443e69ae2e2f..76b620077070 100644 --- a/packages/google-cloud-logging/testing/constraints-3.8.txt +++ b/packages/google-cloud-logging/testing/constraints-3.8.txt @@ -7,7 +7,7 @@ proto-plus==1.22.0 protobuf==4.21.6 google-cloud-core==2.0.0 google-cloud-appengine-logging==0.1.3 -google-cloud-audit-log==0.2.4 +google-cloud-audit-log==0.3.1 grpc-google-iam-v1==0.12.4 opentelemetry-api==1.9.0 From 432bf99cec113ae2694d909b8a9fa143ba329cf0 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Mon, 31 Mar 2025 17:23:53 -0400 Subject: [PATCH 842/855] feat: Added flushes/close functionality to logging handlers (#917) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Added flushes/close functionality to logging handlers * Fixed unit test issues. 
* linting * more linting * Addressed code review feedback * Refactored _close * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Added system tests * make transport_open private * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/logging_v2/client.py | 15 ++ .../cloud/logging_v2/handlers/handlers.py | 24 +++ .../handlers/transports/background_thread.py | 58 +++++-- .../logging_v2/handlers/transports/base.py | 8 + .../logging_v2/handlers/transports/sync.py | 7 + .../tests/system/test_system.py | 67 +++++++ .../tests/unit/handlers/test_handlers.py | 81 +++++++++ .../transports/test_background_thread.py | 143 +++++++++++++-- .../unit/handlers/transports/test_base.py | 4 + .../tests/unit/test_client.py | 164 ++++++++++++++++++ 10 files changed, 547 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/client.py index 1b5beeb245d6..f52845ee5e1a 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/client.py @@ -149,6 +149,8 @@ def __init__( else: self._use_grpc = _use_grpc + self._handlers = set() + @property def logging_api(self): """Helper for logging-related API calls. @@ -411,4 +413,17 @@ def setup_logging( dict: keyword args passed to handler constructor """ handler = self.get_default_handler(**kw) + self._handlers.add(handler) setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) + + def flush_handlers(self): + """Flushes all Python log handlers associated with this Client.""" + + for handler in self._handlers: + handler.flush() + + def close(self): + """Closes the Client and all handlers associated with this Client.""" + super(Client, self).close() + for handler in self._handlers: + handler.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index e71f673f7e3f..364246d5852c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -188,7 +188,10 @@ def __init__( resource = detect_resource(client.project) self.name = name self.client = client + client._handlers.add(self) self.transport = transport(client, name, resource=resource) + self._transport_open = True + self._transport_cls = transport self.project_id = client.project self.resource = resource self.labels = labels @@ -213,6 +216,12 @@ def emit(self, record): labels = {**add_resource_labels(resource, record), **(labels or {})} or None # send off request + if not self._transport_open: + self.transport = self._transport_cls( + self.client, self.name, resource=self.resource + ) + self._transport_open = True + self.transport.send( record, message, @@ -225,6 +234,21 @@ def emit(self, record): source_location=record._source_location, ) + def flush(self): + """Forces the Transport object to submit any pending log records. + + For SyncTransport, this is a no-op. 
+ """ + super(CloudLoggingHandler, self).flush() + if self._transport_open: + self.transport.flush() + + def close(self): + """Closes the log handler and cleans up all Transport objects used.""" + self.transport.close() + self.transport = None + self._transport_open = False + def _format_and_parse_message(record, formatter_handler): """ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py index 7cf2799f59c3..021112fdbbc0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -38,6 +38,13 @@ _WORKER_TERMINATOR = object() _LOGGER = logging.getLogger(__name__) +_CLOSE_THREAD_SHUTDOWN_ERROR_MSG = ( + "CloudLoggingHandler shutting down, cannot send logs entries to Cloud Logging due to " + "inconsistent threading behavior at shutdown. To avoid this issue, flush the logging handler " + "manually or switch to StructuredLogHandler. You can also close the CloudLoggingHandler manually " + "via handler.close or client.close." +) + def _get_many(queue_, *, max_items=None, max_latency=0): """Get multiple items from a Queue. @@ -140,9 +147,11 @@ def _thread_main(self): else: batch.log(**item) - self._safely_commit_batch(batch) + # We cannot commit logs upstream if the main thread is shutting down + if threading.main_thread().is_alive(): + self._safely_commit_batch(batch) - for _ in items: + for it in items: self._queue.task_done() _LOGGER.debug("Background thread exited gracefully.") @@ -162,7 +171,7 @@ def start(self): ) self._thread.daemon = True self._thread.start() - atexit.register(self._main_thread_terminated) + atexit.register(self._handle_exit) def stop(self, *, grace_period=None): """Signals the background thread to stop. @@ -202,26 +211,26 @@ def stop(self, *, grace_period=None): return success - def _main_thread_terminated(self): - """Callback that attempts to send pending logs before termination.""" + def _close(self, close_msg): + """Callback that attempts to send pending logs before termination if the main thread is alive.""" if not self.is_alive: return if not self._queue.empty(): - print( - "Program shutting down, attempting to send %d queued log " - "entries to Cloud Logging..." % (self._queue.qsize(),), - file=sys.stderr, - ) + print(close_msg, file=sys.stderr) - if self.stop(grace_period=self._grace_period): + if threading.main_thread().is_alive() and self.stop( + grace_period=self._grace_period + ): print("Sent all pending logs.", file=sys.stderr) - else: + elif not self._queue.empty(): print( "Failed to send %d pending logs." % (self._queue.qsize(),), file=sys.stderr, ) + self._thread = None + def enqueue(self, record, message, **kwargs): """Queues a log entry to be written by the background thread. @@ -251,6 +260,26 @@ def flush(self): """Submit any pending log records.""" self._queue.join() + def close(self): + """Signals the worker thread to stop, then closes the transport thread. + + This call will attempt to send pending logs before termination, and + should be followed up by disowning the transport object. + """ + atexit.unregister(self._handle_exit) + self._close( + "Background thread shutting down, attempting to send %d queued log " + "entries to Cloud Logging..." % (self._queue.qsize(),) + ) + + def _handle_exit(self): + """Handle system exit. 
+ + Since we cannot send pending logs during system shutdown due to thread errors, + log an error message to stderr to notify the user. + """ + self._close(_CLOSE_THREAD_SHUTDOWN_ERROR_MSG) + class BackgroundThreadTransport(Transport): """Asynchronous transport that uses a background thread.""" @@ -285,6 +314,7 @@ def __init__( """ self.client = client logger = self.client.logger(name, resource=resource) + self.grace_period = grace_period self.worker = _Worker( logger, grace_period=grace_period, @@ -307,3 +337,7 @@ def send(self, record, message, **kwargs): def flush(self): """Submit any pending log records.""" self.worker.flush() + + def close(self): + """Closes the worker thread.""" + self.worker.close() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py index a0c9aafa4aaa..31e8f418affe 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/base.py @@ -51,3 +51,11 @@ def flush(self): For blocking/sync transports, this is a no-op. """ + pass + + def close(self): + """Closes the transport and cleans up resources used by it. + + This call should be followed up by disowning the transport. + """ + pass diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py index 17a4e554e8ea..6bf91f8da700 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/transports/sync.py @@ -59,3 +59,10 @@ def send(self, record, message, **kwargs): labels=labels, **kwargs, ) + + def close(self): + """Closes the transport and cleans up resources used by it. + + This call is usually followed up by cleaning up the reference to the transport. + """ + self.logger = None diff --git a/packages/google-cloud-logging/tests/system/test_system.py b/packages/google-cloud-logging/tests/system/test_system.py index d4ec4da36b15..487ecde62d39 100644 --- a/packages/google-cloud-logging/tests/system/test_system.py +++ b/packages/google-cloud-logging/tests/system/test_system.py @@ -34,6 +34,7 @@ import google.cloud.logging from google.cloud._helpers import UTC from google.cloud.logging_v2.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource @@ -719,6 +720,72 @@ def test_log_handler_otel_integration(self): self.assertEqual(entries[0].span_id, expected_span_id) self.assertTrue(entries[0].trace_sampled, expected_tracesampled) + def test_log_handler_close(self): + from multiprocessing import Process + + LOG_MESSAGE = "This is a test of handler.close before exiting." + LOGGER_NAME = "close-test" + handler_name = self._logger_name(LOGGER_NAME) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler_name) + self.to_delete.append(logger) + + # Run a simulation of logging an entry then immediately shutting down. + # The .close() function before the process exits should prevent the + # thread shutdown error and let us log the message. 
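For orientation, the shutdown flow this system test simulates looks roughly like the sketch below in plain application code (illustrative only, not part of the patch; the logger name is invented, and Application Default Credentials plus network access to the Logging API are assumed):

    import logging

    import google.cloud.logging
    from google.cloud.logging_v2.handlers import CloudLoggingHandler

    client = google.cloud.logging.Client()
    # CloudLoggingHandler defaults to BackgroundThreadTransport.
    handler = CloudLoggingHandler(client, name="close-demo")
    demo_logger = logging.getLogger("close-demo")
    demo_logger.addHandler(handler)
    demo_logger.warning("work finished, shutting down")
    # Drain the background worker while the main thread is still alive;
    # waiting for the atexit hook instead would only print the shutdown
    # error message introduced above and could drop the queued entry.
    handler.close()
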
+ def subprocess_main(): + # logger.delete and logger.list_entries work by filtering on log name, so we + # can create new objects with the same name and have the queries on the parent + # process still work. + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport + ) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + handler.close() + + proc = Process(target=subprocess_main) + proc.start() + proc.join() + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, LOG_MESSAGE) + + def test_log_client_flush_handlers(self): + from multiprocessing import Process + + LOG_MESSAGE = "This is a test of client.flush_handlers before exiting." + LOGGER_NAME = "close-test" + handler_name = self._logger_name(LOGGER_NAME) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler_name) + self.to_delete.append(logger) + + # Run a simulation of logging an entry then immediately shutting down. + # The client.flush_handlers() call before the process exits should flush + # all pending log entries and let us log the message. + def subprocess_main(): + # logger.delete and logger.list_entries work by filtering on log name, so we + # can create new objects with the same name and have the queries on the parent + # process still work. + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport + ) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + Config.CLIENT.flush_handlers() + + proc = Process(target=subprocess_main) + proc.start() + proc.join() + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, LOG_MESSAGE) + def test_create_metric(self): METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) metric = Config.CLIENT.metric( diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 14b2e5cba6f8..2e9484937a99 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -461,6 +461,7 @@ def test_ctor_defaults(self): self.assertEqual(handler.name, DEFAULT_LOGGER_NAME) self.assertIs(handler.client, client) self.assertIsInstance(handler.transport, _Transport) + self.assertTrue(handler._transport_open) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) global_resource = _create_global_resource(self.PROJECT) @@ -468,6 +469,17 @@ self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) + def test_add_handler_to_client_handlers(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + ) + self.assertIn(handler, client._handlers) + def test_ctor_explicit(self): import io from google.cloud.logging import Resource @@ -790,6 +802,56 @@ def test_emit_with_encoded_json(self): ), ) + def test_emit_after_close(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + logname = "loggername" + 
message = "hello world" + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + old_transport = handler.transport + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + False, + None, + None, + ), + ) + + handler.close() + self.assertFalse(handler._transport_open) + + handler.handle(record) + self.assertTrue(handler._transport_open) + self.assertNotEqual(handler.transport, old_transport) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + False, + None, + None, + ), + ) + def test_format_with_arguments(self): """ Handler should support format string arguments @@ -825,6 +887,20 @@ def test_format_with_arguments(self): ), ) + def test_close(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + ) + old_transport = handler.transport + handler.close() + self.assertFalse(handler._transport_open) + self.assertTrue(old_transport.close_called) + class TestFormatAndParseMessage(unittest.TestCase): def test_none(self): @@ -1127,12 +1203,14 @@ def release(self): class _Client(object): def __init__(self, project): self.project = project + self._handlers = set() class _Transport(object): def __init__(self, client, name, resource=None): self.client = client self.name = name + self.close_called = False def send( self, @@ -1157,3 +1235,6 @@ def send( http_request, source_location, ) + + def close(self): + self.close_called = True diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py index d4954ff7b5e0..9fdccb17289d 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_background_thread.py @@ -12,13 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import contextlib import time import logging import queue +import re import unittest import mock +from io import StringIO + class TestBackgroundThreadHandler(unittest.TestCase): PROJECT = "PROJECT" @@ -176,6 +180,11 @@ def test_worker(self): class Test_Worker(unittest.TestCase): NAME = "python_logger" + def setUp(self): + import sys + + print("In method", self._testMethodName, file=sys.stderr) + @staticmethod def _get_target_class(): from google.cloud.logging_v2.handlers.transports import background_thread @@ -187,9 +196,26 @@ def _make_one(self, *args, **kw): def _start_with_thread_patch(self, worker): with mock.patch("threading.Thread", new=_Thread) as thread_mock: - with mock.patch("atexit.register") as atexit_mock: - worker.start() - return thread_mock, atexit_mock + worker.start() + return thread_mock + + @staticmethod + @contextlib.contextmanager + def _init_atexit_mock(): + atexit_mock = _AtexitMock() + with mock.patch.multiple( + "atexit", register=atexit_mock.register, unregister=atexit_mock.unregister + ): + yield atexit_mock + + @staticmethod + @contextlib.contextmanager + def _init_main_thread_is_alive_mock(is_alive): + with mock.patch("threading.main_thread") as main_thread_func_mock: + main_thread_obj_mock = mock.Mock() + main_thread_func_mock.return_value = main_thread_obj_mock + main_thread_obj_mock.is_alive = mock.Mock(return_value=is_alive) + yield def test_constructor(self): logger = _Logger(self.NAME) @@ -216,14 +242,15 @@ def test_start(self): worker = self._make_one(_Logger(self.NAME)) - _, atexit_mock = self._start_with_thread_patch(worker) + with self._init_atexit_mock() as atexit_mock: + self._start_with_thread_patch(worker) self.assertTrue(worker.is_alive) self.assertIsNotNone(worker._thread) self.assertTrue(worker._thread.daemon) self.assertEqual(worker._thread._target, worker._thread_main) self.assertEqual(worker._thread._name, background_thread._WORKER_THREAD_NAME) - atexit_mock.assert_called_once_with(worker._main_thread_terminated) + self.assertIn(worker._handle_exit, atexit_mock.registered_funcs) # Calling start again should not start a new thread. 
current_thread = worker._thread @@ -260,29 +287,33 @@ def test_stop_no_grace(self): self.assertEqual(thread._timeout, None) - def test__main_thread_terminated(self): + def test__close(self): worker = self._make_one(_Logger(self.NAME)) self._start_with_thread_patch(worker) - worker._main_thread_terminated() + worker._close("") self.assertFalse(worker.is_alive) # Calling twice should not be an error - worker._main_thread_terminated() + worker._close("") - def test__main_thread_terminated_non_empty_queue(self): + def test__close_non_empty_queue(self): worker = self._make_one(_Logger(self.NAME)) + msg = "My Message" self._start_with_thread_patch(worker) record = mock.Mock() record.created = time.time() worker.enqueue(record, "") - worker._main_thread_terminated() + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + worker._close(msg) + self.assertIn(msg, stderr_mock.getvalue()) self.assertFalse(worker.is_alive) - def test__main_thread_terminated_did_not_join(self): + def test__close_did_not_join(self): worker = self._make_one(_Logger(self.NAME)) self._start_with_thread_patch(worker) @@ -290,7 +321,65 @@ def test__main_thread_terminated_did_not_join(self): record = mock.Mock() record.created = time.time() worker.enqueue(record, "") - worker._main_thread_terminated() + worker._close("") + + self.assertFalse(worker.is_alive) + + def test__handle_exit(self): + from google.cloud.logging_v2.handlers.transports.background_thread import ( + _CLOSE_THREAD_SHUTDOWN_ERROR_MSG, + ) + + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_main_thread_is_alive_mock(False): + with self._init_atexit_mock(): + self._start_with_thread_patch(worker) + self._enqueue_record(worker, "test") + worker._handle_exit() + + self.assertRegex( + stderr_mock.getvalue(), + re.compile("^%s$" % _CLOSE_THREAD_SHUTDOWN_ERROR_MSG, re.MULTILINE), + ) + + self.assertRegex( + stderr_mock.getvalue(), + re.compile( + r"^Failed to send %d pending logs\.$" % worker._queue.qsize(), + re.MULTILINE, + ), + ) + + def test__handle_exit_no_items(self): + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_main_thread_is_alive_mock(False): + with self._init_atexit_mock(): + self._start_with_thread_patch(worker) + worker._handle_exit() + + self.assertEqual(stderr_mock.getvalue(), "") + + def test_close_unregister_atexit(self): + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_atexit_mock() as atexit_mock: + self._start_with_thread_patch(worker) + self.assertIn(worker._handle_exit, atexit_mock.registered_funcs) + worker.close() + self.assertNotIn(worker._handle_exit, atexit_mock.registered_funcs) + + self.assertNotRegex( + stderr_mock.getvalue(), + re.compile( + r"^Failed to send %d pending logs\.$" % worker._queue.qsize(), + re.MULTILINE, + ), + ) self.assertFalse(worker.is_alive) @@ -402,6 +491,23 @@ def test__thread_main_batches(self): self.assertFalse(worker._cloud_logger._batch.commit_called) self.assertEqual(worker._queue.qsize(), 0) + def test__thread_main_main_thread_terminated(self): + from google.cloud.logging_v2.handlers.transports import background_thread + + worker = self._make_one(_Logger(self.NAME)) + self._enqueue_record(worker, "1") + worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) + + with mock.patch("threading.main_thread") as 
main_thread_func_mock: + main_thread_obj_mock = mock.Mock() + main_thread_func_mock.return_value = main_thread_obj_mock + main_thread_obj_mock.is_alive = mock.Mock(return_value=False) + self._enqueue_record(worker, "1") + self._enqueue_record(worker, "2") + worker._thread_main() + + self.assertFalse(worker._cloud_logger._batch.commit_called) + @mock.patch("time.time", autospec=True, return_value=1) def test__thread_main_max_latency(self, time): # Note: this test is a bit brittle as it assumes the operation of @@ -565,3 +671,16 @@ def __init__(self, project, _http=None, credentials=None): def logger(self, name, resource=None): # pylint: disable=unused-argument self._logger = _Logger(name, resource=resource) return self._logger + + +class _AtexitMock(object): + """_AtexitMock is a simulation of registering/unregistering functions in atexit using a dummy set.""" + + def __init__(self): + self.registered_funcs = set() + + def register(self, func): + self.registered_funcs.add(func) + + def unregister(self, func): + self.registered_funcs.remove(func) diff --git a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py index a0013cadf14b..b723db87b855 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py +++ b/packages/google-cloud-logging/tests/unit/handlers/transports/test_base.py @@ -38,3 +38,7 @@ def test_resource_is_valid_argunent(self): def test_flush_is_abstract_and_optional(self): target = self._make_one("client", "name") target.flush() + + def test_close_is_abstract_and_optional(self): + target = self._make_one("client", "name") + target.close() diff --git a/packages/google-cloud-logging/tests/unit/test_client.py b/packages/google-cloud-logging/tests/unit/test_client.py index 2d12a283e74b..6a9a7fd840bc 100644 --- a/packages/google-cloud-logging/tests/unit/test_client.py +++ b/packages/google-cloud-logging/tests/unit/test_client.py @@ -842,6 +842,7 @@ def test_setup_logging(self): (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) + self.assertIn(handler, client._handlers) handler.transport.worker.stop() @@ -882,6 +883,7 @@ def test_setup_logging_w_extra_kwargs(self): self.assertEqual(handler.name, name) self.assertEqual(handler.resource, resource) self.assertEqual(handler.labels, labels) + self.assertIn(handler, client._handlers) handler.transport.worker.stop() @@ -929,6 +931,168 @@ def test_setup_logging_w_extra_kwargs_structured_log(self): "log_level": 20, } self.assertEqual(kwargs, expected_kwargs) + self.assertIn(handler, client._handlers) + + def test_flush_handlers_cloud_logging_handler(self): + import io + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource + + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, CloudLoggingHandler) + + handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def 
test_flush_handlers_cloud_logging_handler_no_setup_logging(self): + from google.cloud.logging.handlers import CloudLoggingHandler + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + handler = CloudLoggingHandler(client) + self.assertIn(handler, client._handlers) + + handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def test_flush_handlers_structured_log(self): + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE + + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def test_close_cloud_logging_handler(self): + import contextlib + import io + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource + + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, CloudLoggingHandler) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + + handler.close.assert_called_once_with() + + def test_close_cloud_logging_handler_no_setup_logging(self): + import contextlib + from google.cloud.logging.handlers import CloudLoggingHandler + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + handler = CloudLoggingHandler(client) + self.assertIn(handler, client._handlers) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + + handler.close.assert_called_once_with() + + def test_close_structured_log_handler(self): + import contextlib + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE + + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + 
self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + + handler.close.assert_called_once_with() class _Connection(object): From be42ddf05ddeb55d5ecf58c3fe08150ffdcbf200 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 13:04:58 -0400 Subject: [PATCH 843/855] chore(python): remove .flake8 configuration file in templates (#983) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): remove .flake8 configuration file in templates Source-Link: https://github.com/googleapis/synthtool/commit/fe66b0b76d54b7a54290160a000bdd7efb869c73 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ecf409a43d8b157fb83c403de4d83e3da7d88e423044410c0e2434bf776221d1 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Remove replacement in owlbot.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-logging/owlbot.py | 7 ------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml index c631e1f7d7e9..6f1eaeb91e9c 100644 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 + digest: sha256:ecf409a43d8b157fb83c403de4d83e3da7d88e423044410c0e2434bf776221d1 +# created: 2025-04-10T16:21:41.67162455Z diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 7dd3385e0c90..4c9acca9f4a0 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -126,13 +126,6 @@ def place_before(path, text, *before_text, escape=None): 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"', ) -# don't lint environment tests -s.replace( - ".flake8", - "exclude =", - "exclude =\n # Exclude environment test code.\n tests/environment/**\n", -) - # use conventional commits for renovate bot s.replace( "renovate.json", From 4aeff82ce03675637a9747726bcaee1c0f05a6f8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 09:36:45 -0700 Subject: [PATCH 844/855] chore(main): release 3.12.0 (#973) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 22 +++++++++++++++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 26 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index 5256172d0efb..ab95c4e24f9b 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.4" + ".": "3.12.0" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index 18201e5aaa36..c6161ca504c1 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.12.0](https://github.com/googleapis/python-logging/compare/v3.11.4...v3.12.0) (2025-04-10) + + +### Features + +* Add REST Interceptors which support reading metadata ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) +* Add support for opt-in debug logging ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) +* Added flushes/close functionality to logging handlers ([#917](https://github.com/googleapis/python-logging/issues/917)) ([d179304](https://github.com/googleapis/python-logging/commit/d179304b344277e349456f72cd90c56f28011286)) + + +### Bug Fixes + +* Allow protobuf 6.x ([#977](https://github.com/googleapis/python-logging/issues/977)) ([6757890](https://github.com/googleapis/python-logging/commit/675789001344fdae68ee20ec14e14c11c83a0433)) +* **deps:** Require google-cloud-audit-log >= 0.3.1 ([#979](https://github.com/googleapis/python-logging/issues/979)) ([1cc00ec](https://github.com/googleapis/python-logging/commit/1cc00ecf646a7a36eb32afd2e5df3d9aa7f564b1)) +* Fix typing issue with gRPC metadata when key ends in -bin ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) + + +### Documentation + +* Added documentation on log_level and 
excluded_loggers params in setup_logging ([#971](https://github.com/googleapis/python-logging/issues/971)) ([70d9d25](https://github.com/googleapis/python-logging/commit/70d9d25bf8c3c85a3c5523ecc7fbdbf72f08c583))
+* Update README to break infinite redirect loop ([#972](https://github.com/googleapis/python-logging/issues/972)) ([52cd907](https://github.com/googleapis/python-logging/commit/52cd907bb313df2766ec11e3d24c7e10cda31ca7))
+
 ## [3.11.4](https://github.com/googleapis/python-logging/compare/v3.11.3...v3.11.4) (2025-01-22)

diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py
index 846b83eacf1d..b5a6e376680a 100644
--- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py
+++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "3.11.4" # {x-release-please-version}
+__version__ = "3.12.0" # {x-release-please-version}
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py
index 846b83eacf1d..b5a6e376680a 100644
--- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py
+++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "3.11.4" # {x-release-please-version}
+__version__ = "3.12.0" # {x-release-please-version}
diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
index 50c444f70b85..3132ff272399 100644
--- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
+++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-logging",
-    "version": "0.1.0"
+    "version": "3.12.0"
   },
   "snippets": [
     {

From 91962c123f0fa74772ec824424e5c11a1238a0a9 Mon Sep 17 00:00:00 2001
From: Oscar Torreno
Date: Mon, 21 Apr 2025 19:26:05 +0200
Subject: [PATCH 845/855] fix: make logging handler close conditional to having the transport opened (#990)

Release 3.12.0 included the changes introduced in #917. The newly
introduced close method seems to be called by the App Engine Python
runtime at shutdown, so if you call it explicitly before the runtime
does, the close function throws an exception because the transport is
None.
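In effect, the change below makes `close()` idempotent: the first call tears the transport down, and any later call is a no-op. A minimal, self-contained sketch of the guard pattern, assuming only `_transport_open` gates the teardown (`FakeTransport` is illustrative, not a class from the library):

    # Self-contained sketch of the idempotent-close guard; FakeTransport
    # stands in for the real classes in
    # google.cloud.logging_v2.handlers.transports.
    class FakeTransport:
        def close(self):
            print("transport closed")


    class Handler:
        def __init__(self):
            self.transport = FakeTransport()
            self._transport_open = True

        def close(self):
            # Tear down the transport only on the first call, so a second
            # call (e.g. from the App Engine runtime at shutdown) is a
            # harmless no-op instead of an AttributeError on None.
            if self._transport_open:
                self.transport.close()
                self.transport = None
                self._transport_open = False


    handler = Handler()
    handler.close()  # closes the transport
    handler.close()  # safe: does nothing the second time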
--- .../google/cloud/logging_v2/handlers/handlers.py | 7 ++++--- .../tests/unit/handlers/test_handlers.py | 4 ++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py index 364246d5852c..233d9eab36b5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/handlers/handlers.py @@ -245,9 +245,10 @@ def flush(self): def close(self): """Closes the log handler and cleans up all Transport objects used.""" - self.transport.close() - self.transport = None - self._transport_open = False + if self._transport_open: + self.transport.close() + self.transport = None + self._transport_open = False def _format_and_parse_message(record, formatter_handler): diff --git a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py index 2e9484937a99..3f25929e2b5b 100644 --- a/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py +++ b/packages/google-cloud-logging/tests/unit/handlers/test_handlers.py @@ -901,6 +901,10 @@ def test_close(self): self.assertFalse(handler._transport_open) self.assertTrue(old_transport.close_called) + # second call to close shouldn't throw an exception + handler.close() + self.assertFalse(handler._transport_open) + class TestFormatAndParseMessage(unittest.TestCase): def test_none(self): From 2499a9360630266558299d4faa3b0979dd0df876 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 16:39:55 -0400 Subject: [PATCH 846/855] chore(main): release 3.12.1 (#992) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-logging/.release-please-manifest.json | 2 +- packages/google-cloud-logging/CHANGELOG.md | 7 +++++++ .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json index ab95c4e24f9b..d235af2ce8fc 100644 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ b/packages/google-cloud-logging/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.12.0" + ".": "3.12.1" } diff --git a/packages/google-cloud-logging/CHANGELOG.md b/packages/google-cloud-logging/CHANGELOG.md index c6161ca504c1..1f98b01a8642 100644 --- a/packages/google-cloud-logging/CHANGELOG.md +++ b/packages/google-cloud-logging/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.12.1](https://github.com/googleapis/python-logging/compare/v3.12.0...v3.12.1) (2025-04-21) + + +### Bug Fixes + +* Make logging handler close conditional to having the transport opened ([#990](https://github.com/googleapis/python-logging/issues/990)) ([66c6b91](https://github.com/googleapis/python-logging/commit/66c6b91725eb479a0af138a2be13f3c25f369d7e)) + ## [3.12.0](https://github.com/googleapis/python-logging/compare/v3.11.4...v3.12.0) (2025-04-10) diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index b5a6e376680a..14833215c203 100644 
--- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.12.0" # {x-release-please-version} +__version__ = "3.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index b5a6e376680a..14833215c203 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.12.0" # {x-release-please-version} +__version__ = "3.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 3132ff272399..7d77545ab288 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.12.0" + "version": "3.12.1" }, "snippets": [ { From 711dfc2c95cd194d2bf8e99ae95e28e1262f6b0d Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 21 May 2025 16:23:30 -0400 Subject: [PATCH 847/855] test: Added cleanup of old sink storage buckets (#991) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: Added cleanup of old sink storage buckets * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add list_buckets threshold --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../samples/snippets/export_test.py | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-logging/samples/snippets/export_test.py b/packages/google-cloud-logging/samples/snippets/export_test.py index c21fab2daef1..845359e09f69 100644 --- a/packages/google-cloud-logging/samples/snippets/export_test.py +++ b/packages/google-cloud-logging/samples/snippets/export_test.py @@ -19,7 +19,7 @@ import time import backoff -from google.cloud import logging +from google.cloud import logging, storage import pytest import export @@ -34,6 +34,10 @@ # old sink, in seconds CLEANUP_THRESHOLD = 7200 # 2 hours +# Max buckets to delete at a time, to mitigate operation timeout +# issues. To turn off in the future, set to None. 
+MAX_BUCKETS = 1500 + def _random_id(): return "".join( @@ -46,8 +50,8 @@ def _create_sink_name(): @backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False) -def _delete_sink(sink): - sink.delete() +def _delete_object(obj): + obj.delete() # Runs once for entire test suite @@ -62,7 +66,20 @@ def cleanup_old_sinks(): if match: sink_timestamp = int(match.group(1)) if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD: - _delete_sink(sink) + _delete_object(sink) + + storage_client = storage.Client() + + # See _sink_storage_setup in usage_guide.py for details about how + # sinks are named. + test_bucket_name_regex = r"^sink\-storage\-(\d+)$" + for bucket in storage_client.list_buckets(max_results=MAX_BUCKETS): + match = re.match(test_bucket_name_regex, bucket.name) + if match: + # Bucket timestamp is int(time.time() * 1000) + bucket_timestamp = int(match.group(1)) + if TIMESTAMP - bucket_timestamp // 1000 > CLEANUP_THRESHOLD: + _delete_object(bucket) @pytest.fixture @@ -79,7 +96,7 @@ def example_sink(cleanup_old_sinks): yield sink - _delete_sink(sink) + _delete_object(sink) def test_list(example_sink, capsys): @@ -99,7 +116,7 @@ def test_create(capsys): export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. finally: - _delete_sink(logging.Client().sink(sink_name)) + _delete_object(logging.Client().sink(sink_name)) out, _ = capsys.readouterr() assert sink_name in out From b98c0dc610f28387b97af041ab046d388f7a8dd8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 10:26:17 -0400 Subject: [PATCH 848/855] chore: Update gapic-generator-python to 1.25.0 (#985) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.24.0 PiperOrigin-RevId: 747419463 Source-Link: https://github.com/googleapis/googleapis/commit/340579bf7f97ba56cda0c70176dc5b03a8357667 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e8997ec5136ecb6ed9a969a4c2f13b3ab6a17c12 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTg5OTdlYzUxMzZlY2I2ZWQ5YTk2OWE0YzJmMTNiM2FiNmExN2MxMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to 1.24.1 PiperOrigin-RevId: 748739072 Source-Link: https://github.com/googleapis/googleapis/commit/b947e523934dbac5d97613d8aa08e04fc38c5fb6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8c5821aa65a921d59b3f7653d6f37c9c67410c2f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGM1ODIxYWE2NWE5MjFkNTliM2Y3NjUzZDZmMzdjOWM2NzQxMGMyZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to 1.25.0 PiperOrigin-RevId: 755914147 Source-Link: https://github.com/googleapis/googleapis/commit/97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a9977efedc836ccece1f01d529b0315e1efe52ad Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTk5NzdlZmVkYzgzNmNjZWNlMWYwMWQ1MjliMDMxNWUxZWZlNTJhZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove docs/multiprocessing.rst * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove 
docs/multiprocessing.rst --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-logging/.flake8 | 18 ++++++++---------- .../services/config_service_v2/async_client.py | 4 ++++ .../services/config_service_v2/client.py | 3 +++ .../config_service_v2/transports/base.py | 4 ++++ .../config_service_v2/transports/grpc.py | 3 +-- .../logging_service_v2/async_client.py | 4 ++++ .../services/logging_service_v2/client.py | 3 +++ .../logging_service_v2/transports/base.py | 4 ++++ .../logging_service_v2/transports/grpc.py | 3 +-- .../metrics_service_v2/async_client.py | 4 ++++ .../services/metrics_service_v2/client.py | 3 +++ .../metrics_service_v2/transports/base.py | 4 ++++ .../metrics_service_v2/transports/grpc.py | 3 +-- packages/google-cloud-logging/owlbot.py | 1 + 14 files changed, 45 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-logging/.flake8 b/packages/google-cloud-logging/.flake8 index d93385ea1ac7..90316de21489 100644 --- a/packages/google-cloud-logging/.flake8 +++ b/packages/google-cloud-logging/.flake8 @@ -1,31 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues ignore = E203, E231, E266, E501, W503 exclude = - # Exclude environment test code. - tests/environment/** - - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
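The client and transport diffs that follow repeat one small pattern: record the protobuf runtime version on the module-level client-info object, guarded by `hasattr` so that older `google.api_core` releases whose `ClientInfo` lacks the field keep working. A hedged sketch of the pattern in isolation (the `"0.0.0"` version string is a placeholder, not the library's real version):

    # Sketch of the version-reporting guard repeated across the generated
    # clients below; "0.0.0" is a placeholder gapic version.
    import google.protobuf
    from google.api_core.gapic_v1.client_info import ClientInfo

    DEFAULT_CLIENT_INFO = ClientInfo(gapic_version="0.0.0")

    # Older google.api_core releases don't define this attribute, so only
    # set it when the installed ClientInfo supports it.
    if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):
        DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__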
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 5d904b5b959a..ad681a9c75a8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -4223,5 +4224,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("ConfigServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 39ee70613356..6c97c65568d0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -4678,5 +4679,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("ConfigServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index ffc8480599da..db7b93b853b8 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore @@ -35,6 +36,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8870e119c55a..4dee4e647cb9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ 
b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -73,12 +73,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.logging.v2.ConfigServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index beb1891b5fbc..8de5078450fa 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -40,6 +40,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1263,5 +1264,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("LoggingServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index bdd8783d375b..22318f07a9c0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -47,6 +47,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1678,5 +1679,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("LoggingServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 7a08a140a28e..7f7cfe9a23d9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore @@ -34,6 +35,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, 
"protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 87d164a0d627..7bffe25b68bf 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -72,12 +72,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.logging.v2.LoggingServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 01ddfce894ec..129fc055b6d0 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1110,5 +1111,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("MetricsServiceV2AsyncClient",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 6e7f0b056ed0..f2f0f8ce1087 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1526,5 +1527,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("MetricsServiceV2Client",) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 0dd83b37be0b..22bc19736a51 100644 --- 
a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore @@ -34,6 +35,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 57461b906b2f..fe0943a948b1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -72,12 +72,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.logging.v2.MetricsServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 4c9acca9f4a0..2be8464c2c68 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -73,6 +73,7 @@ def place_before(path, text, *before_text, escape=None): "google/cloud/logging_v2/__init__.py", "docs/index.rst", "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead + "docs/multiprocessing.rst", "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer ], ) From e60c043e6235671e0279e256fbffbe2816d8f330 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 23 May 2025 10:26:58 -0400 Subject: [PATCH 849/855] fix: remove setup.cfg configuration for creating universal wheels (#981) --- packages/google-cloud-logging/setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 packages/google-cloud-logging/setup.cfg diff --git a/packages/google-cloud-logging/setup.cfg b/packages/google-cloud-logging/setup.cfg deleted file mode 100644 index 052350089505..000000000000 --- a/packages/google-cloud-logging/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 From ee0878f5fb3a4d48b95b3fa5dc3cf988afafc30b Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Wed, 5 Nov 2025 17:37:05 -0800 Subject: [PATCH 850/855] test: ignore DeprecationWarning for `credentials_file` argument and Python versions (#1058) * test: ignore DeprecationWarning for `credentials_file` argument * add more ignores * typo * add ignore * remove python 3.7 and 3.8 from kokoro unit test * change DEFAULT_PYTHON_VERSION to 3.10 --- packages/google-cloud-logging/noxfile.py | 4 +--- packages/google-cloud-logging/pytest.ini | 21 +++++++++++++++------ 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index b75e78ac3f09..e71504756b3a 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -32,11 +32,9 @@ ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_PYTHON_VERSION = "3.10" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", diff --git a/packages/google-cloud-logging/pytest.ini b/packages/google-cloud-logging/pytest.ini index 2d8ce14b8cb5..126bafe9301d 100644 --- a/packages/google-cloud-logging/pytest.ini +++ b/packages/google-cloud-logging/pytest.ini @@ -9,17 +9,22 @@ filterwarnings = ignore:.*pkg_resources is deprecated as an API:DeprecationWarning # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed ignore:unclosed:ResourceWarning - # Remove after support for Python 3.7 is dropped - ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code - # 3.7 deprecation warnings + ignore:Importing 'itsdangerous.json' is deprecated and will be removed in ItsDangerous 2.1:DeprecationWarning + # 3.7 deprecation warnings, remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning - # 3.8 - 3.9 deprecation warnings - ignore:Importing 'itsdangerous.json' is deprecated and will be removed in ItsDangerous 2.1. 
Use Python's 'json' module instead.:DeprecationWarning + ignore:You are using a non-supported Python version \(3\.7:FutureWarning + # 3.8 deprecation warnings, remove after support for Python 3.8 is dropped + ignore:You are using a non-supported Python version \(3\.8:FutureWarning + # 3.9 deprecation warnings, remove after support for Python 3.9 is dropped + ignore:You are using a Python version \(3\.9:FutureWarning + # 3.10 deprecation warnings, remove after support for Python 3.10 is dropped + ignore:You are using a Python version \(3\.10:FutureWarning ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3, and in 3.10 it will stop working:DeprecationWarning - # 3.12 deprecation warnings + # 3.12 deprecation warnings, remove after support for Python 3.12 is dropped ignore:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning ignore:ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning @@ -27,3 +32,7 @@ filterwarnings = ignore:.*Please use message_factory.GetMessageClass\(\) instead. SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/2046 is fixed ignore:coroutine 'AsyncMockMixin._execute_mock_call' was never awaited:RuntimeWarning + # Remove when `credentials_file` argument is removed + ignore:The `credentials_file` argument is deprecated:DeprecationWarning + # Remove when load_credentials_from_file method is removed + ignore:The load_credentials_from_file method is deprecated:DeprecationWarning From a55d1c1b8d2f531b73c89f0a07ebab62dee9b46a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Nov 2025 20:25:33 -0500 Subject: [PATCH 851/855] chore: update Python generator version to 1.25.1 (#1003) - [ ] Regenerate this pull request now. 
fix: Deprecate credentials_file argument chore: Update gapic-generator-python to 1.28.0 PiperOrigin-RevId: 816753840 Source-Link: https://github.com/googleapis/googleapis/commit/d06cf27a47074d1de3fde6f0ca48680a96229306 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a524e7310882bbb99bfe1399b18bed328979211c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTUyNGU3MzEwODgyYmJiOTliZmUxMzk5YjE4YmVkMzI4OTc5MjExYyJ9 BEGIN_NESTED_COMMIT chore: Update gapic-generator-python to 1.26.2 PiperOrigin-RevId: 802200836 Source-Link: https://github.com/googleapis/googleapis/commit/d300b151a973ce0425ae4ad07b3de957ca31bec6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a1ff0ae72ddcb68a259215d8c77661e2cdbb9b02 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTFmZjBhZTcyZGRjYjY4YTI1OTIxNWQ4Yzc3NjYxZTJjZGJiOWIwMiJ9 END_NESTED_COMMIT BEGIN_NESTED_COMMIT chore: update Python generator version to 1.25.1 PiperOrigin-RevId: 800535761 Source-Link: https://github.com/googleapis/googleapis/commit/4cf1f99cccc014627af5e8a6c0f80a3e6ec0d268 Source-Link: https://github.com/googleapis/googleapis-gen/commit/133d25b68e712116e1c5dc71fc3eb3c5e717022a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTMzZDI1YjY4ZTcxMjExNmUxYzVkYzcxZmMzZWIzYzVlNzE3MDIyYSJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../config_service_v2/async_client.py | 11 +-- .../services/config_service_v2/client.py | 11 +-- .../config_service_v2/transports/base.py | 5 +- .../config_service_v2/transports/grpc.py | 8 ++- .../transports/grpc_asyncio.py | 8 ++- .../logging_service_v2/async_client.py | 41 +++++------ .../services/logging_service_v2/client.py | 41 +++++------ .../logging_service_v2/transports/base.py | 5 +- .../logging_service_v2/transports/grpc.py | 8 ++- .../transports/grpc_asyncio.py | 8 ++- .../metrics_service_v2/async_client.py | 1 + .../services/metrics_service_v2/client.py | 1 + .../metrics_service_v2/transports/base.py | 5 +- .../metrics_service_v2/transports/grpc.py | 8 ++- .../transports/grpc_asyncio.py | 8 ++- .../cloud/logging_v2/types/log_entry.py | 8 +-- .../google/cloud/logging_v2/types/logging.py | 72 +++++++++---------- .../cloud/logging_v2/types/logging_config.py | 22 +++--- packages/google-cloud-logging/owlbot.py | 10 +-- .../snippet_metadata_google.logging.v2.json | 2 +- .../logging_v2/test_config_service_v2.py | 1 + .../logging_v2/test_logging_service_v2.py | 1 + .../logging_v2/test_metrics_service_v2.py | 1 + 23 files changed, 152 insertions(+), 134 deletions(-) diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index ad681a9c75a8..1ecc59542204 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import uuid from google.cloud.logging_v2 import gapic_version as package_version @@ -2967,7 +2968,7 @@ async def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. 
@@ -3110,7 +3111,7 @@ async def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3267,7 +3268,7 @@ async def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3505,7 +3506,7 @@ async def sample_get_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ @@ -3619,7 +3620,7 @@ async def sample_update_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py index 6c97c65568d0..fd859222076e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -32,6 +32,7 @@ Union, cast, ) +import uuid import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -3431,7 +3432,7 @@ def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3571,7 +3572,7 @@ def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3725,7 +3726,7 @@ def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3957,7 +3958,7 @@ def sample_get_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ @@ -4069,7 +4070,7 @@ def sample_update_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. 
""" diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index db7b93b853b8..cbe76169cc13 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -75,9 +75,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 4dee4e647cb9..4825151de0e7 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -154,9 +154,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -290,9 +291,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 2686f80e37d3..4c5e9676184e 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -151,8 +151,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -203,9 +204,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 8de5078450fa..89a290cf7888 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -31,6 +31,7 @@ Type, Union, ) +import uuid from google.cloud.logging_v2 import gapic_version as package_version @@ -339,10 +340,10 @@ async def sample_delete_log(): log_name (:class:`str`): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example, ``"projects/my-project-id/logs/syslog"``, @@ -465,10 +466,10 @@ async def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -658,17 +659,17 @@ async def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. A maximum of 100 resources may be @@ -918,10 +919,10 @@ async def sample_list_logs(): parent (:class:`str`): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 22318f07a9c0..692ed8ccfde9 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -34,6 +34,7 @@ Union, cast, ) +import uuid import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -754,10 +755,10 @@ def sample_delete_log(): log_name (str): Required. 
The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, @@ -877,10 +878,10 @@ def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -1066,17 +1067,17 @@ def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. A maximum of 100 resources may be @@ -1323,10 +1324,10 @@ def sample_list_logs(): parent (str): Required. 
The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 7f7cfe9a23d9..881a6df3575b 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -75,9 +75,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 7bffe25b68bf..8ed0e80d4011 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -153,9 +153,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -288,9 +289,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index f73ac1150491..46fbec20c215 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -150,8 +150,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -202,9 +203,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
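The docstrings above enumerate the accepted resource-name formats for deleting logs and reading entries; a short usage sketch makes them concrete. This is illustrative only: "my-project" is a placeholder project ID, and the filter expression is an arbitrary example.

    # Sketch only: the resource-name formats documented above, in use.
    # "my-project" is a placeholder project ID.
    from google.cloud.logging_v2.services.logging_service_v2 import (
        LoggingServiceV2Client,
    )

    client = LoggingServiceV2Client()

    # [LOG_ID] must be URL-encoded, e.g. "projects/my-project/logs/syslog".
    client.delete_log(log_name="projects/my-project/logs/syslog")

    # Parents (or, alternatively, log views) from which to read entries.
    for entry in client.list_log_entries(
        resource_names=["projects/my-project"],
        filter="severity>=ERROR",
    ):
        print(entry.log_name)
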
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 129fc055b6d0..7ca6059afce4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import uuid from google.cloud.logging_v2 import gapic_version as package_version diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index f2f0f8ce1087..7a43e3ddc14c 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -32,6 +32,7 @@ Union, cast, ) +import uuid import warnings from google.cloud.logging_v2 import gapic_version as package_version diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 22bc19736a51..22aad6790a50 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -75,9 +75,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index fe0943a948b1..d5ca8fd0f9b1 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -153,9 +153,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. 
channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -288,9 +289,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 01aa05771d09..5134c4c8ad67 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -150,8 +150,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -202,9 +203,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py index e52f3085d128..38ec6e184fa4 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/log_entry.py @@ -214,10 +214,10 @@ class LogEntry(proto.Message): Example values: - - ``000000000000004a`` - - ``7a2190356c3fc94b`` - - ``0000f00300090021`` - - ``d39223e101960076`` + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. 
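The log_entry.py hunk above documents the hex span-ID format and the ``trace_sampled`` flag; a brief sketch of constructing a LogEntry carrying those fields may help. Assumptions are flagged in the comments: the span ID reuses one of the documented example values, while the log name and payload are placeholders.

    # Sketch only: populating the trace fields documented above.
    # Log name and payload are placeholders; the span ID reuses a
    # documented example value.
    from google.cloud.logging_v2.types import LogEntry

    entry = LogEntry(
        log_name="projects/my-project/logs/syslog",
        span_id="7a2190356c3fc94b",  # 16-character hex span ID
        trace_sampled=True,  # sampling decision of the associated trace
        text_payload="request handled",
    )
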
diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py index 5b46a15e1720..e06819a3b7bc 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging.py @@ -51,10 +51,10 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, @@ -79,10 +79,10 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -224,17 +224,17 @@ class ListLogEntriesRequest(proto.Message): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. A maximum of 100 resources may be specified in a @@ -395,24 +395,24 @@ class ListLogsRequest(proto.Message): parent (str): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` resource_names (MutableSequence[str]): Optional. 
List of resource names to list logs for: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` To support legacy queries, it could also be: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` The resource name in the ``parent`` field is added to this list. @@ -484,17 +484,17 @@ class TailLogEntriesRequest(proto.Message): Required. Name of a parent resource from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): Optional. Only log entries that match the filter are returned. An empty filter matches all log entries in the diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py index 6ed09222a535..bfaec563fb49 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/types/logging_config.py @@ -350,9 +350,9 @@ class LogView(proto.Message): Filters are restricted to be a logical AND of ==/!= of any of the following: - - originating project/folder/organization/billing account. - - resource type - - log id + - originating project/folder/organization/billing account. + - resource type + - log id For example: @@ -1300,14 +1300,14 @@ class UpdateSinkRequest(proto.Message): the updated sink depends on both the old and new values of this field: - - If the old and new values of this field are both false or - both true, then there is no change to the sink's - ``writer_identity``. 
- - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service - account. - - It is an error if the old value is true and the new value - is set to false or defaulted to false. + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``sink`` that need an update. A sink field will be overwritten if, diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/owlbot.py index 2be8464c2c68..04aa8926afb3 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/owlbot.py @@ -75,6 +75,7 @@ def place_before(path, text, *before_text, escape=None): "docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead "docs/multiprocessing.rst", "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer + "noxfile.py", ], ) @@ -109,16 +110,9 @@ def place_before(path, text, *before_text, escape=None): ".github/workflows", # exclude gh actions as credentials are needed for tests ".github/auto-label.yaml", "README.rst", # This repo has a customized README + "noxfile.py", ], ) -s.replace("noxfile.py", -"""prerel_deps = \[ - "protobuf",""", -"""prerel_deps = [ - "google-cloud-audit-log", - "protobuf",""", -) - # adjust .trampolinerc for environment tests s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()") s.replace( diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 7d77545ab288..10c88271fc58 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.12.1" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 73a8f5d32330..8f3aa847fa06 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. # import os +import re # try/except added for compatibility with python < 3.8 try: diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index ef3833740586..14a9012e02c6 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os +import re # try/except added for compatibility with python < 3.8 try: diff --git a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7c59a09f1e52..6109c1ef73fc 100644 --- a/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/google-cloud-logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. # import os +import re # try/except added for compatibility with python < 3.8 try: From f350104d585c2ffdba300b740b302470441e6c9e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 18 Nov 2025 21:20:37 -0500 Subject: [PATCH 852/855] chore(librarian): onboard to librarian (#1061) Towards https://github.com/googleapis/librarian/issues/2459 --- .../.github/.OwlBot.lock.yaml | 17 - .../google-cloud-logging/.github/.OwlBot.yaml | 26 - .../.github/auto-approve.yml | 3 - .../.github/release-please.yml | 15 - .../.github/release-trigger.yml | 2 - .../.github/sync-repo-settings.yaml | 18 - .../generator-input/.repo-metadata.json | 17 + .../generator-input/librarian.py} | 27 +- .../.librarian/generator-input/noxfile.py | 493 ++++++++++++++++++ .../.librarian/generator-input/setup.py | 97 ++++ .../.librarian/state.yaml | 41 ++ .../.release-please-manifest.json | 3 - packages/google-cloud-logging/MANIFEST.in | 13 +- .../docs/_templates/layout.html | 4 +- .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 2 +- packages/google-cloud-logging/noxfile.py | 2 +- .../release-please-config.json | 25 - .../snippet_metadata_google.logging.v2.json | 2 +- 19 files changed, 662 insertions(+), 147 deletions(-) delete mode 100644 packages/google-cloud-logging/.github/.OwlBot.lock.yaml delete mode 100644 packages/google-cloud-logging/.github/.OwlBot.yaml delete mode 100644 packages/google-cloud-logging/.github/auto-approve.yml delete mode 100644 packages/google-cloud-logging/.github/release-please.yml delete mode 100644 packages/google-cloud-logging/.github/release-trigger.yml delete mode 100644 packages/google-cloud-logging/.github/sync-repo-settings.yaml create mode 100644 packages/google-cloud-logging/.librarian/generator-input/.repo-metadata.json rename packages/google-cloud-logging/{owlbot.py => .librarian/generator-input/librarian.py} (88%) create mode 100644 packages/google-cloud-logging/.librarian/generator-input/noxfile.py create mode 100644 packages/google-cloud-logging/.librarian/generator-input/setup.py create mode 100644 packages/google-cloud-logging/.librarian/state.yaml delete mode 100644 packages/google-cloud-logging/.release-please-manifest.json delete mode 100644 packages/google-cloud-logging/release-please-config.json diff --git a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml b/packages/google-cloud-logging/.github/.OwlBot.lock.yaml deleted file mode 100644 index 6f1eaeb91e9c..000000000000 --- a/packages/google-cloud-logging/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ecf409a43d8b157fb83c403de4d83e3da7d88e423044410c0e2434bf776221d1 -# created: 2025-04-10T16:21:41.67162455Z diff --git a/packages/google-cloud-logging/.github/.OwlBot.yaml b/packages/google-cloud-logging/.github/.OwlBot.yaml deleted file mode 100644 index 58377caf628c..000000000000 --- a/packages/google-cloud-logging/.github/.OwlBot.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/logging/(v.*)/.*-py/(.*) - dest: /owl-bot-staging/$1/$2 - -begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 - diff --git a/packages/google-cloud-logging/.github/auto-approve.yml b/packages/google-cloud-logging/.github/auto-approve.yml deleted file mode 100644 index 311ebbb853a9..000000000000 --- a/packages/google-cloud-logging/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve -processes: - - "OwlBotTemplateChanges" diff --git a/packages/google-cloud-logging/.github/release-please.yml b/packages/google-cloud-logging/.github/release-please.yml deleted file mode 100644 index dbd2cc9debee..000000000000 --- a/packages/google-cloud-logging/.github/release-please.yml +++ /dev/null @@ -1,15 +0,0 @@ -releaseType: python -handleGHRelease: true -manifest: true -# NOTE: this section is generated by synthtool.languages.python -# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py -branches: -- branch: v2 - handleGHRelease: true - releaseType: python -- branch: v1 - handleGHRelease: true - releaseType: python -- branch: v0 - handleGHRelease: true - releaseType: python diff --git a/packages/google-cloud-logging/.github/release-trigger.yml b/packages/google-cloud-logging/.github/release-trigger.yml deleted file mode 100644 index d47d146a9bc5..000000000000 --- a/packages/google-cloud-logging/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: python-logging diff --git a/packages/google-cloud-logging/.github/sync-repo-settings.yaml b/packages/google-cloud-logging/.github/sync-repo-settings.yaml deleted file mode 100644 index 439a0bcb7715..000000000000 --- a/packages/google-cloud-logging/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings -# 
Rules for main branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `main` -- pattern: main - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true - requiredStatusCheckContexts: - - 'Kokoro' - - 'cla/google' - - 'Samples - Lint' - - 'Samples - Python 3.7' - - 'Samples - Python 3.8' - - 'Samples - Python 3.9' - - 'Samples - Python 3.10' - - 'Samples - Python 3.11' - - 'Samples - Python 3.12' diff --git a/packages/google-cloud-logging/.librarian/generator-input/.repo-metadata.json b/packages/google-cloud-logging/.librarian/generator-input/.repo-metadata.json new file mode 100644 index 000000000000..83c212332e89 --- /dev/null +++ b/packages/google-cloud-logging/.librarian/generator-input/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "logging", + "name_pretty": "Cloud Logging", + "product_documentation": "https://cloud.google.com/logging/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/logging/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_COMBO", + "repo": "googleapis/python-logging", + "distribution_name": "google-cloud-logging", + "api_id": "logging.googleapis.com", + "codeowner_team": "@googleapis/api-logging @googleapis/api-logging-partners", + "default_version": "v2", + "api_shortname": "logging", + "api_description": "allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud and Amazon Web Services. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premises systems, and hybrid cloud systems. BindPlane is included with your Google Cloud project at no additional cost." +} diff --git a/packages/google-cloud-logging/owlbot.py b/packages/google-cloud-logging/.librarian/generator-input/librarian.py similarity index 88% rename from packages/google-cloud-logging/owlbot.py rename to packages/google-cloud-logging/.librarian/generator-input/librarian.py index 04aa8926afb3..bf358a4b0218 100644 --- a/packages/google-cloud-logging/owlbot.py +++ b/packages/google-cloud-logging/.librarian/generator-input/librarian.py @@ -65,12 +65,12 @@ def place_before(path, text, *before_text, escape=None): ) s.move([library], excludes=[ - "**/gapic_version.py", "setup.py", "testing/constraints*.txt", "README.rst", "google/cloud/logging/__init__.py", # generated types are hidden from users "google/cloud/logging_v2/__init__.py", + "docs/conf.py", "docs/index.rst", "docs/logging_v2", # Don't include gapic library docs. 
Users should use the hand-written layer instead "docs/multiprocessing.rst", @@ -104,15 +104,14 @@ def place_before(path, text, *before_text, escape=None): s.move(templated_files, excludes=[ "docs/index.rst", - ".github/release-please.yml", + ".github/**", + ".kokoro/**", ".coveragerc", "docs/multiprocessing.rst", - ".github/workflows", # exclude gh actions as credentials are needed for tests - ".github/auto-label.yaml", "README.rst", # This repo has a customized README - "noxfile.py", ], ) + # adjust .trampolinerc for environment tests s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()") s.replace( @@ -242,21 +241,3 @@ def place_before(path, text, *before_text, escape=None): s.replace(sample_files, text, replacement) s.shell.run(["nox", "-s", "blacken"], hide_output=False) -s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False) - -# -------------------------------------------------------------------------- -# Modify test configs -# -------------------------------------------------------------------------- - -# add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "samples"] -for subdir in tracked_subdirs: - for path, subdirs, files in os.walk(f".kokoro/{subdir}"): - for name in files: - if name == "common.cfg": - file_path = os.path.join(path, name) - s.move( - ".kokoro/common_env_vars.cfg", - file_path, - merge=lambda src, dst, _,: f"{dst}\n{src}", - ) diff --git a/packages/google-cloud-logging/.librarian/generator-input/noxfile.py b/packages/google-cloud-logging/.librarian/generator-input/noxfile.py new file mode 100644 index 000000000000..5c352793ab19 --- /dev/null +++ b/packages/google-cloud-logging/.librarian/generator-input/noxfile.py @@ -0,0 +1,493 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "flask", + "webob", + "django", +] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "google-cloud-bigquery", + "google-cloud-pubsub", + "google-cloud-storage", + "google-cloud-testutils", + "opentelemetry-sdk", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", + "docfx", + "format", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.13") +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=99") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "google-cloud-audit-log", + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. 
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-logging/.librarian/generator-input/setup.py b/packages/google-cloud-logging/.librarian/generator-input/setup.py new file mode 100644 index 000000000000..c80db0467b8b --- /dev/null +++ b/packages/google-cloud-logging/.librarian/generator-input/setup.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-logging" + + +description = "Stackdriver Logging API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/logging/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-appengine-logging>=0.1.3, <2.0.0", + "google-cloud-audit-log >= 0.3.1, < 1.0.0", + "google-cloud-core >= 2.0.0, <3.0.0", + "grpc-google-iam-v1 >=0.12.4, <1.0.0", + "opentelemetry-api >= 1.9.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-logging" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: 
Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-logging/.librarian/state.yaml b/packages/google-cloud-logging/.librarian/state.yaml new file mode 100644 index 000000000000..2edc47251c9a --- /dev/null +++ b/packages/google-cloud-logging/.librarian/state.yaml @@ -0,0 +1,41 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:8e2c32496077054105bd06c54a59d6a6694287bc053588e24debe6da6920ad91 +libraries: + - id: google-cloud-logging + version: 3.12.1 + last_generated_commit: 5400ccce473c439885bd6bf2924fd242271bfcab + apis: + - path: google/logging/v2 + service_config: logging_v2.yaml + source_roots: + - . + preserve_regex: [] + remove_regex: + - ^.coveragerc + - ^.flake8 + - ^.pre-commit-config.yaml + - ^.trampolinerc + - ^.repo-metadata.json + - ^LICENSE + - ^MANIFEST.in + - ^SECURITY.md + - ^mypy.ini + - ^noxfile.py + - ^renovate.json + - ^setup.py + - ^docs/summary_overview.md + - ^docs/_static/custom.css + - ^docs/_templates + - ^google/cloud/logging_v2/services + - ^google/cloud/logging_v2/types + - ^google/cloud/logging_v2/gapic_version.py + - ^google/cloud/logging_v2/gapic_metadata.json + - ^google/cloud/logging_v2/py.typed + - ^google/cloud/logging/gapic_version.py + - ^google/cloud/logging/py.typed + - ^samples/AUTHORING_GUIDE.md + - ^samples/CONTRIBUTING.md + - ^samples/generated_samples + - ^tests/__init__.py + - ^tests/unit/__init__.py + - ^tests/unit/gapic + tag_format: v{version} diff --git a/packages/google-cloud-logging/.release-please-manifest.json b/packages/google-cloud-logging/.release-please-manifest.json deleted file mode 100644 index d235af2ce8fc..000000000000 --- a/packages/google-cloud-logging/.release-please-manifest.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - ".": "3.12.1" -} diff --git a/packages/google-cloud-logging/MANIFEST.in b/packages/google-cloud-logging/MANIFEST.in index d6814cd60037..dae249ec8976 100644 --- a/packages/google-cloud-logging/MANIFEST.in +++ b/packages/google-cloud-logging/MANIFEST.in @@ -1,25 +1,20 @@ # -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! 
+# include README.rst LICENSE -recursive-include google *.json *.proto py.typed +recursive-include google *.py *.pyi *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ - -# Exclude scripts for samples readmegen -prune scripts/readme-gen diff --git a/packages/google-cloud-logging/docs/_templates/layout.html b/packages/google-cloud-logging/docs/_templates/layout.html index 6316a537f72b..95e9c77fcfe1 100644 --- a/packages/google-cloud-logging/docs/_templates/layout.html +++ b/packages/google-cloud-logging/docs/_templates/layout.html @@ -20,8 +20,8 @@ {% endblock %}
-
-    As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+
+    As of January 1, 2020 this library no longer supports Python 2 on the latest released version. Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
diff --git a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py index 14833215c203..bca86d6364c5 100644 --- a/packages/google-cloud-logging/google/cloud/logging/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py index 14833215c203..bca86d6364c5 100644 --- a/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-logging/noxfile.py b/packages/google-cloud-logging/noxfile.py index e71504756b3a..5c352793ab19 100644 --- a/packages/google-cloud-logging/noxfile.py +++ b/packages/google-cloud-logging/noxfile.py @@ -110,7 +110,7 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.13") def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) diff --git a/packages/google-cloud-logging/release-please-config.json b/packages/google-cloud-logging/release-please-config.json deleted file mode 100644 index 264e357f4fe4..000000000000 --- a/packages/google-cloud-logging/release-please-config.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", - "packages": { - ".": { - "release-type": "python", - "extra-files": [ - "google/cloud/logging_v2/gapic_version.py", - "google/cloud/logging/gapic_version.py", - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.logging.v2.json", - "jsonpath": "$.clientLibrary.version" - } - ] - } - }, - "release-type": "python", - "plugins": [ - { - "type": "sentence-case" - } - ], - "initial-version": "0.1.0" -} - \ No newline at end of file diff --git a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 10c88271fc58..7d77545ab288 100644 --- a/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/google-cloud-logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.0.0" + "version": "3.12.1" }, "snippets": [ { From d683d948b113860c4366333c5d70c5f0d968e8c8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 25 Nov 2025 20:19:06 +0000 Subject: [PATCH 853/855] Trigger owlbot post-processor --- .../google-cloud-logging/google-cloud-logging.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 owl-bot-staging/google-cloud-logging/google-cloud-logging/google-cloud-logging.txt diff --git 
a/owl-bot-staging/google-cloud-logging/google-cloud-logging/google-cloud-logging.txt b/owl-bot-staging/google-cloud-logging/google-cloud-logging/google-cloud-logging.txt new file mode 100644 index 000000000000..e69de29bb2d1 From 637f72b6abeab39ed52a61f45b250aec5c796d0c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 25 Nov 2025 20:19:13 +0000 Subject: [PATCH 854/855] build: google-cloud-logging migration: adjust owlbot-related files --- .../google-cloud-logging/.github/CODEOWNERS | 12 - .../.github/CONTRIBUTING.md | 28 - .../.github/ISSUE_TEMPLATE/bug_report.md | 43 -- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - .../.github/auto-label.yaml | 19 - .../.github/blunderbuss.yml | 20 - .../.github/header-checker-lint.yml | 15 - .../.github/snippet-bot.yml | 0 .../google-cloud-logging/.kokoro/build.sh | 60 --- .../.kokoro/common_env_vars.cfg | 19 - .../.kokoro/continuous/common.cfg | 47 -- .../.kokoro/continuous/continuous.cfg | 1 - .../.kokoro/continuous/prerelease-deps.cfg | 7 - .../appengine_flex_container/common.cfg | 49 -- .../appengine_flex_container/continuous.cfg | 1 - .../appengine_flex_container/presubmit.cfg | 1 - .../appengine_flex_python/common.cfg | 49 -- .../appengine_flex_python/continuous.cfg | 1 - .../appengine_flex_python/presubmit.cfg | 1 - .../environment/appengine_standard/common.cfg | 49 -- .../appengine_standard/continuous.cfg | 1 - .../appengine_standard/presubmit.cfg | 1 - .../.kokoro/environment/cloudrun/common.cfg | 49 -- .../environment/cloudrun/continuous.cfg | 1 - .../environment/cloudrun/presubmit.cfg | 1 - .../.kokoro/environment/compute/common.cfg | 49 -- .../environment/compute/continuous.cfg | 1 - .../.kokoro/environment/compute/presubmit.cfg | 1 - .../.kokoro/environment/functions/common.cfg | 49 -- .../environment/functions/continuous.cfg | 1 - .../environment/functions/presubmit.cfg | 1 - .../environment/functions_37/common.cfg | 54 -- .../environment/functions_37/continuous.cfg | 1 - .../environment/functions_37/presubmit.cfg | 1 - .../.kokoro/environment/kubernetes/common.cfg | 49 -- .../environment/kubernetes/continuous.cfg | 1 - .../environment/kubernetes/presubmit.cfg | 1 - .../.kokoro/environment_tests.sh | 83 --- .../.kokoro/performance/common.cfg | 42 -- .../.kokoro/performance/performance.cfg | 1 - .../.kokoro/populate-secrets.sh | 43 -- .../.kokoro/presubmit/common.cfg | 47 -- .../.kokoro/presubmit/prerelease-deps.cfg | 7 - .../.kokoro/presubmit/presubmit.cfg | 1 - .../.kokoro/samples/lint/common.cfg | 53 -- .../.kokoro/samples/lint/continuous.cfg | 6 - .../.kokoro/samples/lint/periodic.cfg | 6 - .../.kokoro/samples/lint/presubmit.cfg | 6 - .../.kokoro/samples/python3.10/common.cfg | 59 --- .../.kokoro/samples/python3.10/continuous.cfg | 6 - .../samples/python3.10/periodic-head.cfg | 11 - .../.kokoro/samples/python3.10/periodic.cfg | 6 - .../.kokoro/samples/python3.10/presubmit.cfg | 6 - .../.kokoro/samples/python3.11/common.cfg | 59 --- .../.kokoro/samples/python3.11/continuous.cfg | 6 - .../samples/python3.11/periodic-head.cfg | 11 - .../.kokoro/samples/python3.11/periodic.cfg | 6 - .../.kokoro/samples/python3.11/presubmit.cfg | 6 - .../.kokoro/samples/python3.12/common.cfg | 59 --- .../.kokoro/samples/python3.12/continuous.cfg | 6 - .../samples/python3.12/periodic-head.cfg | 11 - .../.kokoro/samples/python3.12/periodic.cfg | 6 - .../.kokoro/samples/python3.12/presubmit.cfg | 6 - .../.kokoro/samples/python3.13/common.cfg | 60 
--- .../.kokoro/samples/python3.13/continuous.cfg | 6 - .../samples/python3.13/periodic-head.cfg | 11 - .../.kokoro/samples/python3.13/periodic.cfg | 6 - .../.kokoro/samples/python3.13/presubmit.cfg | 6 - .../.kokoro/samples/python3.7/common.cfg | 59 --- .../.kokoro/samples/python3.7/continuous.cfg | 6 - .../samples/python3.7/periodic-head.cfg | 11 - .../.kokoro/samples/python3.7/periodic.cfg | 6 - .../.kokoro/samples/python3.7/presubmit.cfg | 6 - .../.kokoro/samples/python3.8/common.cfg | 59 --- .../.kokoro/samples/python3.8/continuous.cfg | 6 - .../samples/python3.8/periodic-head.cfg | 11 - .../.kokoro/samples/python3.8/periodic.cfg | 6 - .../.kokoro/samples/python3.8/presubmit.cfg | 6 - .../.kokoro/samples/python3.9/common.cfg | 59 --- .../.kokoro/samples/python3.9/continuous.cfg | 6 - .../samples/python3.9/periodic-head.cfg | 11 - .../.kokoro/samples/python3.9/periodic.cfg | 6 - .../.kokoro/samples/python3.9/presubmit.cfg | 6 - .../.kokoro/test-performance.sh | 44 -- .../.kokoro/test-samples-against-head.sh | 26 - .../.kokoro/test-samples-impl.sh | 103 ---- .../.kokoro/test-samples.sh | 44 -- .../.kokoro/trampoline.sh | 28 - .../.kokoro/trampoline_v2.sh | 487 ------------------ packages/google-cloud-logging/.trampolinerc | 62 --- .../google-cloud-logging/docs/changelog.md | 1 - .../single-library.git-migrate-history.sh | 8 +- 94 files changed, 4 insertions(+), 2427 deletions(-) delete mode 100644 packages/google-cloud-logging/.github/CODEOWNERS delete mode 100644 packages/google-cloud-logging/.github/CONTRIBUTING.md delete mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 packages/google-cloud-logging/.github/auto-label.yaml delete mode 100644 packages/google-cloud-logging/.github/blunderbuss.yml delete mode 100644 packages/google-cloud-logging/.github/header-checker-lint.yml delete mode 100644 packages/google-cloud-logging/.github/snippet-bot.yml delete mode 100755 packages/google-cloud-logging/.kokoro/build.sh delete mode 100644 packages/google-cloud-logging/.kokoro/common_env_vars.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/continuous/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/continuous/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg delete mode 100644 
packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg delete mode 100755 packages/google-cloud-logging/.kokoro/environment_tests.sh delete mode 100644 packages/google-cloud-logging/.kokoro/performance/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/performance/performance.cfg delete mode 100755 packages/google-cloud-logging/.kokoro/populate-secrets.sh delete mode 100644 packages/google-cloud-logging/.kokoro/presubmit/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg delete mode 100644 
packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg delete mode 100644 packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg delete mode 100755 packages/google-cloud-logging/.kokoro/test-performance.sh delete mode 100755 packages/google-cloud-logging/.kokoro/test-samples-against-head.sh delete mode 100755 packages/google-cloud-logging/.kokoro/test-samples-impl.sh delete mode 100755 packages/google-cloud-logging/.kokoro/test-samples.sh delete mode 100755 packages/google-cloud-logging/.kokoro/trampoline.sh delete mode 100755 packages/google-cloud-logging/.kokoro/trampoline_v2.sh delete mode 100644 packages/google-cloud-logging/.trampolinerc delete mode 120000 packages/google-cloud-logging/docs/changelog.md diff --git a/packages/google-cloud-logging/.github/CODEOWNERS b/packages/google-cloud-logging/.github/CODEOWNERS deleted file mode 100644 index 0738e11eea58..000000000000 --- a/packages/google-cloud-logging/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
- -# @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners - -# @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners diff --git a/packages/google-cloud-logging/.github/CONTRIBUTING.md b/packages/google-cloud-logging/.github/CONTRIBUTING.md deleted file mode 100644 index 939e5341e74d..000000000000 --- a/packages/google-cloud-logging/.github/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# How to Contribute - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution; -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to <https://cla.developers.google.com/> to see -your current agreements on file or to sign a new one. - -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Code reviews - -All submissions, including submissions by project members, require review. We -use GitHub pull requests for this purpose. Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. - -## Community Guidelines - -This project follows [Google's Open Source Community -Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index e9f7d79ac9c8..000000000000 --- a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -Please run down the following list and make sure you've tried the usual "quick fixes": - - - Search the issues already opened: https://github.com/googleapis/python-logging/issues - - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python - -If you are still having issues, please be sure to include as much information as possible: - -#### Environment details - - - OS type and version: - - Python version: `python --version` - - pip version: `pip --version` - - `google-cloud-logging` version: `pip show google-cloud-logging` - -#### Steps to reproduce - - 1. ? - 2. ? - -#### Code example - -```python -# example -``` - -#### Stack trace -``` -# example -``` - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks!
diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 6365857f33c6..000000000000 --- a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 995869032125..000000000000 --- a/packages/google-cloud-logging/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index ff76f9a9c6d0..000000000000 --- a/packages/google-cloud-logging/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-logging/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🦕 diff --git a/packages/google-cloud-logging/.github/auto-label.yaml b/packages/google-cloud-logging/.github/auto-label.yaml deleted file mode 100644 index ccad49b4ebfb..000000000000 --- a/packages/google-cloud-logging/.github/auto-label.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-requestsize: - enabled: true -staleness: - pullrequest: true - old: 30 - extraold: 60 diff --git a/packages/google-cloud-logging/.github/blunderbuss.yml b/packages/google-cloud-logging/.github/blunderbuss.yml deleted file mode 100644 index d5f69b10a4ef..000000000000 --- a/packages/google-cloud-logging/.github/blunderbuss.yml +++ /dev/null @@ -1,20 +0,0 @@ -# Blunderbuss config -# -# This file controls who is assigned for pull requests and issues. -# Note: This file is autogenerated. To make changes to the assignee -# team, please update `codeowner_team` in `.repo-metadata.json`. -assign_issues: - - googleapis/api-logging - - googleapis/api-logging-partners - -assign_issues_by: - - labels: - - "samples" - to: - - googleapis/python-samples-reviewers - - googleapis/api-logging - - googleapis/api-logging-partners - -assign_prs: - - googleapis/api-logging - - googleapis/api-logging-partners diff --git a/packages/google-cloud-logging/.github/header-checker-lint.yml b/packages/google-cloud-logging/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa7987a..000000000000 --- a/packages/google-cloud-logging/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.github/snippet-bot.yml b/packages/google-cloud-logging/.github/snippet-bot.yml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-logging/.kokoro/build.sh b/packages/google-cloud-logging/.kokoro/build.sh deleted file mode 100755 index d41b45aa1dd0..000000000000 --- a/packages/google-cloud-logging/.kokoro/build.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") -fi - -pushd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi - -# Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
-if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. -if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/packages/google-cloud-logging/.kokoro/common_env_vars.cfg b/packages/google-cloud-logging/.kokoro/common_env_vars.cfg deleted file mode 100644 index 69ba31edf6d0..000000000000 --- a/packages/google-cloud-logging/.kokoro/common_env_vars.cfg +++ /dev/null @@ -1,19 +0,0 @@ - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/continuous/common.cfg b/packages/google-cloud-logging/.kokoro/continuous/common.cfg deleted file mode 100644 index 6745b353755d..000000000000 --- a/packages/google-cloud-logging/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,47 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/build.sh" -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg b/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-logging/.kokoro/continuous/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. 
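For context on the NOX_SESSION plumbing being removed here: the Kokoro .cfg files export NOX_SESSION, and the deleted build.sh runs only that session when the variable is set, or the whole suite otherwise. A Python rendering of that shell branch (a sketch, not part of the patch):

    import os
    import subprocess

    # Mirrors the tail of the deleted .kokoro/build.sh: run one nox session
    # when NOX_SESSION is set, otherwise run every session in the noxfile.
    cmd = ["python3", "-m", "nox"]
    nox_session = os.environ.get("NOX_SESSION")
    if nox_session:
        cmd += ["-s", nox_session]
    subprocess.run(cmd, check=True)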
-env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg deleted file mode 100644 index c53ed690f7c7..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "appengine_flex_container" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_container/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg deleted file mode 100644 index d5ea9288b615..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "appengine_flex_python" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_flex_python/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg deleted file mode 100644 index d31bde925264..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "appengine_standard" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/appengine_standard/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg deleted file mode 100644 index 855a6f6e10c7..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/cloudrun/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "cloudrun" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/cloudrun/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/cloudrun/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg deleted file mode 100644 index 519d791cc03a..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/compute/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "compute" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/compute/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/compute/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg deleted file mode 100644 index 667a285b89c6..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "functions" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg deleted file mode 100644 index 2ee8d6fc545b..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions_37/common.cfg +++ /dev/null @@ -1,54 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "functions" -} - -env_vars: { - key: "RUNTIME" - value: "python37" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions_37/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/functions_37/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg deleted file mode 100644 index b778627f488e..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/kubernetes/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - - -# Specify which tests to run -env_vars: { - key: "ENVIRONMENT" - value: "kubernetes" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/environment_tests.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/kubernetes/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg b/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment/kubernetes/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/google-cloud-logging/.kokoro/environment_tests.sh b/packages/google-cloud-logging/.kokoro/environment_tests.sh deleted file mode 100755 index 29913aeaf581..000000000000 --- a/packages/google-cloud-logging/.kokoro/environment_tests.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eox pipefail - -if [[ -z "${ENVIRONMENT:-}" ]]; then - echo "ENVIRONMENT not set. Exiting" - exit 1 -fi - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-logging" -fi - -# make sure submodule is up to date -git submodule update --init --recursive - -cd "${PROJECT_ROOT}/tests/environment" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -gcloud auth activate-service-account --key-file=$GOOGLE_APPLICATION_CREDENTIALS - -# Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -gcloud config set project $PROJECT_ID - -# set a default zone. 
-gcloud config set compute/zone us-central1-b - -# authenticate docker -gcloud auth configure-docker -q - -# Install nox -virtualenv .venv -source .venv/bin/activate -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - -# Install kubectl -if [[ "${ENVIRONMENT}" == "kubernetes" ]]; then - curl -LO https://dl.k8s.io/release/v1.20.0/bin/linux/amd64/kubectl - chmod +x kubectl - mkdir -p ~/.local/bin - mv ./kubectl ~/.local/bin - export PATH=$PATH:~/.local/bin -fi - -# create a unique id for this run -UUID=$(python -c 'import uuid; print(str(uuid.uuid1())[:7])') -export ENVCTL_ID=ci-$UUID -echo $ENVCTL_ID - -# Run the specified environment test -set +e - -python3 -m nox --session "tests(language='python', platform='$ENVIRONMENT')" -TEST_STATUS_CODE=$? - -# destroy resources -echo "cleaning up..." -${PROJECT_ROOT}/tests/environment/envctl/envctl python $ENVIRONMENT destroy - -# exit with proper status code -exit $TEST_STATUS_CODE diff --git a/packages/google-cloud-logging/.kokoro/performance/common.cfg b/packages/google-cloud-logging/.kokoro/performance/common.cfg deleted file mode 100644 index b3dd88793c9b..000000000000 --- a/packages/google-cloud-logging/.kokoro/performance/common.cfg +++ /dev/null @@ -1,42 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-performance.sh" -} - -# add labels to help with testgrid filtering -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - diff --git a/packages/google-cloud-logging/.kokoro/performance/performance.cfg b/packages/google-cloud-logging/.kokoro/performance/performance.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/performance/performance.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/populate-secrets.sh b/packages/google-cloud-logging/.kokoro/populate-secrets.sh deleted file mode 100755 index c435402f473e..000000000000 --- a/packages/google-cloud-logging/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? == 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/packages/google-cloud-logging/.kokoro/presubmit/common.cfg b/packages/google-cloud-logging/.kokoro/presubmit/common.cfg deleted file mode 100644 index 6745b353755d..000000000000 --- a/packages/google-cloud-logging/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,47 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/build.sh" -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-logging/.kokoro/presubmit/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. 
-env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-logging/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg deleted file mode 100644 index feb119185a02..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/lint/common.cfg +++ /dev/null @@ -1,53 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "lint" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/lint/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg deleted file mode 100644 index 50fec9649732..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/lint/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/lint/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg deleted file mode 100644 index 733aed7c4478..000000000000 --- 
a/packages/google-cloud-logging/.kokoro/samples/python3.10/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.10" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-310" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.10/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.10/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.10/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg 
b/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg deleted file mode 100644 index 557e750529d6..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.11/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.11" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-311" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.11/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.11/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.11/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at 
end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg deleted file mode 100644 index fb8ce87952aa..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.12/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.12" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-312" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.12/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.12/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.12/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg deleted file mode 100644 index 4eb8ee8be91b..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.13/common.cfg +++ /dev/null @@ -1,60 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.13" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-313" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.13/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.13/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- 
a/packages/google-cloud-logging/.kokoro/samples/python3.13/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg deleted file mode 100644 index 5501afd73e93..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.7" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py37" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg 
b/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.7/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg deleted file mode 100644 index f3c555136e2f..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.8" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py38" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff 
--git a/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.8/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg deleted file mode 100644 index fe06e7578e54..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/common.cfg +++ /dev/null @@ -1,59 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.9" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py39" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline_v2.sh" - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg deleted file mode 100644 index 7e2973e3b659..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto 
- -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-logging/.kokoro/samples/python3.9/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/test-performance.sh b/packages/google-cloud-logging/.kokoro/test-performance.sh deleted file mode 100755 index a9a44c3cdebf..000000000000 --- a/packages/google-cloud-logging/.kokoro/test-performance.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -set -eox pipefail - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-logging" -fi - -cd "${PROJECT_ROOT}/tests/performance" - - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - - -# Install nox -python3 -m pip install --upgrade --quiet nox - -# run performance tests -set +e -python3 -m nox -TEST_STATUS_CODE=$? - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$TEST_STATUS_CODE" diff --git a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh b/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh deleted file mode 100755 index e9d8bd79a644..000000000000 --- a/packages/google-cloud-logging/.kokoro/test-samples-against-head.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A customized test runner for samples. -# -# For periodic builds, you can specify this file for testing against head. 
- -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh b/packages/google-cloud-logging/.kokoro/test-samples-impl.sh deleted file mode 100755 index 53e365bc4e79..000000000000 --- a/packages/google-cloud-logging/.kokoro/test-samples-impl.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Exit early if samples don't exist -if ! find samples -name 'requirements.txt' | grep -q .; then - echo "No tests run. './samples/**/requirements.txt' not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -# `virtualenv==20.26.6` is added for Python 3.7 compatibility -python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 - -# Use secrets accessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.9 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - fi - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/packages/google-cloud-logging/.kokoro/test-samples.sh b/packages/google-cloud-logging/.kokoro/test-samples.sh deleted file mode 100755 index 7933d820149a..000000000000 --- a/packages/google-cloud-logging/.kokoro/test-samples.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The default test runner for samples. -# -# For periodic builds, we rewind the repo to the latest release, and -# run test-samples-impl.sh. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Run periodic samples tests at latest release -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - # preserving the test runner implementation. - cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - echo "Now we rewind the repo back to the latest release..." - LATEST_RELEASE=$(git describe --abbrev=0 --tags) - git checkout $LATEST_RELEASE - echo "The current head is: " - echo $(git rev-parse --verify HEAD) - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - # move back the test runner implementation if there's no file. - if [ ! -f .kokoro/test-samples-impl.sh ]; then - cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh - fi -fi - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-logging/.kokoro/trampoline.sh b/packages/google-cloud-logging/.kokoro/trampoline.sh deleted file mode 100755 index 48f79699706e..000000000000 --- a/packages/google-cloud-logging/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh b/packages/google-cloud-logging/.kokoro/trampoline_v2.sh deleted file mode 100755 index 35fa529231dc..000000000000 --- a/packages/google-cloud-logging/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# To run this script, first download a few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo-specific envvars in .trampolinerc in -# the project root. - - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100.
-function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. -function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passed down to the -# container to tell which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables.
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. - if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. 
- "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. - "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." - fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/packages/google-cloud-logging/.trampolinerc b/packages/google-cloud-logging/.trampolinerc deleted file mode 100644 index 636e35c3257f..000000000000 --- a/packages/google-cloud-logging/.trampolinerc +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. 
-required_envvars+=()
-
-# Add env vars which are passed down into the container here.
-pass_down_envvars+=(
-    "ENVIRONMENT"
-    "RUNTIME"
-    "NOX_SESSION"
-    ###############
-    # Docs builds
-    ###############
-    "STAGING_BUCKET"
-    "V2_STAGING_BUCKET"
-    ##################
-    # Samples builds
-    ##################
-    "INSTALL_LIBRARY_FROM_SOURCE"
-    "RUN_TESTS_SESSION"
-    "BUILD_SPECIFIC_GCLOUD_PROJECT"
-    # Target directories.
-    "RUN_TESTS_DIRS"
-    # The nox session to run.
-    "RUN_TESTS_SESSION"
-)
-
-# Prevent unintentional override of the default image.
-if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
-       [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
-    echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
-    exit 1
-fi
-
-# Define the default value if it makes sense.
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
-    TRAMPOLINE_IMAGE_UPLOAD=""
-fi
-
-if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
-    TRAMPOLINE_IMAGE=""
-fi
-
-if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
-    TRAMPOLINE_DOCKERFILE=""
-fi
-
-if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
-    TRAMPOLINE_BUILD_FILE=""
-fi
diff --git a/packages/google-cloud-logging/docs/changelog.md b/packages/google-cloud-logging/docs/changelog.md
deleted file mode 120000
index 04c99a55caae..000000000000
--- a/packages/google-cloud-logging/docs/changelog.md
+++ /dev/null
@@ -1 +0,0 @@
-../CHANGELOG.md
\ No newline at end of file
diff --git a/scripts/split_repo_migration/single-library.git-migrate-history.sh b/scripts/split_repo_migration/single-library.git-migrate-history.sh
index 0181b6368162..9f926e1d030e 100755
--- a/scripts/split_repo_migration/single-library.git-migrate-history.sh
+++ b/scripts/split_repo_migration/single-library.git-migrate-history.sh
@@ -70,7 +70,7 @@ echo "Created working directory: ${WORKDIR}"
 pushd "${WORKDIR}" # cd into workdir
 
 echo "Cloning source repository: ${SOURCE_REPO}"
-git clone "git@github.com:${SOURCE_REPO}.git" source-repo
+git clone --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo
 
 pushd source-repo
 
@@ -124,7 +124,7 @@ git filter-branch \
     --force \
     --prune-empty \
     --tree-filter \
-    "shopt -s dotglob; mkdir -p ${WORKDIR}/migrated-source; mv * ${WORKDIR}/migrated-source; mkdir -p ${TARGET_PATH}; { mv ${WORKDIR}/migrated-source/* ${TARGET_PATH} || echo 'No files to move' ; }"
+    "git submodule update --init --recursive; find . -mindepth 2 -name .git -exec rm -rf {} +; shopt -s dotglob; mkdir -p ${WORKDIR}/migrated-source; mv * ${WORKDIR}/migrated-source; mkdir -p ${TARGET_PATH}; { mv ${WORKDIR}/migrated-source/* ${TARGET_PATH} || echo 'No files to move' ; }"
 
 # back to workdir
 popd
 
@@ -142,8 +142,8 @@ echo "Success"
 popd # back to workdir
 
 # Do a diff between source code split repo and migrated code.
-git clone "git@github.com:${SOURCE_REPO}.git" source-repo-validation # Not ideal to clone again.
-rm -rf source-repo-validation/.git # That folder is not needed for validation.
+git clone --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo-validation # Not ideal to clone again.
+find source-repo-validation -name .git -exec rm -rf {} + # That folder is not needed for validation.
DIFF_FILE="${WORKDIR}/diff.txt" if diff -r "${TARGET_REPO}/${TARGET_PATH}" source-repo-validation > "${DIFF_FILE}" ; then From 8c732306d873a0913edbad634828956a7479c441 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 17:17:08 +0000 Subject: [PATCH 855/855] Add google-cloud-logging to librarian --- .librarian/state.yaml | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/.librarian/state.yaml b/.librarian/state.yaml index fc72a1f57277..995f5928ffbf 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -4304,3 +4304,42 @@ libraries: - README.rst - docs/summary_overview.md tag_format: '{id}-v{version}' + - id: google-cloud-logging + version: 3.12.1 + last_generated_commit: 5400ccce473c439885bd6bf2924fd242271bfcab + apis: + - path: google/logging/v2 + service_config: logging_v2.yaml + source_roots: + - . + preserve_regex: [] + remove_regex: + - ^packages/google-cloud-logging/.coveragerc + - ^packages/google-cloud-logging/.flake8 + - ^packages/google-cloud-logging/.pre-commit-config.yaml + - ^packages/google-cloud-logging/.trampolinerc + - ^packages/google-cloud-logging/.repo-metadata.json + - ^packages/google-cloud-logging/LICENSE + - ^packages/google-cloud-logging/MANIFEST.in + - ^packages/google-cloud-logging/SECURITY.md + - ^packages/google-cloud-logging/mypy.ini + - ^packages/google-cloud-logging/noxfile.py + - ^packages/google-cloud-logging/renovate.json + - ^packages/google-cloud-logging/setup.py + - ^packages/google-cloud-logging/docs/summary_overview.md + - ^packages/google-cloud-logging/docs/_static/custom.css + - ^packages/google-cloud-logging/docs/_templates + - ^packages/google-cloud-logging/google/cloud/logging_v2/services + - ^packages/google-cloud-logging/google/cloud/logging_v2/types + - ^packages/google-cloud-logging/google/cloud/logging_v2/gapic_version.py + - ^packages/google-cloud-logging/google/cloud/logging_v2/gapic_metadata.json + - ^packages/google-cloud-logging/google/cloud/logging_v2/py.typed + - ^packages/google-cloud-logging/google/cloud/logging/gapic_version.py + - ^packages/google-cloud-logging/google/cloud/logging/py.typed + - ^packages/google-cloud-logging/samples/AUTHORING_GUIDE.md + - ^packages/google-cloud-logging/samples/CONTRIBUTING.md + - ^packages/google-cloud-logging/samples/generated_samples + - ^packages/google-cloud-logging/tests/__init__.py + - ^packages/google-cloud-logging/tests/unit/__init__.py + - ^packages/google-cloud-logging/tests/unit/gapic + tag_format: v{version}